Lines Matching defs:obj in drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c

31 struct drm_i915_gem_object *obj = dma_buf_to_obj(attach->dmabuf);
46 ret = sg_alloc_table(sgt, obj->mm.pages->orig_nents, GFP_KERNEL);
51 for_each_sg(obj->mm.pages->sgl, src, obj->mm.pages->orig_nents, i) {
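Lines 31-51 fall in i915_gem_map_dma_buf(), the exporter's map_dma_buf hook: it clones the object's backing sg_table so each attachment gets an independent DMA mapping. A minimal sketch of the surrounding function, reconstructed from my reading of the upstream file; the include block, the unwind labels and the DMA_ATTR_SKIP_CPU_SYNC flag are assumptions beyond what the matches show. Later sketches below live in the same file and share these includes.

#include <linux/dma-buf.h>
#include <linux/scatterlist.h>

#include "i915_drv.h"
#include "i915_gem_object.h"

static struct sg_table *i915_gem_map_dma_buf(struct dma_buf_attachment *attach,
					     enum dma_data_direction dir)
{
	struct drm_i915_gem_object *obj = dma_buf_to_obj(attach->dmabuf);
	struct sg_table *sgt;
	struct scatterlist *src, *dst;
	int ret, i;

	/* Copy the object's sg_table so the attachment owns an
	 * independent mapping. */
	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table(sgt, obj->mm.pages->orig_nents, GFP_KERNEL);
	if (ret)
		goto err_free;

	dst = sgt->sgl;
	for_each_sg(obj->mm.pages->sgl, src, obj->mm.pages->orig_nents, i) {
		sg_set_page(dst, sg_page(src), src->length, 0);
		dst = sg_next(dst);
	}

	ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret)
		goto err_free_sg;

	return sgt;

err_free_sg:
	sg_free_table(sgt);
err_free:
	kfree(sgt);
	return ERR_PTR(ret);
}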
73 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
76 vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);
88 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
90 i915_gem_object_flush_map(obj);
91 i915_gem_object_unpin_map(obj);
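Lines 73-91 are the vmap/vunmap pair: vmap pins a write-back kernel mapping of the whole object, vunmap flushes CPU writes and drops the pin. A sketch assuming the vmap callback takes a struct iosys_map (older trees used struct dma_buf_map in the same role):

static int i915_gem_dmabuf_vmap(struct dma_buf *dma_buf,
				struct iosys_map *map)
{
	struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
	void *vaddr;

	/* Pin a CPU-visible, write-back mapping of the whole object. */
	vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);
	if (IS_ERR(vaddr))
		return PTR_ERR(vaddr);

	iosys_map_set_vaddr(map, vaddr);
	return 0;
}

static void i915_gem_dmabuf_vunmap(struct dma_buf *dma_buf,
				   struct iosys_map *map)
{
	struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);

	/* Flush CPU writes out of the mapping before dropping the pin. */
	i915_gem_object_flush_map(obj);
	i915_gem_object_unpin_map(obj);
}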
96 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
97 struct drm_i915_private *i915 = to_i915(obj->base.dev);
100 if (obj->base.size < vma->vm_end - vma->vm_start)
104 return drm_gem_prime_mmap(&obj->base, vma);
106 if (!obj->base.filp)
109 ret = call_mmap(obj->base.filp, vma);
113 vma_set_file(vma, obj->base.filp);
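Lines 96-113 are the dma-buf mmap hook. It rejects VMAs larger than the object and otherwise forwards to the backing shmem file's own mmap, repointing the VMA's file afterwards so the reference count lands on the backing file rather than the dma-buf. A sketch; upstream gates the early drm_gem_prime_mmap() return on HAS_LMEM(i915), which is an assumption here since the guard itself is not among the matches:

static int i915_gem_dmabuf_mmap(struct dma_buf *dma_buf,
				struct vm_area_struct *vma)
{
	struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
	struct drm_i915_private *i915 = to_i915(obj->base.dev);
	int ret;

	if (obj->base.size < vma->vm_end - vma->vm_start)
		return -EINVAL;

	/* Assumed guard (per upstream): local-memory platforms go through
	 * the generic GEM mmap path. */
	if (HAS_LMEM(i915))
		return drm_gem_prime_mmap(&obj->base, vma);

	if (!obj->base.filp)
		return -ENODEV;

	ret = call_mmap(obj->base.filp, vma);
	if (ret)
		return ret;

	/* Keep the refcount on the backing shmem file, not the dma-buf. */
	vma_set_file(vma, obj->base.filp);

	return 0;
}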
120 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
127 err = i915_gem_object_lock(obj, &ww);
129 err = i915_gem_object_pin_pages(obj);
131 err = i915_gem_object_set_to_cpu_domain(obj, write);
132 i915_gem_object_unpin_pages(obj);
145 struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
151 err = i915_gem_object_lock(obj, &ww);
153 err = i915_gem_object_pin_pages(obj);
155 err = i915_gem_object_set_to_gtt_domain(obj, false);
156 i915_gem_object_unpin_pages(obj);
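Lines 120-156 are the begin/end CPU-access hooks. Both run the same ww-mutex retry pattern: lock the object, pin its pages, move it to the wanted coherency domain, unpin. A sketch of begin_cpu_access; end_cpu_access (lines 145-156) is identical except that it calls i915_gem_object_set_to_gtt_domain(obj, false):

static int i915_gem_begin_cpu_access(struct dma_buf *dma_buf,
				     enum dma_data_direction direction)
{
	struct drm_i915_gem_object *obj = dma_buf_to_obj(dma_buf);
	bool write = (direction == DMA_BIDIRECTIONAL ||
		      direction == DMA_TO_DEVICE);
	struct i915_gem_ww_ctx ww;
	int err;

	i915_gem_ww_ctx_init(&ww, true);
retry:
	err = i915_gem_object_lock(obj, &ww);
	if (!err)
		err = i915_gem_object_pin_pages(obj);
	if (!err) {
		/* Make the pages CPU-coherent before userspace touches them. */
		err = i915_gem_object_set_to_cpu_domain(obj, write);
		i915_gem_object_unpin_pages(obj);
	}
	if (err == -EDEADLK) {
		err = i915_gem_ww_ctx_backoff(&ww);
		if (!err)
			goto retry;
	}
	i915_gem_ww_ctx_fini(&ww);
	return err;
}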
170 struct drm_i915_gem_object *obj = dma_buf_to_obj(dmabuf);
174 if (!i915_gem_object_can_migrate(obj, INTEL_REGION_SMEM))
178 err = i915_gem_object_lock(obj, &ww);
182 err = i915_gem_object_migrate(obj, &ww, INTEL_REGION_SMEM);
186 err = i915_gem_object_wait_migration(obj, 0);
190 err = i915_gem_object_pin_pages(obj);
199 struct drm_i915_gem_object *obj = dma_buf_to_obj(dmabuf);
201 i915_gem_object_unpin_pages(obj);
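Lines 170-201 are the attach/detach pair for buffers i915 exports. Attach refuses objects that cannot be placed in system memory, then migrates the object there, waits for the migration, and pins the pages, all under a for_i915_gem_ww() retry loop; detach just drops the pin. A sketch:

static int i915_gem_dmabuf_attach(struct dma_buf *dmabuf,
				  struct dma_buf_attachment *attach)
{
	struct drm_i915_gem_object *obj = dma_buf_to_obj(dmabuf);
	struct i915_gem_ww_ctx ww;
	int err;

	/* The importer works on struct pages, so the object must be
	 * migratable to system memory. */
	if (!i915_gem_object_can_migrate(obj, INTEL_REGION_SMEM))
		return -EOPNOTSUPP;

	for_i915_gem_ww(&ww, err, true) {
		err = i915_gem_object_lock(obj, &ww);
		if (err)
			continue;

		err = i915_gem_object_migrate(obj, &ww, INTEL_REGION_SMEM);
		if (err)
			continue;

		err = i915_gem_object_wait_migration(obj, 0);
		if (err)
			continue;

		err = i915_gem_object_pin_pages(obj);
	}

	return err;
}

static void i915_gem_dmabuf_detach(struct dma_buf *dmabuf,
				   struct dma_buf_attachment *attach)
{
	struct drm_i915_gem_object *obj = dma_buf_to_obj(dmabuf);

	i915_gem_object_unpin_pages(obj);
}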
219 struct drm_i915_gem_object *obj = to_intel_bo(gem_obj);
226 exp_info.resv = obj->base.resv;
228 if (obj->ops->dmabuf_export) {
229 int ret = obj->ops->dmabuf_export(obj);
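Lines 219-229 fall in i915_gem_prime_export(). The export info reuses the object's reservation object, and object types that define a dmabuf_export hook get a chance to veto or prepare for the export before the dma-buf is created. A sketch; the surrounding exp_info setup is assumed from upstream:

struct dma_buf *i915_gem_prime_export(struct drm_gem_object *gem_obj, int flags)
{
	struct drm_i915_gem_object *obj = to_intel_bo(gem_obj);
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

	exp_info.ops = &i915_dmabuf_ops;
	exp_info.size = gem_obj->size;
	exp_info.flags = flags;
	exp_info.priv = gem_obj;
	exp_info.resv = obj->base.resv; /* share the object's reservation */

	if (obj->ops->dmabuf_export) {
		int ret = obj->ops->dmabuf_export(obj);

		if (ret)
			return ERR_PTR(ret);
	}

	return drm_gem_dmabuf_export(gem_obj->dev, &exp_info);
}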
237 static int i915_gem_object_get_pages_dmabuf(struct drm_i915_gem_object *obj)
239 struct drm_i915_private *i915 = to_i915(obj->base.dev);
242 assert_object_held(obj);
244 sgt = dma_buf_map_attachment(obj->base.import_attach,
260 if (i915_gem_object_can_bypass_llc(obj) ||
264 __i915_gem_object_set_pages(obj, sgt);
269 static void i915_gem_object_put_pages_dmabuf(struct drm_i915_gem_object *obj,
272 dma_buf_unmap_attachment(obj->base.import_attach, sgt,
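Lines 237-272 are the page-provider ops for imported buffers. get_pages maps the import attachment and installs the resulting sg_table as the object's backing pages, first flushing CPU caches on platforms where the import may bypass the LLC; put_pages unmaps the attachment again. A sketch; the exact cache-flush condition follows my reading of upstream:

static int i915_gem_object_get_pages_dmabuf(struct drm_i915_gem_object *obj)
{
	struct drm_i915_private *i915 = to_i915(obj->base.dev);
	struct sg_table *sgt;

	assert_object_held(obj);

	sgt = dma_buf_map_attachment(obj->base.import_attach,
				     DMA_BIDIRECTIONAL);
	if (IS_ERR(sgt))
		return PTR_ERR(sgt);

	/* Flush CPU caches when the object can bypass the LLC; DG1 still
	 * snoops even with CACHE_NONE, hence its exclusion (per upstream). */
	if (i915_gem_object_can_bypass_llc(obj) ||
	    (!HAS_LLC(i915) && !IS_DG1(i915)))
		wbinvd_on_all_cpus();

	__i915_gem_object_set_pages(obj, sgt);

	return 0;
}

static void i915_gem_object_put_pages_dmabuf(struct drm_i915_gem_object *obj,
					     struct sg_table *sgt)
{
	dma_buf_unmap_attachment(obj->base.import_attach, sgt,
				 DMA_BIDIRECTIONAL);
}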
287 struct drm_i915_gem_object *obj;
292 obj = dma_buf_to_obj(dma_buf);
294 if (obj->base.dev == dev &&
300 return &i915_gem_object_get(obj)->base;
314 obj = i915_gem_object_alloc();
315 if (!obj) {
320 drm_gem_private_object_init(dev, &obj->base, dma_buf->size);
321 i915_gem_object_init(obj, &i915_gem_object_dmabuf_ops, &lock_class,
323 obj->base.import_attach = attach;
324 obj->base.resv = dma_buf->resv;
333 obj->read_domains = I915_GEM_DOMAIN_GTT;
334 obj->write_domain = 0;
336 return &obj->base;
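Lines 287-336 are i915_gem_prime_import(). A dma-buf that i915 itself exported on the same device is short-circuited to a plain GEM reference; anything else is attached and wrapped in a new object that shares the exporter's reservation object and starts out in the (coherent) GTT read domain. A sketch; the guards between the listed lines (the selftest hook, the size check, the attach) follow upstream and may vary by kernel version:

struct drm_gem_object *i915_gem_prime_import(struct drm_device *dev,
					     struct dma_buf *dma_buf)
{
	static struct lock_class_key lock_class;
	struct dma_buf_attachment *attach;
	struct drm_i915_gem_object *obj;
	int ret;

	/* Is this one of our own objects, exported from this device? */
	if (dma_buf->ops == &i915_dmabuf_ops) {
		obj = dma_buf_to_obj(dma_buf);
		if (obj->base.dev == dev &&
		    !I915_SELFTEST_ONLY(force_different_devices)) {
			/* Self-import: take a GEM reference instead of
			 * bumping the dma-buf's f_count. */
			return &i915_gem_object_get(obj)->base;
		}
	}

	if (i915_gem_object_size_2big(dma_buf->size))
		return ERR_PTR(-E2BIG);

	/* Foreign buffer: attach and wrap it. */
	attach = dma_buf_attach(dma_buf, dev->dev);
	if (IS_ERR(attach))
		return ERR_CAST(attach);

	obj = i915_gem_object_alloc();
	if (!obj) {
		ret = -ENOMEM;
		goto fail_detach;
	}

	drm_gem_private_object_init(dev, &obj->base, dma_buf->size);
	i915_gem_object_init(obj, &i915_gem_object_dmabuf_ops, &lock_class,
			     I915_BO_ALLOC_USER);
	obj->base.import_attach = attach;
	obj->base.resv = dma_buf->resv;

	/* GTT is used as shorthand for a coherent domain: neither in the
	 * GPU cache nor in the CPU cache. */
	obj->read_domains = I915_GEM_DOMAIN_GTT;
	obj->write_domain = 0;

	return &obj->base;

fail_detach:
	dma_buf_detach(dma_buf, attach);
	return ERR_PTR(ret);
}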