Lines matching defs:obj (each entry: source line number, then the matching fragment)

262 	/** identity of the batch obj/vma */
286 * obj/page
547 i915_gem_object_is_tiled(vma->obj))
635 const struct drm_i915_gem_object *obj)
637 if (!i915_gem_object_has_struct_page(obj))
653 obj->cache_dirty ||
654 !i915_gem_object_has_cache_level(obj, I915_CACHE_NONE));
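
The matches at 635-654 are the body of a cacheability predicate (use_cpu_reloc(), per the call site at 1289) that decides whether a relocation may be written through the CPU. A minimal sketch of that shape follows; only the fragments above are verbatim, and the cache->has_llc term and the exact return expression are assumptions:

    static inline bool use_cpu_reloc(const struct reloc_cache *cache,
                                     const struct drm_i915_gem_object *obj)
    {
            /* CPU relocations need struct-page backing to map and write. */
            if (!i915_gem_object_has_struct_page(obj))
                    return false;

            /*
             * Writing through the CPU is safe when the caches snoop
             * (has_llc is an assumption here), when the object already
             * carries dirty cachelines anyway, or when it is not mapped
             * with the uncached cache level.
             */
            return cache->has_llc ||
                   obj->cache_dirty ||
                   !i915_gem_object_has_cache_level(obj, I915_CACHE_NONE);
    }
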
865 struct drm_i915_gem_object *obj = vma->obj;
867 spin_lock(&obj->lut_lock);
868 if (idr_find(&eb->file->object_idr, handle) == obj) {
869 list_add(&lut->obj_link, &obj->lut_list);
874 spin_unlock(&obj->lut_lock);
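
Lines 865-874 trace the handle-lookaside (LUT) insertion. The point of the pattern is the revalidation under obj->lut_lock: the idr is re-checked so a GEM_CLOSE racing with execbuf cannot leave a LUT entry hanging off a dead handle. A sketch of the body, with the error path an assumption:

    struct drm_i915_gem_object *obj = vma->obj;
    int err = 0;

    spin_lock(&obj->lut_lock);
    /* Revalidate: the handle may have been closed since the lookup. */
    if (idr_find(&eb->file->object_idr, handle) == obj)
            list_add(&lut->obj_link, &obj->lut_list);
    else
            err = -ENOENT;  /* assumed error for the lost race */
    spin_unlock(&obj->lut_lock);
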
895 struct drm_i915_gem_object *obj;
907 obj = i915_gem_object_lookup(eb->file, handle);
908 if (unlikely(!obj))
919 i915_gem_object_is_protected(obj)) {
920 err = intel_pxp_key_check(eb->i915->pxp, obj, true);
922 i915_gem_object_put(obj);
927 vma = i915_vma_instance(obj, vm, NULL);
929 i915_gem_object_put(obj);
937 i915_gem_object_put(obj);
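
Lines 895-937 are the lookup path. The repeated i915_gem_object_put() calls are the interesting part: the lookup takes a reference, so every failure exit must drop exactly one. A sketch of the unwinding, assuming -ENOENT for a bad handle and simplifying the condition around the protected-object check:

    struct drm_i915_gem_object *obj;
    struct i915_vma *vma;
    int err;

    obj = i915_gem_object_lookup(eb->file, handle);  /* takes +1 ref */
    if (unlikely(!obj))
            return ERR_PTR(-ENOENT);

    /* Protected (PXP) objects need a valid protection session. */
    if (i915_gem_object_is_protected(obj)) {
            err = intel_pxp_key_check(eb->i915->pxp, obj, true);
            if (err) {
                    i915_gem_object_put(obj);
                    return ERR_PTR(err);
            }
    }

    vma = i915_vma_instance(obj, vm, NULL);
    if (IS_ERR(vma)) {
            i915_gem_object_put(obj);  /* unwind the lookup ref */
            return ERR_CAST(vma);
    }
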
969 if (i915_gem_object_is_userptr(vma->obj)) {
970 err = i915_gem_object_userptr_submit_init(vma->obj);
1006 err = i915_gem_object_lock(vma->obj, &eb->ww);
1051 err = dma_resv_reserve_fences(vma->obj->base.resv, eb->num_batches);
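
Lines 969-1051 cover the reservation phase. Three steps recur per object, sketched below from the verbatim calls above: userptr pages are (re)acquired before any locks are taken, the object is locked through the shared ww acquire context so contended execbufs back off and retry, and dma-resv fence slots are reserved up front while failure can still be unwound cleanly:

    /* 1: pin down userptr pages before taking the ww lock. */
    if (i915_gem_object_is_userptr(vma->obj)) {
            err = i915_gem_object_userptr_submit_init(vma->obj);
            if (err)
                    return err;
    }

    /* 2: every object locks through the one eb->ww acquire context. */
    err = i915_gem_object_lock(vma->obj, &eb->ww);
    if (err)
            return err;

    /* 3: one fence slot per batch, reserved before any is consumed. */
    err = dma_resv_reserve_fences(vma->obj->base.resv, eb->num_batches);
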
1168 struct drm_i915_gem_object *obj)
1176 struct page *page = i915_gem_object_get_page(obj, cache->page);
1203 struct drm_i915_gem_object *obj =
1209 i915_gem_object_finish_access(obj);
1232 static void *reloc_kmap(struct drm_i915_gem_object *obj,
1245 err = i915_gem_object_prepare_write(obj, &flushes);
1253 cache->node.mm = (void *)obj;
1258 page = i915_gem_object_get_page(obj, pageno);
1259 if (!obj->mm.dirty)
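
Lines 1232-1259 are the CPU (kmap) relocation path; note that 1253 stashes the object pointer in the cache node so teardown (1203-1209) knows which object to pass to i915_gem_object_finish_access(). A sketch of the mapping step; kmap_local_page() and set_page_dirty() are assumptions, as the listing only shows the prepare/get-page/dirty-check fragments:

    unsigned int flushes;
    struct page *page;
    void *vaddr;
    int err;

    /* Flush or invalidate as needed so a CPU write stays coherent. */
    err = i915_gem_object_prepare_write(obj, &flushes);
    if (err)
            return ERR_PTR(err);

    page = i915_gem_object_get_page(obj, pageno);
    if (!obj->mm.dirty)
            set_page_dirty(page);   /* assumption */

    vaddr = kmap_local_page(page);  /* assumption: mapping API */
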
1273 struct drm_i915_gem_object *obj = batch->obj;
1286 if (i915_gem_object_is_tiled(obj))
1289 if (use_cpu_reloc(cache, obj))
1292 err = i915_gem_object_set_to_gtt_domain(obj, true);
1305 vma = i915_gem_object_ggtt_pin_ww(obj, &eb->ww, NULL, 0, 0,
1334 i915_gem_object_get_dma_address(obj, page),
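
Lines 1273-1334 are the GTT relocation fallback used when use_cpu_reloc() says no. A sketch of the decision ladder; the error codes and the pin flags are assumptions (the flags argument is truncated in the match at 1305):

    struct drm_i915_gem_object *obj = batch->obj;
    unsigned int flags = PIN_MAPPABLE;  /* assumed; truncated above */

    /* Swizzled (tiled) objects cannot be patched through the GTT. */
    if (i915_gem_object_is_tiled(obj))
            return ERR_PTR(-EINVAL);    /* assumed error code */

    if (use_cpu_reloc(cache, obj))
            return NULL;                /* take the kmap path instead */

    /* Make GTT writes coherent before mapping through the aperture. */
    err = i915_gem_object_set_to_gtt_domain(obj, true);
    if (err)
            return ERR_PTR(err);

    vma = i915_gem_object_ggtt_pin_ww(obj, &eb->ww, NULL, 0, 0, flags);
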
1365 vaddr = reloc_kmap(vma->obj, cache, page);
1478 target->vma->obj->pat_index,
1481 reloc_cache_remap(&eb->reloc_cache, ev->vma->obj);
1770 if (!i915_gem_object_is_userptr(ev->vma->obj))
1773 ret = i915_gem_object_userptr_submit_init(ev->vma->obj);
2106 struct drm_i915_gem_object *obj = vma->obj;
2117 * obj->cache_dirty &&
2118 * !(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ)
2133 if (unlikely(obj->cache_dirty & ~obj->cache_coherent)) {
2134 if (i915_gem_clflush_object(obj, 0))
2141 (eb_find_first_request_added(eb), obj,
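
Lines 2106-2141 are the pre-submission flush in the move-to-gpu step. The comment fragments at 2117-2118 give the condition being guarded: dirty CPU cachelines on an object the GPU will not snoop for reads must be clflushed before the request runs. A sketch; what happens after a successful flush (dropping the async flag) is an assumption:

    struct drm_i915_gem_object *obj = vma->obj;

    /*
     * cache_dirty tracks CPU-side dirt; cache_coherent records which
     * directions the GPU snoops. Anything dirty beyond what is
     * coherent must be flushed by hand before submission.
     */
    if (unlikely(obj->cache_dirty & ~obj->cache_coherent)) {
            if (i915_gem_clflush_object(obj, 0))
                    flags &= ~EXEC_OBJECT_ASYNC;  /* assumption */
    }
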
2165 struct drm_i915_gem_object *obj = ev->vma->obj;
2167 if (!i915_gem_object_is_userptr(obj))
2170 err = i915_gem_object_userptr_submit_done(obj);
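
Lines 2165-2170 close the userptr handshake opened at 969/1773: submit_init pins the user pages early, and submit_done, called only once every reservation has succeeded, verifies that an mmu-notifier invalidation did not pull them out in between. A per-object sketch; the restart-on-error behavior is an assumption:

    struct drm_i915_gem_object *obj = ev->vma->obj;

    if (!i915_gem_object_is_userptr(obj))
            return 0;

    /*
     * Confirm the pages acquired at submit_init time are still the
     * ones mapped; if not, the execbuf restarts (assumed).
     */
    err = i915_gem_object_userptr_submit_done(obj);
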
2249 struct drm_i915_gem_object *obj,
2256 vma = i915_vma_instance(obj, vm, NULL);
2274 return i915_gem_object_ggtt_pin_ww(vma->obj, &eb->ww, NULL, 0, 0, PIN_VALIDATE);
2323 err = i915_gem_object_lock(pool->obj, &eb->ww);
2327 shadow = shadow_batch_pin(eb, pool->obj, eb->context->vm, PIN_USER);
2332 i915_gem_object_set_readonly(shadow->obj);
2339 shadow = shadow_batch_pin(eb, pool->obj,
2354 err = dma_resv_reserve_fences(shadow->obj->base.resv, 1);
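
Lines 2249-2354 build the shadow batch used for command parsing: the kernel copies the user batch into a pool object, validates the copy, and executes that instead. The sketch below strings the fragments together; the error label is hypothetical and the PIN_USER placement an assumption, while the calls themselves are verbatim from the matches:

    err = i915_gem_object_lock(pool->obj, &eb->ww);
    if (err)
            goto err_pool;                /* hypothetical label */

    shadow = shadow_batch_pin(eb, pool->obj, eb->context->vm, PIN_USER);
    if (IS_ERR(shadow)) {
            err = PTR_ERR(shadow);
            goto err_pool;
    }

    /* Userspace must never rewrite the batch after validation. */
    i915_gem_object_set_readonly(shadow->obj);

    /* One slot for the fence that marks the shadow as in use. */
    err = dma_resv_reserve_fences(shadow->obj->base.resv, 1);
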