Lines Matching defs:obj

70 eb_add_object(struct eb_objects *eb, struct drm_i915_gem_object *obj)
72 hlist_add_head(&obj->exec_node,
73 &eb->buckets[obj->exec_handle & eb->and]);
81 struct drm_i915_gem_object *obj;
85 obj = hlist_entry(node, struct drm_i915_gem_object, exec_node);
86 if (obj->exec_handle == handle)
87 return obj;
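
The fragments above (lines 70-87) come from the execbuffer's handle-to-object table: eb_add_object() chains each object into a bucket chosen by masking its exec_handle with eb->and, and the matching lookup walks that bucket until a handle compares equal. Below is a minimal reconstruction of the lookup side; the function name eb_get_object and the plain hlist walk are assumptions, since only the body lines 81-87 appear in the listing, and the struct eb_objects definition is not shown.

    static struct drm_i915_gem_object *
    eb_get_object(struct eb_objects *eb, unsigned long handle)
    {
            struct hlist_head *head;
            struct hlist_node *node;
            struct drm_i915_gem_object *obj;

            /* eb->and is (bucket count - 1), so the mask is a cheap modulo. */
            head = &eb->buckets[handle & eb->and];
            hlist_for_each(node, head) {
                    obj = hlist_entry(node, struct drm_i915_gem_object,
                        exec_node);
                    if (obj->exec_handle == handle)
                            return obj;
            }
            return NULL;
    }
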
99 static inline int use_cpu_reloc(struct drm_i915_gem_object *obj)
101 return (obj->base.write_domain == I915_GEM_DOMAIN_CPU ||
102 !obj->map_and_fenceable ||
103 obj->cache_level != I915_CACHE_NONE);
107 i915_gem_execbuffer_relocate_entry(struct drm_i915_gem_object *obj,
111 struct drm_device *dev = obj->base.dev;
138 "obj %p target %d offset %d "
140 obj, reloc->target_handle,
149 "obj %p target %d offset %d "
151 obj, reloc->target_handle,
160 "obj %p target %d offset %d "
162 obj, reloc->target_handle,
179 if (unlikely(reloc->offset > obj->base.size - 4)) {
181 "obj %p target %d offset %d size %d.\n",
182 obj, reloc->target_handle,
184 (int) obj->base.size);
189 "obj %p target %d offset %d.\n",
190 obj, reloc->target_handle,
196 if (obj->active && (curthread->td_pflags & TDP_NOFAULTING) != 0)
200 if (use_cpu_reloc(obj)) {
205 ret = i915_gem_object_set_to_cpu_domain(obj, 1);
209 sf = sf_buf_alloc(obj->pages[OFF_TO_IDX(reloc->offset)],
221 ret = i915_gem_object_set_to_gtt_domain(obj, true);
225 ret = i915_gem_object_put_fence(obj);
230 reloc->offset += obj->gtt_offset;
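
Lines 99-230 cover i915_gem_execbuffer_relocate_entry() and its helper use_cpu_reloc(): a relocation is patched through the CPU when the object is already in the CPU write domain, cannot be mapped and fenced through the aperture, or has a caching mode other than none; otherwise the dword is written through the GTT aperture after the object is moved to the GTT domain and its fence dropped. A condensed sketch of that branch follows, with the actual page mapping and dword write elided; the sf_buf path at line 209 is the CPU-side mapping in this (FreeBSD-style) source.

    if (use_cpu_reloc(obj)) {
            /* CPU path: make the pages CPU-writable, then poke the
             * relocated value through a kernel mapping of the page
             * containing reloc->offset (the sf_buf_alloc() at line 209). */
            ret = i915_gem_object_set_to_cpu_domain(obj, 1);
            if (ret)
                    return ret;
            /* ... map obj->pages[OFF_TO_IDX(reloc->offset)] and write the
             *     target address at the page offset ... */
    } else {
            /* GTT path: the object must be in the GTT domain and must not
             * be accessed through a fence while we write. */
            ret = i915_gem_object_set_to_gtt_domain(obj, true);
            if (ret)
                    return ret;
            ret = i915_gem_object_put_fence(obj);
            if (ret)
                    return ret;
            /* Aperture offset of the dword to patch (line 230): */
            reloc->offset += obj->gtt_offset;
            /* ... write the target address through the GTT mapping ... */
    }
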
246 i915_gem_execbuffer_relocate_object(struct drm_i915_gem_object *obj,
252 struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
271 ret = i915_gem_execbuffer_relocate_entry(obj, eb, r);
292 i915_gem_execbuffer_relocate_object_slow(struct drm_i915_gem_object *obj,
296 const struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
300 ret = i915_gem_execbuffer_relocate_entry(obj, eb, &relocs[i]);
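
Lines 292-300 show the slow-path variant, which receives relocations already copied into kernel memory and simply applies them in order. A near-complete reconstruction; only the trivial control flow is filled in.

    static int
    i915_gem_execbuffer_relocate_object_slow(struct drm_i915_gem_object *obj,
        struct eb_objects *eb,
        struct drm_i915_gem_relocation_entry *relocs)
    {
            const struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
            int i, ret;

            for (i = 0; i < entry->relocation_count; i++) {
                    ret = i915_gem_execbuffer_relocate_entry(obj, eb,
                        &relocs[i]);
                    if (ret)
                            return ret;
            }
            return 0;
    }
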
313 struct drm_i915_gem_object *obj;
324 list_for_each_entry(obj, objects, exec_list) {
325 ret = i915_gem_execbuffer_relocate_object(obj, eb);
338 need_reloc_mappable(struct drm_i915_gem_object *obj)
340 struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
341 return entry->relocation_count && !use_cpu_reloc(obj);
345 i915_gem_execbuffer_reserve_object(struct drm_i915_gem_object *obj,
348 struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
349 struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
357 obj->tiling_mode != I915_TILING_NONE;
358 need_mappable = need_fence || need_reloc_mappable(obj);
360 ret = i915_gem_object_pin(obj, entry->alignment, need_mappable, false);
368 ret = i915_gem_object_get_fence(obj);
372 if (i915_gem_object_pin_fence(obj))
375 obj->pending_fenced_gpu_access = true;
380 if (dev_priv->mm.aliasing_ppgtt && !obj->has_aliasing_ppgtt_mapping) {
382 obj, obj->cache_level);
384 obj->has_aliasing_ppgtt_mapping = 1;
387 entry->offset = obj->gtt_offset;
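
Lines 345-387 are per-object reservation: the buffer is pinned at the alignment its exec entry requests (and in the mappable aperture if it needs a fence or a GTT relocation), a fence register is acquired and pinned when asked for, the binding is mirrored into the aliasing PPGTT when one exists, and the resulting GTT offset is written back into the exec entry. A condensed reconstruction follows; the EXEC_OBJECT_NEEDS_FENCE / fence-pinned flag bookkeeping is abbreviated and partly assumed.

    static int
    i915_gem_execbuffer_reserve_object(struct drm_i915_gem_object *obj,
        struct intel_ring_buffer *ring)
    {
            /* ring is used in the full version to decide whether fenced
             * GPU access is possible at all on this generation. */
            struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
            struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
            bool need_fence, need_mappable;
            int ret;

            need_fence = (entry->flags & EXEC_OBJECT_NEEDS_FENCE) &&
                obj->tiling_mode != I915_TILING_NONE;
            need_mappable = need_fence || need_reloc_mappable(obj);

            ret = i915_gem_object_pin(obj, entry->alignment, need_mappable,
                false);
            if (ret)
                    return ret;

            if (need_fence) {
                    ret = i915_gem_object_get_fence(obj);
                    if (ret)
                            return ret;
                    if (i915_gem_object_pin_fence(obj))
                            entry->flags |= __EXEC_OBJECT_HAS_FENCE;
                    obj->pending_fenced_gpu_access = true;
            }

            /* Keep the aliasing PPGTT in sync with the global GTT. */
            if (dev_priv->mm.aliasing_ppgtt &&
                !obj->has_aliasing_ppgtt_mapping) {
                    i915_ppgtt_bind_object(dev_priv->mm.aliasing_ppgtt,
                        obj, obj->cache_level);
                    obj->has_aliasing_ppgtt_mapping = 1;
            }

            entry->offset = obj->gtt_offset;
            return 0;
    }
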
392 i915_gem_execbuffer_unreserve_object(struct drm_i915_gem_object *obj)
396 if (!obj->gtt_space)
399 entry = obj->exec_entry;
402 i915_gem_object_unpin_fence(obj);
405 i915_gem_object_unpin(obj);
415 struct drm_i915_gem_object *obj;
425 obj = list_first_entry(objects,
428 entry = obj->exec_entry;
433 obj->tiling_mode != I915_TILING_NONE;
434 need_mappable = need_fence || need_reloc_mappable(obj);
437 list_move(&obj->exec_list, &ordered_objects);
439 list_move_tail(&obj->exec_list, &ordered_objects);
441 obj->base.pending_read_domains = 0;
442 obj->base.pending_write_domain = 0;
443 obj->pending_fenced_gpu_access = false;
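
Lines 415-443 are the first stage of the reservation pass: the incoming list is re-ordered so that objects which need a mappable slot (fence users and objects with GTT relocations) are pinned first, and every object's pending domain and fence state is cleared before the pin passes below. A reconstruction of that ordering loop; the ordered_objects list head and the final splice back are assumptions consistent with lines 437-439.

    struct list_head ordered_objects;

    INIT_LIST_HEAD(&ordered_objects);
    while (!list_empty(objects)) {
            struct drm_i915_gem_exec_object2 *entry;
            bool need_fence, need_mappable;

            obj = list_first_entry(objects,
                struct drm_i915_gem_object, exec_list);
            entry = obj->exec_entry;

            need_fence = (entry->flags & EXEC_OBJECT_NEEDS_FENCE) &&
                obj->tiling_mode != I915_TILING_NONE;
            need_mappable = need_fence || need_reloc_mappable(obj);

            if (need_mappable)
                    list_move(&obj->exec_list, &ordered_objects);
            else
                    list_move_tail(&obj->exec_list, &ordered_objects);

            obj->base.pending_read_domains = 0;
            obj->base.pending_write_domain = 0;
            obj->pending_fenced_gpu_access = false;
    }
    list_splice(&ordered_objects, objects);
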
464 list_for_each_entry(obj, objects, exec_list) {
465 struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
468 if (!obj->gtt_space)
474 obj->tiling_mode != I915_TILING_NONE;
475 need_mappable = need_fence || need_reloc_mappable(obj);
477 if ((entry->alignment && obj->gtt_offset & (entry->alignment - 1)) ||
478 (need_mappable && !obj->map_and_fenceable))
479 ret = i915_gem_object_unbind(obj);
481 ret = i915_gem_execbuffer_reserve_object(obj, ring);
487 list_for_each_entry(obj, objects, exec_list) {
488 if (obj->gtt_space)
491 ret = i915_gem_execbuffer_reserve_object(obj, ring);
497 list_for_each_entry(obj, objects, exec_list)
498 i915_gem_execbuffer_unreserve_object(obj);
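
Lines 464-498 are the pin passes that follow: already-bound objects whose placement no longer satisfies the requested alignment or the mappability requirement are unbound, correctly placed ones are re-reserved where they sit, anything still without GTT space is then bound, and the loop at lines 497-498 unwinds the per-object pins taken by reserve_object(). A skeleton of those passes; the eviction/retry logic that surrounds them is not shown in the listing and is omitted here.

    /* Pass 1: fix up objects that are bound in an unusable spot. */
    list_for_each_entry(obj, objects, exec_list) {
            struct drm_i915_gem_exec_object2 *entry = obj->exec_entry;
            bool need_fence, need_mappable;

            if (!obj->gtt_space)
                    continue;

            need_fence = (entry->flags & EXEC_OBJECT_NEEDS_FENCE) &&
                obj->tiling_mode != I915_TILING_NONE;
            need_mappable = need_fence || need_reloc_mappable(obj);

            if ((entry->alignment &&
                obj->gtt_offset & (entry->alignment - 1)) ||
                (need_mappable && !obj->map_and_fenceable))
                    ret = i915_gem_object_unbind(obj);
            else
                    ret = i915_gem_execbuffer_reserve_object(obj, ring);
            if (ret)
                    goto err;
    }

    /* Pass 2: bind everything that still lacks GTT space. */
    list_for_each_entry(obj, objects, exec_list) {
            if (obj->gtt_space)
                    continue;
            ret = i915_gem_execbuffer_reserve_object(obj, ring);
            if (ret)
                    goto err;
    }

err:
    /* Lines 497-498: drop the reservations; on -ENOSPC the caller
     * evicts and retries the whole sequence. */
    list_for_each_entry(obj, objects, exec_list)
            i915_gem_execbuffer_unreserve_object(obj);
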
519 struct drm_i915_gem_object *obj;
525 obj = list_first_entry(objects,
528 list_del_init(&obj->exec_list);
529 drm_gem_object_unreference(&obj->base);
594 obj = to_intel_bo(drm_gem_object_lookup(dev, file,
596 if (&obj->base == NULL) {
603 list_add_tail(&obj->exec_list, objects);
604 obj->exec_handle = exec[i].handle;
605 obj->exec_entry = &exec[i];
606 eb_add_object(eb, obj);
613 list_for_each_entry(obj, objects, exec_list) {
614 int offset = obj->exec_entry - exec;
615 ret = i915_gem_execbuffer_relocate_object_slow(obj, eb,
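
Lines 519-615 are from the slow relocation path, taken when patching a relocation would have to fault in user memory: the object list built earlier is torn down and every reference dropped (lines 525-529), the user relocation arrays are copied into kernel memory, the handles are looked up again and re-entered into the eb table (lines 594-606), and the copied relocations are applied per object (lines 613-615). A sketch of that last loop; the names reloc and reloc_offset for the copied array and the per-object starting indices are assumptions, since line 615 is truncated in the listing.

    list_for_each_entry(obj, objects, exec_list) {
            int offset = obj->exec_entry - exec;

            ret = i915_gem_execbuffer_relocate_object_slow(obj, eb,
                reloc + reloc_offset[offset]);
            if (ret)
                    goto err;
    }
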
669 struct drm_i915_gem_object *obj;
674 list_for_each_entry(obj, objects, exec_list) {
675 ret = i915_gem_object_sync(obj, ring);
679 if (obj->base.write_domain & I915_GEM_DOMAIN_CPU)
680 i915_gem_clflush_object(obj);
682 if (obj->base.pending_write_domain)
683 flips |= atomic_read(&obj->pending_flip);
685 flush_domains |= obj->base.write_domain;
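
Lines 669-685 are the pre-submission pass: every object is synchronised with the target ring (inter-ring waits), objects still dirty in the CPU domain are clflushed, pending page flips are collected so the batch can wait for them, and the write domains that must be flushed before the batch runs are accumulated. Reconstructed loop body:

    list_for_each_entry(obj, objects, exec_list) {
            ret = i915_gem_object_sync(obj, ring);
            if (ret)
                    return ret;

            if (obj->base.write_domain & I915_GEM_DOMAIN_CPU)
                    i915_gem_clflush_object(obj);

            if (obj->base.pending_write_domain)
                    flips |= atomic_read(&obj->pending_flip);

            flush_domains |= obj->base.write_domain;
    }
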
773 struct drm_i915_gem_object *obj;
775 list_for_each_entry(obj, objects, exec_list) {
777 u32 old_read = obj->base.read_domains;
778 u32 old_write = obj->base.write_domain;
781 obj->base.read_domains = obj->base.pending_read_domains;
782 obj->base.write_domain = obj->base.pending_write_domain;
783 obj->fenced_gpu_access = obj->pending_fenced_gpu_access;
785 i915_gem_object_move_to_active(obj, ring);
786 if (obj->base.write_domain) {
787 obj->dirty = 1;
788 obj->last_write_seqno = intel_ring_get_seqno(ring);
789 if (obj->pin_count) /* check for potential scanout */
790 intel_mark_fb_busy(obj);
794 obj, old_read, old_write);
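
Lines 773-794 are the post-submission bookkeeping: the pending read/write domains computed during relocation become the objects' actual domains, each object is moved onto the ring's active list, and objects the GPU will write are marked dirty, stamped with the request's seqno, and, if still pinned (a likely scanout), flagged busy for frame-buffer tracking. The old domains are kept only for the trace/log call at line 794, which is left as a comment in this reconstruction.

    list_for_each_entry(obj, objects, exec_list) {
            u32 old_read = obj->base.read_domains;
            u32 old_write = obj->base.write_domain;

            obj->base.read_domains = obj->base.pending_read_domains;
            obj->base.write_domain = obj->base.pending_write_domain;
            obj->fenced_gpu_access = obj->pending_fenced_gpu_access;

            i915_gem_object_move_to_active(obj, ring);
            if (obj->base.write_domain) {
                    obj->dirty = 1;
                    obj->last_write_seqno = intel_ring_get_seqno(ring);
                    if (obj->pin_count) /* check for potential scanout */
                            intel_mark_fb_busy(obj);
            }

            /* line 794: trace the domain change (obj, old_read, old_write) */
    }
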
1005 struct drm_i915_gem_object *obj;
1007 obj = to_intel_bo(drm_gem_object_lookup(dev, file,
1009 if (&obj->base == NULL) {
1017 if (!list_empty(&obj->exec_list)) {
1019 obj, exec[i].handle, i);
1024 list_add_tail(&obj->exec_list, &objects);
1025 obj->exec_handle = exec[i].handle;
1026 obj->exec_entry = &exec[i];
1027 eb_add_object(eb, obj);
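
Lines 1005-1027 are the main ioctl's lookup pass: each handle in the exec array is resolved to an object, a handle that appears twice is rejected (its exec_list is already non-empty from the first occurrence), and the object is queued on the local list and entered into the eb table. A condensed sketch; the loop bound args->buffer_count, the error codes, and the elided debug messages are assumptions.

    for (i = 0; i < args->buffer_count; i++) {
            obj = to_intel_bo(drm_gem_object_lookup(dev, file,
                exec[i].handle));
            if (&obj->base == NULL) {
                    /* Lookup returned NULL (base is the first member,
                     * so &obj->base is NULL exactly in that case). */
                    ret = -ENOENT;
                    goto err;
            }

            /* The same handle listed twice would clobber exec_entry
             * and exec_list, so reject it. */
            if (!list_empty(&obj->exec_list)) {
                    ret = -EINVAL;
                    goto err;
            }

            list_add_tail(&obj->exec_list, &objects);
            obj->exec_handle = exec[i].handle;
            obj->exec_entry = &exec[i];
            eb_add_object(eb, obj);
    }
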
1130 struct drm_i915_gem_object *obj;
1132 obj = list_first_entry(&objects,
1135 list_del_init(&obj->exec_list);
1136 drm_gem_object_unreference(&obj->base);
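
Lines 1130-1136 (like 519-529 earlier) drain the local object list on the way out, dropping the reference taken by the lookup above. Reconstructed:

    while (!list_empty(&objects)) {
            obj = list_first_entry(&objects,
                struct drm_i915_gem_object, exec_list);
            list_del_init(&obj->exec_list);
            drm_gem_object_unreference(&obj->base);
    }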