Searched refs:bo (Results 1 - 25 of 374) sorted by relevance


/linux-master/drivers/gpu/drm/xe/display/
intel_fb_bo.c
12 void intel_fb_bo_framebuffer_fini(struct xe_bo *bo) argument
14 if (bo->flags & XE_BO_CREATE_PINNED_BIT) {
16 xe_bo_lock(bo, false);
17 xe_bo_unpin(bo);
18 xe_bo_unlock(bo);
20 xe_bo_put(bo);
24 struct xe_bo *bo,
27 struct drm_i915_private *i915 = to_i915(bo->ttm.base.dev);
30 xe_bo_get(bo);
32 ret = ttm_bo_reserve(&bo
23 intel_fb_bo_framebuffer_init(struct intel_framebuffer *intel_fb, struct xe_bo *bo, struct drm_mode_fb_cmd2 *mode_cmd) argument
62 struct drm_i915_gem_object *bo; local
[all...]
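
The intel_fb_bo.c hit above shows the teardown order for a pinned framebuffer BO: unpin under the BO lock, then drop the reference. A minimal sketch of that pattern, assuming the xe helpers listed here; example_fb_bo_fini() is a hypothetical name and XE_BO_CREATE_PINNED_BIT is taken verbatim from the snippet (flag names vary across kernel versions):

/* Unpin-then-put teardown, mirroring intel_fb_bo_framebuffer_fini() above. */
static void example_fb_bo_fini(struct xe_bo *bo)
{
	if (bo->flags & XE_BO_CREATE_PINNED_BIT) {
		xe_bo_lock(bo, false);	/* non-interruptible BO lock */
		xe_bo_unpin(bo);	/* drop the pin taken at framebuffer init */
		xe_bo_unlock(bo);
	}
	xe_bo_put(bo);			/* release the init-time reference */
}
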
/linux-master/drivers/accel/ivpu/
ivpu_gem.c
25 static inline void ivpu_dbg_bo(struct ivpu_device *vdev, struct ivpu_bo *bo, const char *action) argument
28 "%6s: bo %8p vpu_addr %9llx size %8zu ctx %d has_pages %d dma_mapped %d mmu_mapped %d wc %d imported %d\n",
29 action, bo, bo->vpu_addr, ivpu_bo_size(bo), bo->ctx ? bo->ctx->id : 0,
30 (bool)bo->base.pages, (bool)bo->base.sgt, bo
41 ivpu_bo_pin(struct ivpu_bo *bo) argument
76 ivpu_bo_alloc_vpu_addr(struct ivpu_bo *bo, struct ivpu_mmu_context *ctx, const struct ivpu_addr_range *range) argument
104 ivpu_bo_unbind_locked(struct ivpu_bo *bo) argument
138 struct ivpu_bo *bo; local
157 struct ivpu_bo *bo; local
178 struct ivpu_bo *bo; local
207 struct ivpu_bo *bo = to_ivpu_bo(obj); local
229 struct ivpu_bo *bo = to_ivpu_bo(obj); local
265 struct ivpu_bo *bo; local
295 struct ivpu_bo *bo; local
341 ivpu_bo_free(struct ivpu_bo *bo) argument
358 struct ivpu_bo *bo; local
404 ivpu_bo_print_info(struct ivpu_bo *bo, struct drm_printer *p) argument
429 struct ivpu_bo *bo; local
[all...]
ivpu_gem.h
27 int ivpu_bo_pin(struct ivpu_bo *bo);
34 void ivpu_bo_free(struct ivpu_bo *bo);
48 static inline void *ivpu_bo_vaddr(struct ivpu_bo *bo) argument
50 return bo->base.vaddr;
53 static inline size_t ivpu_bo_size(struct ivpu_bo *bo) argument
55 return bo->base.base.size;
58 static inline u32 ivpu_bo_cache_mode(struct ivpu_bo *bo) argument
60 return bo->flags & DRM_IVPU_BO_CACHE_MASK;
63 static inline bool ivpu_bo_is_snooped(struct ivpu_bo *bo) argument
65 return ivpu_bo_cache_mode(bo)
68 ivpu_bo_to_vdev(struct ivpu_bo *bo) argument
73 ivpu_to_cpu_addr(struct ivpu_bo *bo, u32 vpu_addr) argument
84 cpu_to_vpu_addr(struct ivpu_bo *bo, void *cpu_addr) argument
[all...]
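
The ivpu_gem.h hits are inline accessors over the nested drm_gem_shmem object (bo->base) and its drm_gem base (bo->base.base). A hedged usage sketch built only on the accessors indexed above; ivpu_dbg_example() is an invented helper name:

/* Query an ivpu_bo through its ivpu_gem.h accessors. */
static void ivpu_dbg_example(struct ivpu_bo *bo)
{
	void *vaddr = ivpu_bo_vaddr(bo);	/* bo->base.vaddr */
	size_t size = ivpu_bo_size(bo);		/* bo->base.base.size */

	pr_debug("bo %p vaddr %p size %zu snooped %d\n",
		 bo, vaddr, size, ivpu_bo_is_snooped(bo));
}
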
/linux-master/drivers/gpu/drm/qxl/
qxl_object.h
30 static inline int qxl_bo_reserve(struct qxl_bo *bo) argument
34 r = ttm_bo_reserve(&bo->tbo, true, false, NULL);
37 struct drm_device *ddev = bo->tbo.base.dev;
39 dev_err(ddev->dev, "%p reserve failed\n", bo);
46 static inline void qxl_bo_unreserve(struct qxl_bo *bo) argument
48 ttm_bo_unreserve(&bo->tbo);
51 static inline unsigned long qxl_bo_size(struct qxl_bo *bo) argument
53 return bo->tbo.base.size;
62 int qxl_bo_vmap(struct qxl_bo *bo, struct iosys_map *map);
63 int qxl_bo_vmap_locked(struct qxl_bo *bo, struc
[all...]
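
qxl_bo_reserve()/qxl_bo_unreserve() above are thin wrappers over ttm_bo_reserve()/ttm_bo_unreserve() on the embedded tbo. A minimal reserve/use/unreserve bracket under that assumption; example_touch_bo() is a hypothetical caller:

/* Reserve-use-unreserve bracket around a qxl_bo. */
static int example_touch_bo(struct qxl_bo *bo)
{
	int r;

	r = qxl_bo_reserve(bo);	/* ttm_bo_reserve(&bo->tbo, true, false, NULL) */
	if (r)
		return r;
	/* ... _locked operations such as qxl_bo_vmap_locked() go here ... */
	qxl_bo_unreserve(bo);	/* ttm_bo_unreserve(&bo->tbo) */
	return 0;
}
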
qxl_object.c
32 static int __qxl_bo_pin(struct qxl_bo *bo);
33 static void __qxl_bo_unpin(struct qxl_bo *bo);
37 struct qxl_bo *bo; local
40 bo = to_qxl_bo(tbo);
41 qdev = to_qxl(bo->tbo.base.dev);
43 qxl_surface_evict(qdev, bo, false);
44 WARN_ON_ONCE(bo->map_count > 0);
46 list_del_init(&bo->list);
48 drm_gem_object_release(&bo->tbo.base);
49 kfree(bo);
52 qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo) argument
113 struct qxl_bo *bo; local
159 qxl_bo_vmap_locked(struct qxl_bo *bo, struct iosys_map *map) argument
192 qxl_bo_vmap(struct qxl_bo *bo, struct iosys_map *map) argument
205 qxl_bo_kmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, int page_offset) argument
238 qxl_bo_vunmap_locked(struct qxl_bo *bo) argument
252 qxl_bo_vunmap(struct qxl_bo *bo) argument
265 qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *pmap) argument
278 qxl_bo_unref(struct qxl_bo **bo) argument
287 qxl_bo_ref(struct qxl_bo *bo) argument
293 __qxl_bo_pin(struct qxl_bo *bo) argument
312 __qxl_bo_unpin(struct qxl_bo *bo) argument
322 qxl_bo_pin(struct qxl_bo *bo) argument
340 qxl_bo_unpin(struct qxl_bo *bo) argument
355 struct qxl_bo *bo, *n; local
382 qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo) argument
[all...]
qxl_prime.c
33 struct qxl_bo *bo = gem_to_qxl_bo(obj); local
35 return qxl_bo_pin(bo);
40 struct qxl_bo *bo = gem_to_qxl_bo(obj); local
42 qxl_bo_unpin(bo);
59 struct qxl_bo *bo = gem_to_qxl_bo(obj); local
62 ret = qxl_bo_vmap_locked(bo, map);
72 struct qxl_bo *bo = gem_to_qxl_bo(obj); local
74 qxl_bo_vunmap_locked(bo);
/linux-master/drivers/gpu/drm/radeon/
radeon_mn.c
53 struct radeon_bo *bo = container_of(mn, struct radeon_bo, notifier); local
57 if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))
63 r = radeon_bo_reserve(bo, true);
65 DRM_ERROR("(%ld) failed to reserve user bo\n", r);
69 r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,
72 DRM_ERROR("(%ld) failed to wait for user bo\n", r);
74 radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);
75 r = ttm_bo_validate(&bo
96 radeon_mn_register(struct radeon_bo *bo, unsigned long addr) argument
122 radeon_mn_unregister(struct radeon_bo *bo) argument
[all...]
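
The radeon_mn.c hit is the MMU-notifier invalidate path: reserve the user BO, wait out every bookkeeping fence on its reservation object, then move it to the CPU domain. A condensed sketch; the ttm_bo_validate() arguments follow the usual radeon pattern since the indexed line is truncated, and example_invalidate() is an invented name:

/* Reserve, drain fences, kick the BO to the CPU domain. */
static void example_invalidate(struct radeon_bo *bo)
{
	struct ttm_operation_ctx ctx = { false, false };
	long r;

	if (radeon_bo_reserve(bo, true))
		return;

	r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,
				  false, MAX_SCHEDULE_TIMEOUT);
	if (r <= 0)
		DRM_ERROR("(%ld) failed to wait for user bo\n", r);

	radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);
	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
	if (r)
		DRM_ERROR("(%ld) failed to validate user bo\n", r);
	radeon_bo_unreserve(bo);
}
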
radeon_prime.c
39 struct radeon_bo *bo = gem_to_radeon_bo(obj); local
41 return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,
42 bo->tbo.ttm->num_pages);
51 struct radeon_bo *bo; local
56 RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo);
61 bo->tbo.base.funcs = &radeon_gem_object_funcs;
64 list_add_tail(&bo->list, &rdev->gem.objects);
67 bo->prime_shared_count = 1;
68 return &bo->tbo.base;
73 struct radeon_bo *bo local
91 struct radeon_bo *bo = gem_to_radeon_bo(obj); local
108 struct radeon_bo *bo = gem_to_radeon_bo(gobj); local
[all...]
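
The export side of radeon_prime.c converts the BO's TTM page array into a scatter-gather table. The sketch below simply restates the indexed call in context; example_get_sg() is an invented wrapper name:

/* Build an sg_table from a radeon BO's backing pages for dma-buf export. */
static struct sg_table *example_get_sg(struct drm_gem_object *obj)
{
	struct radeon_bo *bo = gem_to_radeon_bo(obj);

	return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,
				     bo->tbo.ttm->num_pages);
}
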
radeon_object.c
45 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
54 struct radeon_bo *bo; local
56 bo = container_of(tbo, struct radeon_bo, tbo);
58 mutex_lock(&bo->rdev->gem.mutex);
59 list_del_init(&bo->list);
60 mutex_unlock(&bo->rdev->gem.mutex);
61 radeon_bo_clear_surface_reg(bo);
62 WARN_ON_ONCE(!list_empty(&bo->va));
63 if (bo->tbo.base.import_attach)
64 drm_prime_gem_destroy(&bo
69 radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo) argument
134 struct radeon_bo *bo; local
217 radeon_bo_kmap(struct radeon_bo *bo, void **ptr) argument
245 radeon_bo_kunmap(struct radeon_bo *bo) argument
254 radeon_bo_ref(struct radeon_bo *bo) argument
263 radeon_bo_unref(struct radeon_bo **bo) argument
274 radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset, u64 *gpu_addr) argument
333 radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr) argument
338 radeon_bo_unpin(struct radeon_bo *bo) argument
370 struct radeon_bo *bo, *n; local
486 struct radeon_bo *bo = lobj->robj; local
540 radeon_bo_get_surface_reg(struct radeon_bo *bo) argument
594 radeon_bo_clear_surface_reg(struct radeon_bo *bo) argument
609 radeon_bo_set_tiling_flags(struct radeon_bo *bo, uint32_t tiling_flags, uint32_t pitch) argument
669 radeon_bo_get_tiling_flags(struct radeon_bo *bo, uint32_t *tiling_flags, uint32_t *pitch) argument
681 radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved, bool force_drop) argument
710 radeon_bo_move_notify(struct ttm_buffer_object *bo) argument
722 radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo) argument
784 radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, bool shared) argument
[all...]
radeon_object.h
56 * radeon_bo_reserve - reserve bo
57 * @bo: bo structure
64 static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr) argument
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
71 dev_err(bo->rdev->dev, "%p reserve failed\n", bo);
77 static inline void radeon_bo_unreserve(struct radeon_bo *bo) argument
79 ttm_bo_unreserve(&bo->tbo);
83 * radeon_bo_gpu_offset - return GPU offset of bo
91 radeon_bo_gpu_offset(struct radeon_bo *bo) argument
110 radeon_bo_size(struct radeon_bo *bo) argument
115 radeon_bo_ngpu_pages(struct radeon_bo *bo) argument
120 radeon_bo_gpu_page_alignment(struct radeon_bo *bo) argument
131 radeon_bo_mmap_offset(struct radeon_bo *bo) argument
[all...]
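
Note the inverted flag: radeon_bo_reserve(bo, no_intr) passes !no_intr as the interruptible argument to ttm_bo_reserve(). A hedged example pairing it with the radeon_bo_kmap()/radeon_bo_kunmap() entries indexed in radeon_object.c above; example_cpu_fill() is an invented helper:

/* Reserve, CPU-map, fill, unmap, unreserve. */
static int example_cpu_fill(struct radeon_bo *bo, u8 value)
{
	void *ptr;
	int r;

	r = radeon_bo_reserve(bo, false);	/* false => interruptible */
	if (r)
		return r;
	r = radeon_bo_kmap(bo, &ptr);
	if (!r) {
		memset(ptr, value, radeon_bo_size(bo));
		radeon_bo_kunmap(bo);
	}
	radeon_bo_unreserve(bo);
	return r;
}
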
/linux-master/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
47 static int __bo_init(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, argument
59 memset(bo, 0, sizeof(*bo));
60 mutex_init(&bo->mutex);
62 /* init the bo->list HEAD as an element of entire_bo_list */
63 INIT_LIST_HEAD(&bo->list);
65 bo->bdev = bdev;
66 bo->vmap_addr = NULL;
67 bo->status = HMM_BO_FREE;
68 bo
132 struct hmm_buffer_object *bo; local
157 struct hmm_buffer_object *bo; local
178 __bo_insert_to_free_rbtree(struct rb_root *root, struct hmm_buffer_object *bo) argument
211 __bo_insert_to_alloc_rbtree(struct rb_root *root, struct hmm_buffer_object *bo) argument
236 __bo_break_up(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, unsigned int pgnr) argument
268 __bo_take_off_handling(struct hmm_buffer_object *bo) argument
309 __bo_merge(struct hmm_buffer_object *bo, struct hmm_buffer_object *next_bo) argument
336 struct hmm_buffer_object *bo; local
396 struct hmm_buffer_object *bo, *new_bo; local
439 hmm_bo_release(struct hmm_buffer_object *bo) argument
503 struct hmm_buffer_object *bo; local
546 hmm_bo_allocated(struct hmm_buffer_object *bo) argument
556 struct hmm_buffer_object *bo; local
576 struct hmm_buffer_object *bo; local
597 struct hmm_buffer_object *bo; local
626 free_private_bo_pages(struct hmm_buffer_object *bo) argument
633 alloc_private_pages(struct hmm_buffer_object *bo) argument
655 alloc_vmalloc_pages(struct hmm_buffer_object *bo, void *vmalloc_addr) argument
680 hmm_bo_alloc_pages(struct hmm_buffer_object *bo, enum hmm_bo_type type, void *vmalloc_addr) argument
731 hmm_bo_free_pages(struct hmm_buffer_object *bo) argument
760 hmm_bo_page_allocated(struct hmm_buffer_object *bo) argument
770 hmm_bo_bind(struct hmm_buffer_object *bo) argument
848 hmm_bo_unbind(struct hmm_buffer_object *bo) argument
891 hmm_bo_binded(struct hmm_buffer_object *bo) argument
906 hmm_bo_vmap(struct hmm_buffer_object *bo, bool cached) argument
937 hmm_bo_flush_vmap(struct hmm_buffer_object *bo) argument
951 hmm_bo_vunmap(struct hmm_buffer_object *bo) argument
966 hmm_bo_ref(struct hmm_buffer_object *bo) argument
981 hmm_bo_unref(struct hmm_buffer_object *bo) argument
990 struct hmm_buffer_object *bo = local
1008 struct hmm_buffer_object *bo = local
1035 hmm_bo_mmap(struct vm_area_struct *vma, struct hmm_buffer_object *bo) argument
[all...]
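
hmm_bo.c carries the whole atomisp buffer-object lifecycle: allocate backing pages, bind them into the ISP MMU, vmap for the CPU, then unwind. A sketch of that lifecycle using only the entry points indexed above; return types are abbreviated in the index, so the usual atomisp signatures are assumed, and example_hmm_lifecycle() plus the pre-allocated bo are hypothetical:

/* Allocate pages, bind to the ISP MMU, map for CPU access, tear down. */
static int example_hmm_lifecycle(struct hmm_buffer_object *bo)
{
	void *va;
	int ret;

	ret = hmm_bo_alloc_pages(bo, HMM_BO_PRIVATE, NULL);
	if (ret)
		return ret;
	ret = hmm_bo_bind(bo);
	if (ret) {
		hmm_bo_free_pages(bo);
		return ret;
	}

	va = hmm_bo_vmap(bo, true);	/* cached CPU mapping */
	if (va) {
		/* ... CPU access through va ... */
		hmm_bo_vunmap(bo);
	}

	hmm_bo_unbind(bo);
	hmm_bo_free_pages(bo);
	return 0;
}
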
hmm.c
52 struct hmm_buffer_object *bo; local
67 list_for_each_entry(bo, bo_list, list) {
68 if ((active && (bo->status & HMM_BO_ALLOCED)) ||
69 (!active && !(bo->status & HMM_BO_ALLOCED))) {
72 hmm_bo_type_string[bo->type], bo->pgnr);
74 total[bo->type] += bo->pgnr;
75 count[bo->type]++;
173 struct hmm_buffer_object *bo; local
232 struct hmm_buffer_object *bo; local
253 hmm_check_bo(struct hmm_buffer_object *bo, unsigned int ptr) argument
281 struct hmm_buffer_object *bo; local
324 struct hmm_buffer_object *bo; local
381 struct hmm_buffer_object *bo; local
464 struct hmm_buffer_object *bo; local
525 struct hmm_buffer_object *bo; local
543 struct hmm_buffer_object *bo; local
559 struct hmm_buffer_object *bo; local
580 struct hmm_buffer_object *bo; local
595 struct hmm_buffer_object *bo; local
[all...]
/linux-master/drivers/gpu/drm/xe/
xe_bo_evict.c
30 struct xe_bo *bo; local
56 bo = list_first_entry_or_null(&xe->pinned.external_vram,
57 typeof(*bo), pinned_link);
58 if (!bo)
60 xe_bo_get(bo);
61 list_move_tail(&bo->pinned_link, &still_in_list);
64 xe_bo_lock(bo, false);
65 ret = xe_bo_evict_pinned(bo);
66 xe_bo_unlock(bo);
67 xe_bo_put(bo);
125 struct xe_bo *bo; local
185 struct xe_bo *bo; local
[all...]
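
xe_bo_evict.c walks the pinned-external list one BO at a time: take a reference, park the entry on a local still_in_list, then evict under the BO's own lock. A condensed sketch of that loop; the list locking of the real driver is elided here and example_evict_all() is an invented name:

/* Get/move/evict/put loop over xe->pinned.external_vram. */
static int example_evict_all(struct xe_device *xe)
{
	LIST_HEAD(still_in_list);
	struct xe_bo *bo;
	int ret = 0;

	for (;;) {
		bo = list_first_entry_or_null(&xe->pinned.external_vram,
					      typeof(*bo), pinned_link);
		if (!bo)
			break;
		xe_bo_get(bo);
		list_move_tail(&bo->pinned_link, &still_in_list);

		xe_bo_lock(bo, false);
		ret = xe_bo_evict_pinned(bo);
		xe_bo_unlock(bo);
		xe_bo_put(bo);
		if (ret)
			break;
	}
	return ret;
}
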
xe_bo.h
96 void xe_bo_free(struct xe_bo *bo);
98 struct xe_bo *___xe_bo_create_locked(struct xe_device *xe, struct xe_bo *bo,
134 int xe_bo_placement_for_flags(struct xe_device *xe, struct xe_bo *bo,
137 static inline struct xe_bo *ttm_to_xe_bo(const struct ttm_buffer_object *bo) argument
139 return container_of(bo, struct xe_bo, ttm);
147 #define xe_bo_device(bo) ttm_to_xe_device((bo)->ttm.bdev)
149 static inline struct xe_bo *xe_bo_get(struct xe_bo *bo) argument
151 if (bo)
152 drm_gem_object_get(&bo
157 xe_bo_put(struct xe_bo *bo) argument
163 __xe_bo_unset_bulk_move(struct xe_bo *bo) argument
169 xe_bo_assert_held(struct xe_bo *bo) argument
179 xe_bo_unlock_vm_held(struct xe_bo *bo) argument
196 xe_bo_is_pinned(struct xe_bo *bo) argument
201 xe_bo_unpin_map_no_vm(struct xe_bo *bo) argument
217 xe_bo_main_addr(struct xe_bo *bo, size_t page_size) argument
223 xe_bo_ggtt_addr(struct xe_bo *bo) argument
262 xe_bo_ccs_pages_start(struct xe_bo *bo) argument
267 xe_bo_has_pages(struct xe_bo *bo) argument
302 xe_bo_put_deferred(struct xe_bo *bo, struct llist_head *deferred) argument
354 xe_bo_is_mem_type(struct xe_bo *bo, u32 mem_type) argument
[all...]
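
xe_bo.h's xe_bo_get()/xe_bo_put() are NULL-tolerant wrappers over the GEM object refcount (xe_bo_put_deferred() exists for contexts that cannot drop the last reference inline). A small bracket using the plain pair; example_use_bo() is hypothetical:

/* NULL-safe reference bracket on an xe_bo. */
static void example_use_bo(struct xe_bo *bo)
{
	bo = xe_bo_get(bo);	/* takes a GEM reference if bo is non-NULL */
	if (!bo)
		return;
	/* ... use the BO ... */
	xe_bo_put(bo);
}
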
xe_sa.c
19 struct xe_bo *bo = sa_manager->bo; local
21 if (!bo) {
22 drm_err(drm, "no bo for sa manager\n");
28 if (bo->vmap.is_iomem)
31 xe_bo_unpin_map_no_vm(bo);
32 sa_manager->bo = NULL;
39 struct xe_bo *bo; local
48 sa_manager->bo = NULL;
50 bo
[all...]
xe_bo.c
86 bool xe_bo_is_vram(struct xe_bo *bo) argument
88 return resource_is_vram(bo->ttm.resource) ||
89 resource_is_stolen_vram(xe_bo_device(bo), bo->ttm.resource);
92 bool xe_bo_is_stolen(struct xe_bo *bo) argument
94 return bo->ttm.resource->mem_type == XE_PL_STOLEN;
99 * @bo: The BO
106 bool xe_bo_is_stolen_devmem(struct xe_bo *bo) argument
108 return xe_bo_is_stolen(bo) &&
109 GRAPHICS_VERx100(xe_bo_device(bo)) >
112 xe_bo_is_user(struct xe_bo *bo) argument
137 try_add_system(struct xe_device *xe, struct xe_bo *bo, u32 bo_flags, u32 *c) argument
150 add_vram(struct xe_device *xe, struct xe_bo *bo, struct ttm_place *places, u32 bo_flags, u32 mem_type, u32 *c) argument
183 try_add_vram(struct xe_device *xe, struct xe_bo *bo, u32 bo_flags, u32 *c) argument
192 try_add_stolen(struct xe_device *xe, struct xe_bo *bo, u32 bo_flags, u32 *c) argument
208 __xe_bo_placement_for_flags(struct xe_device *xe, struct xe_bo *bo, u32 bo_flags) argument
228 xe_bo_placement_for_flags(struct xe_device *xe, struct xe_bo *bo, u32 bo_flags) argument
304 xe_bo_sg(struct xe_bo *bo) argument
315 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); local
449 xe_bo_trigger_rebind(struct xe_device *xe, struct xe_bo *bo, const struct ttm_operation_ctx *ctx) argument
571 xe_bo_move_notify(struct xe_bo *bo, const struct ttm_operation_ctx *ctx) argument
618 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); local
804 xe_bo_evict_pinned(struct xe_bo *bo) argument
876 xe_bo_restore_pinned(struct xe_bo *bo) argument
924 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); local
968 struct xe_bo *bo; local
1046 struct xe_bo *bo = ttm_to_xe_bo(ttm_bo); local
1096 struct xe_bo *bo = gem_to_xe_bo(obj); local
1112 struct xe_bo *bo = ttm_to_xe_bo(tbo); local
1184 struct xe_bo *bo = kzalloc(sizeof(*bo), GFP_KERNEL); local
1198 xe_bo_free(struct xe_bo *bo) argument
1203 ___xe_bo_create_locked(struct xe_device *xe, struct xe_bo *bo, struct xe_tile *tile, struct dma_resv *resv, struct ttm_lru_bulk_move *bulk, size_t size, u16 cpu_caching, enum ttm_bo_type type, u32 flags) argument
1328 __xe_bo_fixed_placement(struct xe_device *xe, struct xe_bo *bo, u32 flags, u64 start, u64 end, u64 size) argument
1372 struct xe_bo *bo = NULL; local
1457 struct xe_bo *bo = __xe_bo_create_locked(xe, tile, vm, size, 0, ~0ULL, local
1470 struct xe_bo *bo = xe_bo_create_locked(xe, tile, vm, size, type, flags); local
1483 struct xe_bo *bo; local
1528 struct xe_bo *bo = xe_bo_create_pin_map(xe, tile, NULL, local
1547 struct xe_bo *bo; local
1564 struct xe_bo *bo = xe_managed_bo_create_pin_map(xe, tile, ALIGN(size, PAGE_SIZE), flags); local
1589 struct xe_bo *bo; local
1630 xe_bo_pin_external(struct xe_bo *bo) argument
1662 xe_bo_pin(struct xe_bo *bo) argument
1730 xe_bo_unpin_external(struct xe_bo *bo) argument
1753 xe_bo_unpin(struct xe_bo *bo) argument
1792 xe_bo_validate(struct xe_bo *bo, struct xe_vm *vm, bool allow_res_evict) argument
1810 xe_bo_is_xe_bo(struct ttm_buffer_object *bo) argument
1824 __xe_bo_addr(struct xe_bo *bo, u64 offset, size_t page_size) argument
1849 xe_bo_addr(struct xe_bo *bo, u64 offset, size_t page_size) argument
1856 xe_bo_vmap(struct xe_bo *bo) argument
1890 __xe_bo_vunmap(struct xe_bo *bo) argument
1898 xe_bo_vunmap(struct xe_bo *bo) argument
1911 struct xe_bo *bo; local
2051 xe_bo_lock(struct xe_bo *bo, bool intr) argument
2067 xe_bo_unlock(struct xe_bo *bo) argument
2087 xe_bo_can_migrate(struct xe_bo *bo, u32 mem_type) argument
2127 xe_bo_migrate(struct xe_bo *bo, u32 mem_type) argument
2177 xe_bo_evict(struct xe_bo *bo, bool force_alloc) argument
2205 xe_bo_needs_ccs_pages(struct xe_bo *bo) argument
2244 struct xe_bo *bo, *next; local
2272 struct xe_bo *bo; local
2300 xe_bo_runtime_pm_release_mmap_offset(struct xe_bo *bo) argument
[all...]
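
Among the xe_bo.c entries, xe_bo_vmap()/xe_bo_vunmap() manage the kernel mapping behind bo->vmap (the iosys_map also visible in the xe_sa.c hit above), and the mapping calls expect the BO lock to be held. A hedged bracket; example_xe_vmap() is an invented name:

/* Lock, vmap, use bo->vmap, vunmap, unlock. */
static int example_xe_vmap(struct xe_bo *bo)
{
	int ret;

	ret = xe_bo_lock(bo, true);	/* interruptible */
	if (ret)
		return ret;
	ret = xe_bo_vmap(bo);
	if (!ret) {
		/* ... access through bo->vmap (iosys_map) ... */
		xe_bo_vunmap(bo);
	}
	xe_bo_unlock(bo);
	return ret;
}
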
/linux-master/drivers/gpu/drm/ttm/
ttm_bo.c
49 static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo, argument
60 man = ttm_manager_type(bo->bdev, mem_type);
68 * @bo: The buffer object.
74 void ttm_bo_move_to_lru_tail(struct ttm_buffer_object *bo) argument
76 dma_resv_assert_held(bo->base.resv);
78 if (bo->resource)
79 ttm_resource_move_to_lru_tail(bo->resource);
86 * @bo: The buffer object.
97 void ttm_bo_set_bulk_move(struct ttm_buffer_object *bo, argument
100 dma_resv_assert_held(bo
115 ttm_bo_handle_move_mem(struct ttm_buffer_object *bo, struct ttm_resource *mem, bool evict, struct ttm_operation_ctx *ctx, struct ttm_place *hop) argument
177 ttm_bo_cleanup_memtype_use(struct ttm_buffer_object *bo) argument
186 ttm_bo_individualize_resv(struct ttm_buffer_object *bo) argument
213 ttm_bo_flush_all_fences(struct ttm_buffer_object *bo) argument
241 ttm_bo_cleanup_refs(struct ttm_buffer_object *bo, bool interruptible, bool no_wait_gpu, bool unlock_resv) argument
307 struct ttm_buffer_object *bo; local
321 struct ttm_buffer_object *bo = local
398 ttm_bo_put(struct ttm_buffer_object *bo) argument
404 ttm_bo_bounce_temp_buffer(struct ttm_buffer_object *bo, struct ttm_resource **mem, struct ttm_operation_ctx *ctx, struct ttm_place *hop) argument
429 ttm_bo_evict(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx) argument
492 ttm_bo_eviction_valuable(struct ttm_buffer_object *bo, const struct ttm_place *place) argument
519 ttm_bo_evict_swapout_allowable(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx, const struct ttm_place *place, bool *locked, bool *busy) argument
600 struct ttm_buffer_object *bo = NULL, *busy_bo = NULL; local
662 ttm_bo_pin(struct ttm_buffer_object *bo) argument
680 ttm_bo_unpin(struct ttm_buffer_object *bo) argument
699 ttm_bo_add_move_fence(struct ttm_buffer_object *bo, struct ttm_resource_manager *man, struct ttm_resource *mem, bool no_wait_gpu) argument
731 ttm_bo_mem_force_space(struct ttm_buffer_object *bo, const struct ttm_place *place, struct ttm_resource **mem, struct ttm_operation_ctx *ctx) argument
776 ttm_bo_mem_space(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_resource **mem, struct ttm_operation_ctx *ctx) argument
849 ttm_bo_move_buffer(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_operation_ctx *ctx) argument
901 ttm_bo_validate(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_operation_ctx *ctx) argument
975 ttm_bo_init_reserved(struct ttm_device *bdev, struct ttm_buffer_object *bo, enum ttm_bo_type type, struct ttm_placement *placement, uint32_t alignment, struct ttm_operation_ctx *ctx, struct sg_table *sg, struct dma_resv *resv, void (*destroy) (struct ttm_buffer_object *)) argument
1068 ttm_bo_init_validate(struct ttm_device *bdev, struct ttm_buffer_object *bo, enum ttm_bo_type type, struct ttm_placement *placement, uint32_t alignment, bool interruptible, struct sg_table *sg, struct dma_resv *resv, void (*destroy) (struct ttm_buffer_object *)) argument
1098 ttm_bo_unmap_virtual(struct ttm_buffer_object *bo) argument
1117 ttm_bo_wait_ctx(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx) argument
1139 ttm_bo_swapout(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx, gfp_t gfp_flags) argument
1226 ttm_bo_tt_destroy(struct ttm_buffer_object *bo) argument
[all...]
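
ttm_bo_pin()/ttm_bo_unpin() above both assert that the reservation lock is held, so each call sits inside its own reserve bracket. A minimal sketch; example_ttm_pin_window() is an invented name:

/* Pin a TTM BO across a window in which it must not be evicted. */
static int example_ttm_pin_window(struct ttm_buffer_object *bo)
{
	int ret;

	ret = ttm_bo_reserve(bo, true, false, NULL);
	if (ret)
		return ret;
	ttm_bo_pin(bo);
	ttm_bo_unreserve(bo);

	/* ... the BO cannot be evicted or swapped out here ... */

	ret = ttm_bo_reserve(bo, true, false, NULL);
	if (ret)
		return ret;	/* still pinned; caller must retry the unpin */
	ttm_bo_unpin(bo);
	ttm_bo_unreserve(bo);
	return 0;
}
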
ttm_bo_vm.c
41 static vm_fault_t ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo, argument
49 if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_KERNEL))
61 ttm_bo_get(bo);
63 (void)dma_resv_wait_timeout(bo->base.resv,
66 dma_resv_unlock(bo->base.resv);
67 ttm_bo_put(bo);
74 err = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL, true,
84 static unsigned long ttm_bo_io_mem_pfn(struct ttm_buffer_object *bo, argument
87 struct ttm_device *bdev = bo->bdev;
90 return bdev->funcs->io_mem_pfn(bo, page_offse
116 ttm_bo_vm_reserve(struct ttm_buffer_object *bo, struct vm_fault *vmf) argument
186 struct ttm_buffer_object *bo = vma->vm_private_data; local
294 struct ttm_buffer_object *bo = vma->vm_private_data; local
325 struct ttm_buffer_object *bo = vma->vm_private_data; local
352 struct ttm_buffer_object *bo = vma->vm_private_data; local
362 struct ttm_buffer_object *bo = vma->vm_private_data; local
369 ttm_bo_vm_access_kmap(struct ttm_buffer_object *bo, unsigned long offset, uint8_t *buf, int len, int write) argument
411 struct ttm_buffer_object *bo = vma->vm_private_data; local
459 ttm_bo_mmap_obj(struct vm_area_struct *vma, struct ttm_buffer_object *bo) argument
[all...]
ttm_bo_util.c
42 struct ttm_buffer_object *bo; member in struct:ttm_transfer_obj
129 * @bo: A pointer to a struct ttm_buffer_object.
136 * and update the (@bo)->mem placement flags. If unsuccessful, the old
142 int ttm_bo_move_memcpy(struct ttm_buffer_object *bo, argument
146 struct ttm_device *bdev = bo->bdev;
148 ttm_manager_type(bo->bdev, dst_mem->mem_type);
149 struct ttm_tt *ttm = bo->ttm;
150 struct ttm_resource *src_mem = bo->resource;
173 dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm);
179 src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo
201 ttm_transfered_destroy(struct ttm_buffer_object *bo) argument
226 ttm_buffer_object_transfer(struct ttm_buffer_object *bo, struct ttm_buffer_object **new_obj) argument
290 ttm_io_prot(struct ttm_buffer_object *bo, struct ttm_resource *res, pgprot_t tmp) argument
309 ttm_bo_ioremap(struct ttm_buffer_object *bo, unsigned long offset, unsigned long size, struct ttm_bo_kmap_obj *map) argument
335 ttm_bo_kmap_ttm(struct ttm_buffer_object *bo, unsigned long start_page, unsigned long num_pages, struct ttm_bo_kmap_obj *map) argument
396 ttm_bo_kmap(struct ttm_buffer_object *bo, unsigned long start_page, unsigned long num_pages, struct ttm_bo_kmap_obj *map) argument
469 ttm_bo_vmap(struct ttm_buffer_object *bo, struct iosys_map *map) argument
538 ttm_bo_vunmap(struct ttm_buffer_object *bo, struct iosys_map *map) argument
557 ttm_bo_wait_free_node(struct ttm_buffer_object *bo, bool dst_use_tt) argument
575 ttm_bo_move_to_ghost(struct ttm_buffer_object *bo, struct dma_fence *fence, bool dst_use_tt) argument
613 ttm_bo_move_pipeline_evict(struct ttm_buffer_object *bo, struct dma_fence *fence) argument
651 ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo, struct dma_fence *fence, bool evict, bool pipeline, struct ttm_resource *new_mem) argument
688 ttm_bo_move_sync_cleanup(struct ttm_buffer_object *bo, struct ttm_resource *new_mem) argument
714 ttm_bo_pipeline_gutting(struct ttm_buffer_object *bo) argument
[all...]
/linux-master/drivers/gpu/drm/xe/compat-i915-headers/
i915_gem_stolen.h
10 struct xe_bo *bo; member in struct:i915_stolen_fb
18 struct xe_bo *bo; local
25 bo = xe_bo_create_locked_range(xe, xe_device_get_root_tile(xe),
28 if (IS_ERR(bo)) {
29 err = PTR_ERR(bo);
30 bo = NULL;
33 err = xe_bo_pin(bo);
34 xe_bo_unlock_vm_held(bo);
37 xe_bo_put(fb->bo);
38 bo
[all...]
/linux-master/include/drm/ttm/
ttm_bo.h
78 * @type: The bo type.
115 * Members protected by the bo::resv::reserved lock.
132 * and the bo::lock when written to. Can be read with
145 * Object describing a kernel mapping. Since a TTM bo may be located
160 struct ttm_buffer_object *bo; member in struct:ttm_bo_kmap_obj
191 * @bo: The buffer object.
193 static inline void ttm_bo_get(struct ttm_buffer_object *bo) argument
195 kref_get(&bo->kref);
201 * @bo: The buffer object.
206 * Returns: @bo i
209 ttm_bo_get_unless_zero(struct ttm_buffer_object *bo) argument
238 ttm_bo_reserve(struct ttm_buffer_object *bo, bool interruptible, bool no_wait, struct ww_acquire_ctx *ticket) argument
273 ttm_bo_reserve_slowpath(struct ttm_buffer_object *bo, bool interruptible, struct ww_acquire_ctx *ticket) argument
291 ttm_bo_move_to_lru_tail_unlocked(struct ttm_buffer_object *bo) argument
298 ttm_bo_assign_mem(struct ttm_buffer_object *bo, struct ttm_resource *new_mem) argument
312 ttm_bo_move_null(struct ttm_buffer_object *bo, struct ttm_resource *new_mem) argument
326 ttm_bo_unreserve(struct ttm_buffer_object *bo) argument
[all...]
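
ttm_bo.h defines both halves used everywhere above: ttm_bo_get() is kref_get() on bo->kref, and ttm_bo_reserve() is the ww_mutex-based lock on the reservation object (a NULL ticket reserves a single BO without a ww acquire context). A combined bracket; example_ttm_bracket() is an invented name:

#include <drm/ttm/ttm_bo.h>

/* Reference + reservation bracket on a raw TTM BO. */
static int example_ttm_bracket(struct ttm_buffer_object *bo)
{
	int ret;

	ttm_bo_get(bo);					/* kref_get(&bo->kref) */
	ret = ttm_bo_reserve(bo, true, false, NULL);	/* interruptible, may wait */
	if (ret) {
		ttm_bo_put(bo);
		return ret;
	}
	/* ... bo->resource and placement are stable here ... */
	ttm_bo_unreserve(bo);
	ttm_bo_put(bo);
	return 0;
}
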
/linux-master/drivers/gpu/drm/xe/compat-i915-headers/gem/
i915_gem_mman.h
12 static inline int i915_gem_fb_mmap(struct xe_bo *bo, struct vm_area_struct *vma) argument
14 return drm_gem_prime_mmap(&bo->ttm.base, vma);
i915_gem_object.h
15 static inline dma_addr_t i915_gem_object_get_dma_address(const struct xe_bo *bo, pgoff_t n) argument
22 static inline bool i915_gem_object_is_tiled(const struct xe_bo *bo) argument
28 static inline bool i915_gem_object_is_userptr(const struct xe_bo *bo) argument
34 static inline int i915_gem_object_read_from_page(struct xe_bo *bo, argument
42 ret = xe_bo_lock(bo, true);
46 ret = ttm_bo_kmap(&bo->ttm, ofs >> PAGE_SHIFT, 1, &map);
60 xe_bo_unlock(bo);
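
The i915_gem_object_read_from_page() shim above reads through a one-page ttm_bo_kmap(). An expanded sketch of the same pattern, assuming the read stays within a single page; example_read_page() is an invented name and ttm_kmap_obj_virtual() is the standard TTM accessor for the mapping:

/* Read size bytes at byte offset ofs from an xe_bo via a one-page kmap. */
static int example_read_page(struct xe_bo *bo, u64 ofs, void *dst, int size)
{
	struct ttm_bo_kmap_obj map;
	bool is_iomem;
	void *virt;
	int ret;

	ret = xe_bo_lock(bo, true);
	if (ret)
		return ret;

	ret = ttm_bo_kmap(&bo->ttm, ofs >> PAGE_SHIFT, 1, &map);
	if (!ret) {
		virt = ttm_kmap_obj_virtual(&map, &is_iomem);
		memcpy(dst, virt + (ofs & ~PAGE_MASK), size);
		ttm_bo_kunmap(&map);
	}

	xe_bo_unlock(bo);
	return ret;
}
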
/linux-master/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
58 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); local
60 amdgpu_bo_kunmap(bo);
62 if (bo->tbo.base.import_attach)
63 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
64 drm_gem_object_release(&bo->tbo.base);
65 amdgpu_bo_unref(&bo->parent);
66 kvfree(bo);
71 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); local
74 ubo = to_amdgpu_bo_user(bo);
82 struct amdgpu_bo *shadow_bo = ttm_to_amdgpu_bo(tbo), *bo; local
107 amdgpu_bo_is_amdgpu_bo(struct ttm_buffer_object *bo) argument
435 amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr, void **cpu_addr) argument
547 struct amdgpu_bo *bo; local
774 amdgpu_bo_kmap(struct amdgpu_bo *bo, void **ptr) argument
813 amdgpu_bo_kptr(struct amdgpu_bo *bo) argument
826 amdgpu_bo_kunmap(struct amdgpu_bo *bo) argument
841 amdgpu_bo_ref(struct amdgpu_bo *bo) argument
856 amdgpu_bo_unref(struct amdgpu_bo **bo) argument
890 amdgpu_bo_pin_restricted(struct amdgpu_bo *bo, u32 domain, u64 min_offset, u64 max_offset) argument
997 amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain) argument
1013 amdgpu_bo_unpin(struct amdgpu_bo *bo) argument
1117 amdgpu_bo_set_tiling_flags(struct amdgpu_bo *bo, u64 tiling_flags) argument
1140 amdgpu_bo_get_tiling_flags(struct amdgpu_bo *bo, u64 *tiling_flags) argument
1165 amdgpu_bo_set_metadata(struct amdgpu_bo *bo, void *metadata, u32 metadata_size, uint64_t flags) argument
1212 amdgpu_bo_get_metadata(struct amdgpu_bo *bo, void *buffer, size_t buffer_size, uint32_t *metadata_size, uint64_t *flags) argument
1249 amdgpu_bo_move_notify(struct ttm_buffer_object *bo, bool evict) argument
1271 amdgpu_bo_get_memory(struct amdgpu_bo *bo, struct amdgpu_mem_stats *stats) argument
1332 amdgpu_bo_release_notify(struct ttm_buffer_object *bo) argument
1383 amdgpu_bo_fault_reserve_notify(struct ttm_buffer_object *bo) argument
1431 amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence, bool shared) argument
1486 amdgpu_bo_sync_wait(struct amdgpu_bo *bo, void *owner, bool intr) argument
1504 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo) argument
1523 amdgpu_bo_gpu_offset_no_check(struct amdgpu_bo *bo) argument
1578 amdgpu_bo_print_info(int id, struct amdgpu_bo *bo, struct seq_file *m) argument
[all...]
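
amdgpu's pin takes a target domain, and pin/unpin must run with the BO reserved. A hedged pin/map/unpin sketch; amdgpu_bo_reserve()/amdgpu_bo_unreserve() come from amdgpu_object.h (not part of this excerpt), AMDGPU_GEM_DOMAIN_GTT is the usual CPU-visible choice, and example_amdgpu_pin_map() is an invented name:

/* Reserve, pin to GTT, CPU-map, then release everything in reverse. */
static int example_amdgpu_pin_map(struct amdgpu_bo *bo)
{
	void *cpu;
	int r;

	r = amdgpu_bo_reserve(bo, false);
	if (r)
		return r;
	r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
	if (r)
		goto out_unreserve;
	r = amdgpu_bo_kmap(bo, &cpu);
	if (!r) {
		/* ... CPU access through cpu ... */
		amdgpu_bo_kunmap(bo);
	}
	amdgpu_bo_unpin(bo);
out_unreserve:
	amdgpu_bo_unreserve(bo);
	return r;
}
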
/linux-master/drivers/gpu/drm/vc4/
vc4_bo.c
134 struct vc4_bo *bo = to_vc4_bo(gem_obj); local
144 vc4->bo_labels[bo->label].num_allocated--;
145 vc4->bo_labels[bo->label].size_allocated -= gem_obj->size;
147 if (vc4->bo_labels[bo->label].num_allocated == 0 &&
148 is_user_label(bo->label)) {
154 kfree(vc4->bo_labels[bo->label].name);
155 vc4->bo_labels[bo->label].name = NULL;
158 bo->label = label;
166 static void vc4_bo_destroy(struct vc4_bo *bo) argument
168 struct drm_gem_object *obj = &bo
186 vc4_bo_remove_from_cache(struct vc4_bo *bo) argument
242 struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list, local
250 vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo) argument
264 vc4_bo_remove_from_purgeable_pool_locked(struct vc4_bo *bo) argument
288 vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo) argument
299 struct vc4_bo *bo = to_vc4_bo(obj); local
318 struct vc4_bo *bo = list_first_entry(&vc4->purgeable.list, local
365 struct vc4_bo *bo = NULL; local
397 struct vc4_bo *bo; local
428 struct vc4_bo *bo; local
496 struct vc4_bo *bo = NULL; local
526 struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list, local
547 struct vc4_bo *bo = to_vc4_bo(gem_bo); local
620 vc4_bo_inc_usecnt(struct vc4_bo *bo) argument
660 vc4_bo_dec_usecnt(struct vc4_bo *bo) argument
689 struct vc4_bo *bo = to_vc4_bo(obj); local
720 struct vc4_bo *bo = to_vc4_bo(obj); local
734 struct vc4_bo *bo = to_vc4_bo(obj); local
783 struct vc4_bo *bo = NULL; local
839 struct vc4_bo *bo = NULL; local
918 struct vc4_bo *bo; local
965 struct vc4_bo *bo; local
[all...]
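
vc4 layers a use count on top of normal refcounting so a purgeable BO cannot be purged while it is in active use. A minimal bracket over the vc4_bo_inc_usecnt()/vc4_bo_dec_usecnt() pair indexed above; example_vc4_use() is an invented name:

/* Hold a purgeable vc4 BO resident across an access window. */
static int example_vc4_use(struct vc4_bo *bo)
{
	int ret;

	ret = vc4_bo_inc_usecnt(bo);	/* fails if the BO was already purged */
	if (ret)
		return ret;
	/* ... the BO cannot be purged in this window ... */
	vc4_bo_dec_usecnt(bo);
	return 0;
}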
