Searched refs:vm_ctx (Results 1 - 10 of 10) sorted by relevance

/linux-master/drivers/gpu/drm/imagination/
pvr_vm.h
32 bool pvr_device_addr_and_size_are_valid(struct pvr_vm_context *vm_ctx,
38 int pvr_vm_map(struct pvr_vm_context *vm_ctx,
41 int pvr_vm_unmap(struct pvr_vm_context *vm_ctx, u64 device_addr, u64 size);
43 dma_addr_t pvr_vm_get_page_table_root_addr(struct pvr_vm_context *vm_ctx);
44 struct dma_resv *pvr_vm_get_dma_resv(struct pvr_vm_context *vm_ctx);
53 struct pvr_gem_object *pvr_vm_find_gem_object(struct pvr_vm_context *vm_ctx,
59 pvr_vm_get_fw_mem_context(struct pvr_vm_context *vm_ctx);
62 struct pvr_vm_context *pvr_vm_context_get(struct pvr_vm_context *vm_ctx);
63 bool pvr_vm_context_put(struct pvr_vm_context *vm_ctx);
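
The pvr_vm.h matches above make up the mapping interface of a VM context. Below is a minimal caller sketch assuming only the signatures listed here; the wrapper function, its name and its error handling are illustrative and not part of the driver.

#include <linux/errno.h>
#include <linux/types.h>

#include "pvr_vm.h"

/*
 * Hypothetical helper: validate a device-virtual range, map a GEM object
 * into it, then unmap it again. Only the pvr_* calls come from pvr_vm.h.
 */
static int example_map_then_unmap(struct pvr_vm_context *vm_ctx,
				  struct pvr_gem_object *pvr_obj,
				  u64 pvr_obj_offset, u64 device_addr,
				  u64 size)
{
	int err;

	/* Reject ranges the VM context cannot address (pvr_vm.h:32). */
	if (!pvr_device_addr_and_size_are_valid(vm_ctx, device_addr, size))
		return -EINVAL;

	/* Map @size bytes of @pvr_obj at @pvr_obj_offset to @device_addr. */
	err = pvr_vm_map(vm_ctx, pvr_obj, pvr_obj_offset, device_addr, size);
	if (err)
		return err;

	/* Tear the mapping down again (pvr_vm.h:41). */
	return pvr_vm_unmap(vm_ctx, device_addr, size);
}
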
pvr_vm.c
73 struct pvr_vm_context *pvr_vm_context_get(struct pvr_vm_context *vm_ctx) argument
75 if (vm_ctx)
76 kref_get(&vm_ctx->ref_count);
78 return vm_ctx;
84 * @vm_ctx: Target VM context.
86 dma_addr_t pvr_vm_get_page_table_root_addr(struct pvr_vm_context *vm_ctx) argument
88 return pvr_mmu_get_root_table_dma_addr(vm_ctx->mmu_ctx);
93 * @vm_ctx: Target VM context.
100 struct dma_resv *pvr_vm_get_dma_resv(struct pvr_vm_context *vm_ctx) argument
102 return vm_ctx
135 struct pvr_vm_context *vm_ctx; member in struct:pvr_vm_bind_op
222 pvr_vm_bind_op_map_init(struct pvr_vm_bind_op *bind_op, struct pvr_vm_context *vm_ctx, struct pvr_gem_object *pvr_obj, u64 offset, u64 device_addr, u64 size) argument
292 pvr_vm_bind_op_unmap_init(struct pvr_vm_bind_op *bind_op, struct pvr_vm_context *vm_ctx, u64 device_addr, u64 size) argument
485 pvr_device_addr_and_size_are_valid(struct pvr_vm_context *vm_ctx, u64 device_addr, u64 size) argument
510 struct pvr_vm_context *vm_ctx = priv; local
536 struct pvr_vm_context *vm_ctx; local
605 struct pvr_vm_context *vm_ctx = local
635 struct pvr_vm_context *vm_ctx; local
656 pvr_vm_context_put(struct pvr_vm_context *vm_ctx) argument
675 struct pvr_vm_context *vm_ctx; local
721 pvr_vm_map(struct pvr_vm_context *vm_ctx, struct pvr_gem_object *pvr_obj, u64 pvr_obj_offset, u64 device_addr, u64 size) argument
773 pvr_vm_unmap(struct pvr_vm_context *vm_ctx, u64 device_addr, u64 size) argument
1048 pvr_vm_find_gem_object(struct pvr_vm_context *vm_ctx, u64 device_addr, u64 *mapped_offset_out, u64 *mapped_size_out) argument
1087 pvr_vm_get_fw_mem_context(struct pvr_vm_context *vm_ctx) argument
[all...]
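
pvr_vm.c:73-78 show that pvr_vm_context_get() is NULL-tolerant, takes a kref and returns its argument, and pvr_vm.h:63 pairs it with pvr_vm_context_put(), which releases the reference. A sketch of the resulting borrow pattern follows; the caller is hypothetical.

#include "pvr_vm.h"

/* Hypothetical caller keeping a VM context alive across some work. */
static void example_borrow_vm_ctx(struct pvr_vm_context *vm_ctx)
{
	/* Take an extra reference; returns NULL if @vm_ctx was NULL. */
	struct pvr_vm_context *ref = pvr_vm_context_get(vm_ctx);

	if (!ref)
		return;

	/* ... safe to use @ref here, e.g. pvr_vm_get_dma_resv(ref) ... */

	/* Drop the reference taken above; this may free the context. */
	pvr_vm_context_put(ref);
}
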
pvr_drv.c
914 struct pvr_vm_context *vm_ctx; local
926 vm_ctx = pvr_vm_create_context(pvr_file->pvr_dev, true);
927 if (IS_ERR(vm_ctx)) {
928 err = PTR_ERR(vm_ctx);
935 vm_ctx,
946 pvr_vm_context_put(vm_ctx);
973 struct pvr_vm_context *vm_ctx; local
978 vm_ctx = xa_erase(&pvr_file->vm_ctx_handles, args->handle);
979 if (!vm_ctx)
982 pvr_vm_context_put(vm_ctx);
1016 struct pvr_vm_context *vm_ctx; local
1110 struct pvr_vm_context *vm_ctx; local
[all...]
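
The pvr_drv.c matches outline the ioctl-level lifetime: pvr_vm_create_context() returns a referenced context (or an ERR_PTR), the handle xarray keeps that reference, and destroying the handle is xa_erase() plus pvr_vm_context_put(). The sketch below reconstructs the destroy path under those assumptions; the function name and error code are illustrative, and the pvr_device.h include is only an assumption about where struct pvr_file is defined.

#include <linux/errno.h>
#include <linux/xarray.h>

#include "pvr_device.h"		/* struct pvr_file (assumed location) */
#include "pvr_vm.h"

/* Hypothetical mirror of the destroy path around pvr_drv.c:978-982. */
static int example_destroy_vm_context(struct pvr_file *pvr_file, u32 handle)
{
	struct pvr_vm_context *vm_ctx;

	/* Drop the handle -> context translation; NULL means a bad handle. */
	vm_ctx = xa_erase(&pvr_file->vm_ctx_handles, handle);
	if (!vm_ctx)
		return -EINVAL;

	/* Release the reference the handle was holding. */
	pvr_vm_context_put(vm_ctx);
	return 0;
}
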
pvr_context.c
317 ctx->vm_ctx = pvr_vm_context_lookup(pvr_file, args->vm_context_handle);
318 if (IS_ERR(ctx->vm_ctx)) {
319 err = PTR_ERR(ctx->vm_ctx);
369 pvr_vm_context_put(ctx->vm_ctx);
387 pvr_vm_context_put(ctx->vm_ctx);
pvr_free_list.c
54 struct pvr_vm_context *vm_ctx; local
71 vm_ctx = pvr_vm_context_lookup(pvr_file, args->vm_context_handle);
72 if (!vm_ctx)
75 free_list_obj = pvr_vm_find_gem_object(vm_ctx, args->free_list_gpu_addr,
98 pvr_vm_context_put(vm_ctx);
106 pvr_vm_context_put(vm_ctx);
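
pvr_free_list.c combines two of the calls above: pvr_vm_context_lookup() turns a userspace handle into a referenced context (NULL on a stale handle, per line 72), pvr_vm_find_gem_object() (pvr_vm.c:1048) resolves a GPU address back to the GEM object mapped there, and pvr_vm_context_put() runs on every exit path. A hedged sketch of that pattern; the wrapper, its return convention and the pvr_device.h include are illustrative, and any refcount the lookup takes on the returned object is out of scope here.

#include "pvr_device.h"		/* struct pvr_file (assumed location) */
#include "pvr_vm.h"

/* Hypothetical helper: GPU VA -> GEM object within a handle's VM context. */
static struct pvr_gem_object *
example_gem_from_gpu_addr(struct pvr_file *pvr_file, u32 vm_context_handle,
			  u64 gpu_addr, u64 *offset_out, u64 *size_out)
{
	struct pvr_vm_context *vm_ctx;
	struct pvr_gem_object *obj;

	/* Handle -> referenced VM context; NULL if the handle is invalid. */
	vm_ctx = pvr_vm_context_lookup(pvr_file, vm_context_handle);
	if (!vm_ctx)
		return NULL;

	/* Object mapped at @gpu_addr, plus the mapped offset and size. */
	obj = pvr_vm_find_gem_object(vm_ctx, gpu_addr, offset_out, size_out);

	/* The lookup reference is no longer needed either way. */
	pvr_vm_context_put(vm_ctx);
	return obj;
}
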
pvr_context.h
39 /** @vm_ctx: Pointer to associated VM context. */
40 struct pvr_vm_context *vm_ctx; member in struct:pvr_context
pvr_queue.c
1021 struct pvr_fw_object *fw_mem_ctx_obj = pvr_vm_get_fw_mem_context(ctx->vm_ctx);
/linux-master/include/linux/
userfaultfd_k.h
150 struct vm_userfaultfd_ctx vm_ctx)
152 return vma->vm_userfaultfd_ctx.ctx == vm_ctx.ctx;
284 struct vm_userfaultfd_ctx vm_ctx)
149 is_mergeable_vm_userfaultfd_ctx(struct vm_area_struct *vma, struct vm_userfaultfd_ctx vm_ctx) argument
283 is_mergeable_vm_userfaultfd_ctx(struct vm_area_struct *vma, struct vm_userfaultfd_ctx vm_ctx) argument
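
The two userfaultfd_k.h definitions are the CONFIG_USERFAULTFD and !CONFIG_USERFAULTFD variants of the same predicate: a VMA is only mergeable with a neighbour, as far as userfaultfd is concerned, when both carry the same context, and the second definition is the config-off stub so callers need no #ifdef. A sketch of a typical call site; the wrapper below is illustrative, not the kernel's merge code.

#include <linux/mm_types.h>
#include <linux/userfaultfd_k.h>

/* Hypothetical predicate: can @prev and @next merge as far as uffd goes? */
static bool example_can_merge_uffd(struct vm_area_struct *prev,
				   struct vm_area_struct *next)
{
	/* Compare @next's per-VMA uffd context against @prev's VMA. */
	return is_mergeable_vm_userfaultfd_ctx(prev, next->vm_userfaultfd_ctx);
}
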
/linux-master/drivers/gpu/drm/amd/amdgpu/
amdgpu_trace.h
331 pid_t pid, uint64_t vm_ctx),
332 TP_ARGS(p, start, end, nptes, dst, incr, flags, pid, vm_ctx),
340 __field(u64, vm_ctx)
353 __entry->vm_ctx = vm_ctx;
362 TP_printk("pid:%u vm_ctx:0x%llx start:0x%010llx end:0x%010llx,"
364 __entry->vm_ctx, __entry->start, __entry->end,
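
The amdgpu_trace.h match is a TRACE_EVENT that records a 64-bit vm_ctx identifier alongside the page-table update parameters. The header below is a stripped-down sketch of the same __field/__entry/TP_printk plumbing with a hypothetical event name and trace system; it is not the amdgpu event itself.

/* Hypothetical trace header; the file would be named example_trace.h. */
#undef TRACE_SYSTEM
#define TRACE_SYSTEM example_trace

#if !defined(_EXAMPLE_TRACE_H) || defined(TRACE_HEADER_MULTI_READ)
#define _EXAMPLE_TRACE_H

#include <linux/tracepoint.h>

TRACE_EVENT(example_vm_update,
	/* Prototype and arguments of the tracepoint call. */
	TP_PROTO(pid_t pid, uint64_t vm_ctx, uint64_t start, uint64_t end),
	TP_ARGS(pid, vm_ctx, start, end),

	/* Fields stored in the ring buffer for each hit. */
	TP_STRUCT__entry(
		__field(pid_t, pid)
		__field(u64, vm_ctx)
		__field(u64, start)
		__field(u64, end)
	),

	/* Copy the arguments into the ring-buffer entry. */
	TP_fast_assign(
		__entry->pid = pid;
		__entry->vm_ctx = vm_ctx;
		__entry->start = start;
		__entry->end = end;
	),

	/* Human-readable rendering, mirroring the amdgpu format string. */
	TP_printk("pid:%u vm_ctx:0x%llx start:0x%010llx end:0x%010llx",
		  __entry->pid, __entry->vm_ctx, __entry->start, __entry->end)
);

#endif /* _EXAMPLE_TRACE_H */

/* Outside the guard: tell define_trace.h where this header lives. */
#undef TRACE_INCLUDE_PATH
#define TRACE_INCLUDE_PATH .
#undef TRACE_INCLUDE_FILE
#define TRACE_INCLUDE_FILE example_trace
#include <trace/define_trace.h>

As with any trace header, exactly one .c file has to define CREATE_TRACE_POINTS before including it so the tracepoints are actually emitted.
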
/linux-master/fs/
userfaultfd.c
731 struct vm_userfaultfd_ctx *vm_ctx)
741 vm_ctx->ctx = ctx;
754 void mremap_userfaultfd_complete(struct vm_userfaultfd_ctx *vm_ctx, argument
758 struct userfaultfd_ctx *ctx = vm_ctx->ctx;
730 mremap_userfaultfd_prep(struct vm_area_struct *vma, struct vm_userfaultfd_ctx *vm_ctx) argument

Completed in 146 milliseconds