Searched refs:vm (Results 226 - 250 of 485) sorted by relevance

/linux-master/drivers/gpu/drm/i915/
i915_vma.h
45 struct i915_address_space *vm,
80 return i915_is_dpt(vma->vm);
185 return i915_vm_to_ggtt(vma->vm)->pin_bias;
209 struct i915_address_space *vm,
214 GEM_BUG_ON(view && !i915_is_ggtt_or_dpt(vm));
216 cmp = ptrdiff(vma->vm, vm);
263 void vma_invalidate_tlb(struct i915_address_space *vm, u32 *tlb);
208 i915_vma_compare(struct i915_vma *vma, struct i915_address_space *vm, const struct i915_gtt_view *view) argument
/linux-master/drivers/gpu/drm/i915/gt/
intel_ring.c
57 if (i915_vma_is_map_and_fenceable(vma) && !HAS_LLC(vma->vm->i915)) {
60 int type = intel_gt_coherent_map_type(vma->vm->gt, vma->obj, false);
102 if (i915_vma_is_map_and_fenceable(vma) && !HAS_LLC(vma->vm->i915))
113 struct i915_address_space *vm = &ggtt->vm; local
114 struct drm_i915_private *i915 = vm->i915;
131 if (vm->has_read_only)
134 vma = i915_vma_instance(obj, vm, NULL);
selftest_reset.c
87 ggtt->vm.insert_page(&ggtt->vm, dma,
111 ggtt->vm.clear_range(&ggtt->vm, ggtt->error_capture.start, PAGE_SIZE);
130 ggtt->vm.insert_page(&ggtt->vm, dma,
160 ggtt->vm.clear_range(&ggtt->vm, ggtt->error_capture.start, PAGE_SIZE);
intel_ggtt_fencing.c
51 return fence->ggtt->vm.i915;
56 return fence->ggtt->vm.gt->uncore;
296 lockdep_assert_held(&vma->vm->mutex);
355 if (intel_has_pending_fb_unpin(ggtt->vm.i915))
363 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm);
368 lockdep_assert_held(&vma->vm->mutex);
434 assert_rpm_wakelock_held(vma->vm->gt->uncore->rpm);
437 err = mutex_lock_interruptible(&vma->vm->mutex);
442 mutex_unlock(&vma->vm->mutex);
460 lockdep_assert_held(&ggtt->vm
[all...]
/linux-master/drivers/gpu/drm/i915/gem/
i915_gem_context_types.h
194 /** @vm: See &i915_gem_context.vm */
195 struct i915_address_space *vm; member in struct:i915_gem_proto_context
276 * @vm: unique address space (GTT)
284 struct i915_address_space *vm; member in struct:i915_gem_context
397 * per vm, which may be one per context or shared with the global GTT)
/linux-master/tools/testing/selftests/kvm/
get-reg-list.c
128 static struct kvm_vcpu *vcpu_config_get_vcpu(struct vcpu_reg_list *c, struct kvm_vm *vm) argument
134 vcpu = __vm_vcpu_add(vm, 0);
140 static struct kvm_vcpu *vcpu_config_get_vcpu(struct vcpu_reg_list *c, struct kvm_vm *vm) argument
142 return __vm_vcpu_add(vm, 0);
169 struct kvm_vm *vm; local
174 vm = vm_create_barebones();
175 vcpu = vcpu_config_get_vcpu(c, vm);
307 kvm_vm_free(vm);
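The get-reg-list.c hits above trace the usual kvm selftest lifecycle: create a bare VM, add vCPU 0, exercise it, then free the VM. A minimal sketch of that pattern, assuming the standard selftest harness (kvm_util.h); the comments are added here for illustration:

    struct kvm_vm *vm;
    struct kvm_vcpu *vcpu;

    vm = vm_create_barebones();      /* VM with no memory slots or vCPUs yet */
    vcpu = __vm_vcpu_add(vm, 0);     /* add vCPU id 0, as in the hits above */
    /* ... query the register list and run assertions ... */
    kvm_vm_free(vm);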
memslot_perf_test.c
84 struct kvm_vm *vm; member in struct:vm_data
190 uint32_t guest_page_size = data->vm->page_size;
221 uint32_t guest_page_size = data->vm->page_size;
235 data->vm = NULL;
260 uint32_t guest_page_size = data->vm->page_size;
294 data->vm = __vm_create_with_one_vcpu(&data->vcpu, mempages, guest_code);
295 TEST_ASSERT(data->vm->page_size == guest_page_size, "Invalid VM page size");
322 vm_userspace_mem_region_add(data->vm, VM_MEM_SRC_ANONYMOUS,
337 gpa = vm_phy_pages_alloc(data->vm, npages, guest_addr, slot);
341 data->hva_slots[slot - 1] = addr_gpa2hva(data->vm, guest_add
[all...]
/linux-master/drivers/gpu/drm/lima/
lima_sched.h
23 struct lima_vm *vm; member in struct:lima_sched_task
90 struct lima_vm *vm);
/linux-master/drivers/gpu/drm/amd/amdgpu/
amdgpu_ib.c
55 * @vm: amdgpu_vm pointer
64 int amdgpu_ib_get(struct amdgpu_device *adev, struct amdgpu_vm *vm, argument
82 if (!vm)
134 struct amdgpu_vm *vm; local
152 vm = job->vm;
160 vm = NULL;
173 if (vm && !job->vmid && !ring->is_mes_queue) {
302 if (vm && ring->funcs->emit_switch_buffer)
amdgpu_vm_sdma.c
56 struct drm_sched_entity *entity = p->immediate ? &p->vm->immediate
57 : &p->vm->delayed;
101 r = amdgpu_sync_resv(p->adev, &sync, resv, sync_mode, p->vm);
124 ring = container_of(p->vm->delayed.rq->sched, struct amdgpu_ring,
135 swap(p->vm->last_unlocked, tmp);
138 dma_resv_add_fence(p->vm->root.bo->tbo.base.resv, f,
amdgpu_amdkfd_gpuvm.c
84 if (entry->bo_va->base.vm == avm)
375 struct amdgpu_vm *vm; local
388 vm = vm_bo->vm;
389 if (!vm)
392 info = vm->process_info;
467 static int vm_validate_pt_pd_bos(struct amdgpu_vm *vm, argument
470 struct amdgpu_bo *pd = vm->root.bo;
474 ret = amdgpu_vm_validate(adev, vm, ticket,
481 vm
486 vm_update_pds(struct amdgpu_vm *vm, struct amdgpu_sync *sync) argument
867 kfd_mem_attach(struct amdgpu_device *adev, struct kgd_mem *mem, struct amdgpu_vm *vm, bool is_aql) argument
1141 reserve_bo_and_vm(struct kgd_mem *mem, struct amdgpu_vm *vm, struct bo_vm_reservation_context *ctx) argument
1182 reserve_bo_and_cond_vms(struct kgd_mem *mem, struct amdgpu_vm *vm, enum bo_vm_match map_type, struct bo_vm_reservation_context *ctx) argument
1250 struct amdgpu_vm *vm = bo_va->base.vm; local
1367 init_kfd_vm(struct amdgpu_vm *vm, void **process_info, struct dma_fence **ef) argument
1550 amdgpu_amdkfd_gpuvm_destroy_cb(struct amdgpu_device *adev, struct amdgpu_vm *vm) argument
2095 struct amdgpu_vm *vm; local
[all...]
amdgpu_job.c
97 int amdgpu_job_alloc(struct amdgpu_device *adev, struct amdgpu_vm *vm, argument
113 (*job)->vm = vm;
116 (*job)->generation = amdgpu_vm_generation(adev, vm);
273 while (!fence && job->vm && !job->vmid) {
274 r = amdgpu_vmid_grab(job->vm, ring, job, &fence);
302 if (job->generation != amdgpu_vm_generation(adev, job->vm) ||
/linux-master/tools/testing/selftests/kvm/s390x/
memop.c
114 struct kvm_vm *vm; member in struct:test_info
125 printf("vm memop(");
169 return __vm_ioctl(info.vm, KVM_S390_MEM_OP, ksmo);
203 __desc.gaddr = addr_gva2gpa(__info.vm, __desc.gaddr_v); \
239 struct test_info vm; member in struct:test_default
252 t.vm = (struct test_info) { t.kvm_vm, NULL };
334 MOP(test->vm, ABSOLUTE, CMPXCHG, new + offset,
339 MOP(test->vm, ABSOLUTE, READ, mem2, 16, GADDR_V(mem2));
347 MOP(test->vm, ABSOLUTE, CMPXCHG, new + offset,
352 MOP(test->vm, ABSOLUT
[all...]
/linux-master/drivers/gpu/drm/xe/
xe_preempt_fence.c
28 xe_vm_queue_rebind_worker(q->vm);
52 queue_work(q->vm->xe->preempt_fence_wq, &pfence->preempt_work);
xe_sync.c
281 * @vm: VM
292 struct xe_exec_queue *q, struct xe_vm *vm)
299 lockdep_assert_held(&vm->lock);
311 fence = xe_exec_queue_last_fence_get(q, vm);
325 fences[current_fence++] = xe_exec_queue_last_fence_get(q, vm);
327 vm->composite_fence_ctx,
328 vm->composite_fence_seqno++,
331 --vm->composite_fence_seqno;
291 xe_sync_in_fence_get(struct xe_sync_entry *sync, int num_sync, struct xe_exec_queue *q, struct xe_vm *vm) argument
/linux-master/tools/testing/selftests/kvm/x86_64/
nested_exceptions_test.c
222 struct kvm_vm *vm; local
227 vm = vm_create_with_one_vcpu(&vcpu, l1_guest_code);
228 vm_enable_cap(vm, KVM_CAP_EXCEPTION_PAYLOAD, -2ul);
231 vcpu_alloc_svm(vm, &nested_test_data_gva);
233 vcpu_alloc_vmx(vm, &nested_test_data_gva);
289 kvm_vm_free(vm);
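Similarly, the nested_exceptions_test.c hits show the one-vCPU variant of the same flow. A hedged sketch of that shape (l1_guest_code and the capability call come from the hits above; the ordering comments are assumptions):

    struct kvm_vcpu *vcpu;
    struct kvm_vm *vm;

    vm = vm_create_with_one_vcpu(&vcpu, l1_guest_code);   /* VM + vCPU in one step */
    vm_enable_cap(vm, KVM_CAP_EXCEPTION_PAYLOAD, -2ul);   /* as matched above */
    /* ... allocate nested (SVM/VMX) state, run the vCPU, check exceptions ... */
    kvm_vm_free(vm);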
/linux-master/drivers/gpu/drm/sprd/
sprd_dsi.c
450 struct videomode *vm = &ctx->vm; local
470 video_size = round_video_size(coding, vm->hactive);
473 ratio_x1000 = byte_clk * 1000 / (vm->pixelclock / 1000);
474 hline = vm->hactive + vm->hsync_len + vm->hfront_porch +
475 vm->hback_porch;
485 byte_cycle = vm->hsync_len * ratio_x1000 / 1000;
487 byte_cycle = vm
[all...]
/linux-master/include/linux/
vmalloc.h
79 struct vm_struct *vm; /* in "busy" tree */ member in union:vmap_area::__anon1262
243 struct vm_struct *vm = find_vm_area(addr); local
245 if (vm)
246 vm->flags |= VM_FLUSH_RESET_PERMS;
261 extern __init void vm_area_add_early(struct vm_struct *vm);
262 extern __init void vm_area_register_early(struct vm_struct *vm, size_t align);
/linux-master/mm/kasan/
shadow.c
255 struct vm_struct *vm; local
269 vm = find_vm_area((void *)shadow_start);
270 if (vm)
608 struct vm_struct *vm = find_vm_area(addr); local
610 vm->flags |= VM_KASAN;
613 if (vm->flags & VM_DEFER_KMEMLEAK)
614 kmemleak_vmalloc(vm, size, gfp_mask);
622 void kasan_free_module_shadow(const struct vm_struct *vm) argument
627 if (vm->flags & VM_KASAN)
628 vfree(kasan_mem_to_shadow(vm
[all...]
/linux-master/drivers/gpu/drm/
drm_gpuvm.c
629 * drm_gpuva_map(ctx->vm, ctx->new_va, &op->map);
818 __drm_gpuvm_bo_list_add((__vm_bo)->vm, \
819 __lock ? &(__vm_bo)->vm->__list_name.lock : \
822 &(__vm_bo)->vm->__list_name.list)
847 __drm_gpuvm_bo_list_del((__vm_bo)->vm, \
848 __lock ? &(__vm_bo)->vm->__list_name.lock : \
862 __drm_gpuvm_bo_list_del((__vm_bo)->vm, \
863 __lock ? &(__vm_bo)->vm->__list_name.lock : \
1248 struct drm_gpuvm *gpuvm = vm_exec->vm;
1340 struct drm_gpuvm *gpuvm = vm_exec->vm;
2427 struct drm_gpuvm *vm; member in struct:__anon510
2523 struct drm_gpuvm *vm; member in struct:__anon511
2586 struct drm_gpuvm *vm; member in struct:__anon512
[all...]
/linux-master/drivers/video/fbdev/core/
fbmon.c
1311 int fb_videomode_from_videomode(const struct videomode *vm, argument
1316 fbmode->xres = vm->hactive;
1317 fbmode->left_margin = vm->hback_porch;
1318 fbmode->right_margin = vm->hfront_porch;
1319 fbmode->hsync_len = vm->hsync_len;
1321 fbmode->yres = vm->vactive;
1322 fbmode->upper_margin = vm->vback_porch;
1323 fbmode->lower_margin = vm->vfront_porch;
1324 fbmode->vsync_len = vm->vsync_len;
1327 fbmode->pixclock = vm
1383 struct videomode vm; local
[all...]
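For context, fb_videomode_from_videomode() maps generic struct videomode timings onto the fbdev struct fb_videomode fields matched above. A hypothetical caller (the 1024x768@60 timing values are illustrative, not taken from the file):

    struct videomode vm = {
        .hactive = 1024, .vactive = 768,
        .hfront_porch = 24, .hsync_len = 136, .hback_porch = 160,
        .vfront_porch = 3, .vsync_len = 6, .vback_porch = 29,
        .pixelclock = 65000000,
    };
    struct fb_videomode fbmode;

    fb_videomode_from_videomode(&vm, &fbmode);   /* fills xres/yres, margins, sync lengths */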
/linux-master/arch/arm64/kvm/hyp/nvhe/
mem_protect.c
31 static void guest_lock_component(struct pkvm_hyp_vm *vm) argument
33 hyp_spin_lock(&vm->lock);
34 current_vm = vm;
37 static void guest_unlock_component(struct pkvm_hyp_vm *vm) argument
40 hyp_spin_unlock(&vm->lock);
232 int kvm_guest_prepare_stage2(struct pkvm_hyp_vm *vm, void *pgd) argument
234 struct kvm_s2_mmu *mmu = &vm->kvm.arch.mmu;
239 ret = hyp_pool_init(&vm->pool, hyp_virt_to_pfn(pgd), nr_pages, 0);
243 hyp_spin_lock_init(&vm->lock);
244 vm
269 reclaim_guest_pages(struct pkvm_hyp_vm *vm, struct kvm_hyp_memcache *mc) argument
[all...]
/linux-master/drivers/gpu/drm/i915/gem/selftests/
huge_pages.c
32 struct i915_address_space *vm; local
37 vm = ctx->vm;
38 if (vm)
39 WRITE_ONCE(vm->scrub_64K, true);
364 struct drm_i915_private *i915 = vma->vm->i915;
425 struct drm_i915_private *i915 = ppgtt->vm.i915;
461 vma = i915_vma_instance(obj, &ppgtt->vm, NULL);
501 struct drm_i915_private *i915 = ppgtt->vm.i915;
529 vma = i915_vma_instance(obj, &ppgtt->vm, NUL
717 struct i915_address_space *vm; local
862 struct i915_address_space *vm; local
1618 struct i915_address_space *vm; local
1762 struct i915_address_space *vm; local
1833 struct i915_address_space *vm; local
[all...]
/linux-master/drivers/video/fbdev/
sh7760fb.c
201 struct fb_videomode *vm = par->pd->def_mode; local
211 if (par->rot && (vm->xres > 320)) {
217 hsynp = vm->right_margin + vm->xres;
218 hsynw = vm->hsync_len;
219 htcn = vm->left_margin + hsynp + hsynw;
220 hdcn = vm->xres;
221 vsynp = vm->lower_margin + vm->yres;
222 vsynw = vm
[all...]
/linux-master/arch/s390/kernel/
sysinfo.c
256 if (!convert_ext_name(info->vm[lvl].evmne, info->ext_names[lvl], len))
264 if (uuid_is_null(&info->vm[i].uuid))
266 seq_printf(m, "VM%02d UUID: %pUb\n", i, &info->vm[i].uuid);
276 EBCASC(info->vm[i].name, sizeof(info->vm[i].name));
277 EBCASC(info->vm[i].cpi, sizeof(info->vm[i].cpi));
279 seq_printf(m, "VM%02d Name: %-8.8s\n", i, info->vm[i].name);
280 seq_printf(m, "VM%02d Control Program: %-16.16s\n", i, info->vm[i].cpi);
281 seq_printf(m, "VM%02d Adjustment: %d\n", i, info->vm[
[all...]
