Searched refs:vma (Results 401 - 425 of 1013) sorted by relevance


/linux-master/arch/loongarch/kernel/
smp.c
639 struct vm_area_struct *vma; member in struct:flush_tlb_data
648 local_flush_tlb_range(fd->vma, fd->addr1, fd->addr2);
651 void flush_tlb_range(struct vm_area_struct *vma, unsigned long start, unsigned long end) argument
653 struct mm_struct *mm = vma->vm_mm;
658 .vma = vma,
671 local_flush_tlb_range(vma, start, end);
697 local_flush_tlb_page(fd->vma, fd->addr1);
700 void flush_tlb_page(struct vm_area_struct *vma, unsigned long page) argument
703 if ((atomic_read(&vma
[all...]
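The smp.c hits above show the usual SMP TLB-shootdown shape: flush_tlb_range() packs the vma and the address range into a flush_tlb_data struct for the cross-CPU call, with a local fallback path calling local_flush_tlb_range() directly. A minimal userspace sketch of that dispatch pattern; all names here (vm_area, flush_ctx, run_on_each_cpu) are stand-ins, not kernel APIs:

#include <stdio.h>

/* Stand-ins for vm_area_struct and the per-range flush context. */
struct vm_area { unsigned long start, end; };
struct flush_ctx { struct vm_area *vma; unsigned long addr1, addr2; };

/* Local flush: in the kernel this would touch the TLB; here it just logs. */
static void do_flush_local(struct vm_area *vma, unsigned long a1, unsigned long a2)
{
    printf("flush [%#lx, %#lx) in vma [%#lx, %#lx)\n", a1, a2, vma->start, vma->end);
}

/* Pretend to run a callback on every CPU (the kernel uses an on_each_cpu()-style helper). */
static void run_on_each_cpu(void (*fn)(void *), void *arg, int ncpus)
{
    for (int cpu = 0; cpu < ncpus; cpu++)
        fn(arg);
}

static void flush_ipi(void *arg)
{
    struct flush_ctx *fd = arg;
    do_flush_local(fd->vma, fd->addr1, fd->addr2);
}

static void flush_range(struct vm_area *vma, unsigned long start, unsigned long end, int ncpus)
{
    if (ncpus > 1) {
        struct flush_ctx fd = { .vma = vma, .addr1 = start, .addr2 = end };
        run_on_each_cpu(flush_ipi, &fd, ncpus);
    } else {
        do_flush_local(vma, start, end);   /* single-CPU fast path */
    }
}

int main(void)
{
    struct vm_area vma = { 0x1000, 0x9000 };
    flush_range(&vma, 0x2000, 0x3000, 4);
    return 0;
}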
/linux-master/drivers/gpu/drm/i915/
i915_gpu_error.c
514 __find_vma(struct i915_vma_coredump *vma, const char *name) argument
516 while (vma) {
517 if (strcmp(vma->name, name) == 0)
518 return vma;
519 vma = vma->next;
528 return __find_vma(ee->vma, "batch");
616 const struct i915_vma_coredump *vma)
621 if (!vma)
625 engine ? engine->name : "global", vma
614 intel_gpu_error_print_vma(struct drm_i915_error_state_buf *m, const struct intel_engine_cs *engine, const struct i915_vma_coredump *vma) argument
814 const struct i915_vma_coredump *vma; local
1014 i915_vma_coredump_free(struct i915_vma_coredump *vma) argument
1468 capture_vma(struct intel_engine_capture_vma *next, struct i915_vma *vma, const char *name, gfp_t gfp) argument
1503 add_vma(struct intel_engine_coredump *ee, struct i915_vma_coredump *vma) argument
1513 create_vma_coredump(const struct intel_gt *gt, struct i915_vma *vma, const char *name, struct i915_vma_compress *compress) argument
1533 add_vma_coredump(struct intel_engine_coredump *ee, const struct intel_gt *gt, struct i915_vma *vma, const char *name, struct i915_vma_compress *compress) argument
1566 struct intel_engine_capture_vma *vma = NULL; local
1588 struct intel_engine_capture_vma *vma; local
[all...]
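__find_vma() in the hits above is a plain singly linked list walk keyed on the coredump name (the "batch" lookup at line 528 is one caller). A self-contained sketch of the same lookup, with made-up node and field names:

#include <stdio.h>
#include <string.h>

struct vma_dump {
    const char *name;
    struct vma_dump *next;
};

/* Walk the list until a node with a matching name is found, or return NULL. */
static struct vma_dump *find_dump(struct vma_dump *v, const char *name)
{
    while (v) {
        if (strcmp(v->name, name) == 0)
            return v;
        v = v->next;
    }
    return NULL;
}

int main(void)
{
    struct vma_dump ring = { "ring", NULL };
    struct vma_dump batch = { "batch", &ring };
    printf("%s\n", find_dump(&batch, "ring") ? "found" : "missing");
    return 0;
}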
/linux-master/drivers/gpu/drm/i915/gem/selftests/
i915_gem_context.c
445 struct i915_vma *vma; local
451 vma = i915_vma_instance(obj, ce->vm, NULL);
452 if (IS_ERR(vma))
453 return PTR_ERR(vma);
455 err = i915_vma_pin(vma, 0, 0, PIN_HIGH | PIN_USER);
467 err = igt_gpu_fill_dw(ce, vma,
472 i915_vma_unpin(vma);
914 struct i915_vma *vma,
919 GEM_BUG_ON(GRAPHICS_VER(vma->vm->i915) < 8);
927 *cmd++ = lower_32_bits(i915_vma_offset(vma));
913 rpcs_query_batch(struct drm_i915_gem_object *rpcs, struct i915_vma *vma, struct intel_engine_cs *engine) argument
948 struct i915_vma *vma; local
1516 struct i915_vma *vma; local
1605 struct i915_vma *vma; local
[all...]
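The selftest hits use the kernel's ERR_PTR convention: i915_vma_instance() returns either a valid pointer or an errno value encoded in the pointer, which callers test with IS_ERR() and unpack with PTR_ERR(). A userspace re-implementation of that encoding for illustration (get_vma_instance() is a made-up stand-in; the real macros live in include/linux/err.h):

#include <stdio.h>
#include <errno.h>

#define MAX_ERRNO 4095

/* Encode a negative errno as a pointer in the top 4095 values of the address space. */
static inline void *ERR_PTR(long error) { return (void *)error; }
static inline long PTR_ERR(const void *ptr) { return (long)ptr; }
static inline int IS_ERR(const void *ptr)
{
    return (unsigned long)ptr >= (unsigned long)-MAX_ERRNO;
}

/* Hypothetical allocator: returns a valid object or an encoded error. */
static void *get_vma_instance(int fail)
{
    static int dummy;
    return fail ? ERR_PTR(-ENOMEM) : &dummy;
}

int main(void)
{
    void *vma = get_vma_instance(1);
    if (IS_ERR(vma))
        printf("error: %ld\n", PTR_ERR(vma));   /* prints -12 (ENOMEM) */
    return 0;
}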
/linux-master/arch/arm64/kvm/
mmu.c
964 struct vm_area_struct *vma; local
967 vma = find_vma_intersection(current->mm, hva, reg_end);
968 if (!vma)
974 vm_start = max(hva, vma->vm_start);
975 vm_end = min(reg_end, vma->vm_end);
977 if (!(vma->vm_flags & VM_PFNMAP)) {
1315 static int get_vma_page_shift(struct vm_area_struct *vma, unsigned long hva) argument
1319 if (is_vm_hugetlb_page(vma) && !(vma->vm_flags & VM_PFNMAP))
1320 return huge_page_shift(hstate_vma(vma));
1372 kvm_vma_mte_allowed(struct vm_area_struct *vma) argument
1388 struct vm_area_struct *vma; local
1992 struct vm_area_struct *vma; local
[all...]
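The mmu.c hits walk a memslot range one vma at a time: find_vma_intersection() returns the first vma overlapping [hva, reg_end), the overlap is clamped with max()/min(), and flags such as VM_PFNMAP are then tested. A small sketch of just the clamping step, with stand-in types:

#include <stdio.h>

struct region { unsigned long start, end; };   /* stand-in for a vma */

static inline unsigned long max_ul(unsigned long a, unsigned long b) { return a > b ? a : b; }
static inline unsigned long min_ul(unsigned long a, unsigned long b) { return a < b ? a : b; }

int main(void)
{
    struct region vma = { 0x2000, 0x6000 };
    unsigned long hva = 0x1000, reg_end = 0x4000;

    /* Overlap of the requested range with the vma, as in the kvm loop. */
    unsigned long vm_start = max_ul(hva, vma.start);
    unsigned long vm_end   = min_ul(reg_end, vma.end);

    printf("overlap: [%#lx, %#lx)\n", vm_start, vm_end);   /* [0x2000, 0x4000) */
    return 0;
}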
/linux-master/fs/proc/
vmcore.c
224 int __weak remap_oldmem_pfn_range(struct vm_area_struct *vma, argument
229 return remap_pfn_range(vma, from, pfn, size, prot);
276 static int vmcoredd_mmap_dumps(struct vm_area_struct *vma, unsigned long dst, argument
290 if (remap_vmalloc_range_partial(vma, dst, buf, 0,
427 struct address_space *mapping = vmf->vma->vm_file->f_mapping;
495 * @vma: vm_area_struct describing requested mapping
503 static int remap_oldmem_pfn_checked(struct vm_area_struct *vma, argument
525 if (remap_oldmem_pfn_range(vma, from + len,
532 if (remap_oldmem_pfn_range(vma, from + len,
543 if (remap_oldmem_pfn_range(vma, fro
553 vmcore_remap_oldmem_pfn(struct vm_area_struct *vma, unsigned long from, unsigned long pfn, unsigned long size, pgprot_t prot) argument
572 mmap_vmcore(struct file *file, struct vm_area_struct *vma) argument
685 mmap_vmcore(struct file *file, struct vm_area_struct *vma) argument
[all...]
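remap_oldmem_pfn_range() above is declared __weak: an architecture can provide its own version, and the generic fallback simply calls remap_pfn_range(). GCC's weak symbols behave the same way in userspace; a minimal sketch (do_remap() is an invented name):

#include <stdio.h>

/* Weak default: used only when no strong definition exists elsewhere. */
__attribute__((weak)) int do_remap(unsigned long from, unsigned long pfn)
{
    printf("generic remap of pfn %lu at %#lx\n", pfn, from);
    return 0;
}

/* Uncomment to override the weak default, as an architecture would:
int do_remap(unsigned long from, unsigned long pfn)
{
    printf("arch-specific remap\n");
    return 0;
}
*/

int main(void)
{
    return do_remap(0x1000, 42);
}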
/linux-master/include/linux/
uprobes.h
117 extern int uprobe_mmap(struct vm_area_struct *vma);
118 extern void uprobe_munmap(struct vm_area_struct *vma, unsigned long start, unsigned long end);
169 static inline int uprobe_mmap(struct vm_area_struct *vma) argument
174 uprobe_munmap(struct vm_area_struct *vma, unsigned long start, unsigned long end) argument
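uprobes.h shows the usual header idiom for an optional subsystem: real prototypes when the feature is built in, static inline no-op stubs otherwise, so call sites never need #ifdef. A stripped-down illustration around a hypothetical CONFIG_FEATURE_X macro (with the macro defined, you would link against real implementations in a separate .c file):

#include <stdio.h>

/* #define CONFIG_FEATURE_X 1 */

#ifdef CONFIG_FEATURE_X
extern int feature_mmap(void *vma);
extern void feature_munmap(void *vma, unsigned long start, unsigned long end);
#else
/* Stubs: compile to nothing, keep call sites clean. */
static inline int feature_mmap(void *vma) { return 0; }
static inline void feature_munmap(void *vma, unsigned long start, unsigned long end) { }
#endif

int main(void)
{
    printf("mmap hook returned %d\n", feature_mmap(NULL));
    feature_munmap(NULL, 0, 0);
    return 0;
}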
dm-io.h
47 void *vma; member in union:dm_io_memory::__anon1093
mmdebug.h
14 void dump_vma(const struct vm_area_struct *vma);
34 #define VM_BUG_ON_VMA(cond, vma) \
37 dump_vma(vma); \
99 #define VM_BUG_ON_VMA(cond, vma) VM_BUG_ON(cond)
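VM_BUG_ON_VMA() dumps the offending vma via dump_vma() before tripping the BUG, and degrades to a plain VM_BUG_ON() when CONFIG_DEBUG_VM is off (line 99). A userspace analogue of that dump-then-die macro, with invented names:

#include <stdio.h>
#include <stdlib.h>

struct area { unsigned long start, end; };

static void dump_area(const struct area *a)
{
    fprintf(stderr, "area %p: [%#lx, %#lx)\n", (void *)a, a->start, a->end);
}

/* On a failed check, print the state that matters, then abort. */
#define BUG_ON_AREA(cond, a) \
    do { \
        if (cond) { \
            dump_area(a); \
            fprintf(stderr, "BUG at %s:%d\n", __FILE__, __LINE__); \
            abort(); \
        } \
    } while (0)

int main(void)
{
    struct area a = { 0x2000, 0x1000 };   /* deliberately inverted range */
    BUG_ON_AREA(a.start > a.end, &a);     /* dumps the area, then aborts */
    return 0;
}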
mm_inline.h
395 static inline void free_anon_vma_name(struct vm_area_struct *vma) argument
401 anon_vma_name_put(vma->anon_name);
419 static inline void free_anon_vma_name(struct vm_area_struct *vma) {} argument
547 pte_install_uffd_wp_if_needed(struct vm_area_struct *vma, unsigned long addr, argument
562 if (vma_is_anonymous(vma) || !userfaultfd_wp(vma))
577 set_pte_at(vma->vm_mm, addr, pte,
582 static inline bool vma_has_recency(struct vm_area_struct *vma) argument
584 if (vma->vm_flags & (VM_SEQ_READ | VM_RAND_READ))
587 if (vma
[all...]
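vma_has_recency() begins with a simple flag test: mappings marked VM_SEQ_READ or VM_RAND_READ (set by madvise(MADV_SEQUENTIAL) / madvise(MADV_RANDOM)) opt out of recency tracking. The same bitmask idiom, sketched with illustrative flag values that are not the kernel's:

#include <stdio.h>
#include <stdbool.h>

#define VM_SEQ_READ  0x1UL   /* illustrative values only */
#define VM_RAND_READ 0x2UL

struct area { unsigned long flags; };

static bool area_has_recency(const struct area *a)
{
    /* Either advice flag disables recency tracking. */
    if (a->flags & (VM_SEQ_READ | VM_RAND_READ))
        return false;
    return true;
}

int main(void)
{
    struct area a = { .flags = VM_RAND_READ };
    printf("recency: %s\n", area_has_recency(&a) ? "yes" : "no");
    return 0;
}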
/linux-master/arch/arm/mm/
copypage-v4wt.c
44 unsigned long vaddr, struct vm_area_struct *vma)
43 v4wt_copy_user_highpage(struct page *to, struct page *from, unsigned long vaddr, struct vm_area_struct *vma) argument
copypage-fa.c
39 unsigned long vaddr, struct vm_area_struct *vma)
38 fa_copy_user_highpage(struct page *to, struct page *from, unsigned long vaddr, struct vm_area_struct *vma) argument
/linux-master/drivers/gpu/drm/i915/gt/
intel_ggtt_fencing.h
25 struct i915_vma *vma; member in struct:i915_fence_reg
intel_renderstate.h
38 struct i915_vma *vma; member in struct:intel_renderstate
intel_context.c
116 static int __context_pin_state(struct i915_vma *vma, struct i915_gem_ww_ctx *ww) argument
118 unsigned int bias = i915_ggtt_pin_bias(vma) | PIN_OFFSET_BIAS;
121 err = i915_ggtt_pin(vma, ww, 0, bias | PIN_HIGH);
125 err = i915_active_acquire(&vma->active);
133 i915_vma_make_unshrinkable(vma);
134 vma->obj->mm.dirty = true;
139 i915_vma_unpin(vma);
143 static void __context_unpin_state(struct i915_vma *vma) argument
145 i915_vma_make_shrinkable(vma);
146 i915_active_release(&vma
[all...]
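__context_pin_state() reads as the usual goto-unwind shape: pin the vma in the GGTT, acquire its active tracker, and if a later step fails undo the pin before returning; __context_unpin_state() releases in reverse order. A compact sketch of that error-unwind structure, with invented resource names:

#include <stdio.h>

static int pin(void)         { printf("pin\n");     return 0; }
static void unpin(void)      { printf("unpin\n"); }
static int acquire(int fail) { printf("acquire\n"); return fail ? -1 : 0; }
static void release(void)    { printf("release\n"); }

/* Acquire two resources; on a late failure, unwind the earlier one. */
static int pin_state(int fail_acquire)
{
    int err = pin();
    if (err)
        return err;

    err = acquire(fail_acquire);
    if (err)
        goto err_unpin;

    return 0;

err_unpin:
    unpin();
    return err;
}

static void unpin_state(void)
{
    release();   /* reverse order of pin_state() */
    unpin();
}

int main(void)
{
    if (!pin_state(0))
        unpin_state();
    return 0;
}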
mock_engine.c
43 struct i915_vma *vma; local
49 vma = i915_vma_instance(obj, vm, NULL);
50 if (IS_ERR(vma))
53 return vma;
57 return vma;
75 ring->vma = create_ring_vma(engine->gt->ggtt, PAGE_SIZE);
76 if (IS_ERR(ring->vma)) {
88 i915_vma_put(ring->vma);
145 i915_vma_unpin(ce->ring->vma);
190 return i915_vma_pin_ww(ce->ring->vma, w
[all...]
gen6_ppgtt.c
211 if (flush && i915_vma_is_bound(ppgtt->vma, I915_VMA_GLOBAL_BIND)) {
332 * Workaround the limited maximum vma->pin_count and the aliasing_ppgtt
334 * (When vma->pin_count becomes atomic, I expect we will naturally
351 err = i915_ggtt_pin(ppgtt->vma, ww, GEN6_PD_ALIGN, PIN_HIGH);
353 GEM_BUG_ON(ppgtt->vma->fence);
354 clear_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(ppgtt->vma));
402 ppgtt->vma = i915_vma_instance(pd->pt.base, &ggtt->vm, NULL);
403 if (IS_ERR(ppgtt->vma)) {
404 err = PTR_ERR(ppgtt->vma);
405 ppgtt->vma
[all...]
/linux-master/drivers/gpu/drm/i915/gt/uc/
intel_guc_log_debugfs.c
38 if (!log->vma)
41 return obj_to_guc_log_dump_size(log->vma->obj);
86 if (!log->vma)
98 if (!log->vma)
/linux-master/drivers/gpu/drm/i915/selftests/
i915_mock_selftests.h
31 selftest(vma, i915_vma_mock_selftests)
/linux-master/drivers/staging/media/atomisp/include/hmm/
hmm.h
70 * map ISP memory starts with virt to specific vma.
77 int hmm_mmap(struct vm_area_struct *vma, ia_css_ptr virt);
/linux-master/mm/
page_idle.c
53 struct vm_area_struct *vma,
56 DEFINE_FOLIO_VMA_WALK(pvmw, folio, vma, addr, 0);
66 if (ptep_clear_young_notify(vma, addr, pvmw.pte))
69 if (pmdp_clear_young_notify(vma, addr, pvmw.pmd))
52 page_idle_clear_pte_refs_one(struct folio *folio, struct vm_area_struct *vma, unsigned long addr, void *arg) argument
/linux-master/arch/arm64/mm/
copypage.c
37 unsigned long vaddr, struct vm_area_struct *vma)
36 copy_user_highpage(struct page *to, struct page *from, unsigned long vaddr, struct vm_area_struct *vma) argument
/linux-master/drivers/misc/sgi-gru/
grufault.c
46 * Find the vma of a GRU segment. Caller must hold mmap_lock.
50 struct vm_area_struct *vma; local
52 vma = vma_lookup(current->mm, vaddr);
53 if (vma && vma->vm_ops == &gru_vm_ops)
54 return vma;
69 struct vm_area_struct *vma; local
73 vma = gru_find_vma(vaddr);
74 if (vma)
75 gts = gru_find_thread_state(vma, TSI
86 struct vm_area_struct *vma; local
177 non_atomic_pte_lookup(struct vm_area_struct *vma, unsigned long vaddr, int write, unsigned long *paddr, int *pageshift) argument
205 atomic_pte_lookup(struct vm_area_struct *vma, unsigned long vaddr, int write, unsigned long *paddr, int *pageshift) argument
256 struct vm_area_struct *vma; local
[all...]
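gru_find_vma() checks ownership as well as existence: vma_lookup() finds the mapping covering vaddr, and its vm_ops pointer is compared against gru_vm_ops to confirm the vma really belongs to the GRU driver. The same look-up-then-check-the-ops-table idea, as a self-contained sketch with stand-in types:

#include <stdio.h>
#include <stddef.h>

struct area_ops { const char *owner; };
struct area { unsigned long start, end; const struct area_ops *ops; };

static const struct area_ops gru_ops = { "gru" };
static const struct area_ops other_ops = { "other" };

static struct area areas[] = {
    { 0x1000, 0x2000, &other_ops },
    { 0x3000, 0x5000, &gru_ops },
};

/* Find the area covering addr, but only if it is owned by 'ops'. */
static struct area *find_owned_area(unsigned long addr, const struct area_ops *ops)
{
    for (size_t i = 0; i < sizeof(areas) / sizeof(areas[0]); i++) {
        struct area *a = &areas[i];
        if (addr >= a->start && addr < a->end)
            return a->ops == ops ? a : NULL;
    }
    return NULL;
}

int main(void)
{
    printf("%s\n", find_owned_area(0x3500, &gru_ops) ? "gru vma" : "not ours");
    return 0;
}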
/linux-master/drivers/gpu/drm/etnaviv/
etnaviv_gem.c
128 struct vm_area_struct *vma)
132 vm_flags_set(vma, VM_PFNMAP | VM_DONTEXPAND | VM_DONTDUMP);
134 vm_page_prot = vm_get_page_prot(vma->vm_flags);
137 vma->vm_page_prot = pgprot_writecombine(vm_page_prot);
139 vma->vm_page_prot = pgprot_noncached(vm_page_prot);
146 vma->vm_pgoff = 0;
147 vma_set_file(vma, etnaviv_obj->base.filp);
149 vma->vm_page_prot = vm_page_prot;
155 static int etnaviv_gem_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma) argument
159 return etnaviv_obj->ops->mmap(etnaviv_obj, vma);
127 etnaviv_gem_mmap_obj(struct etnaviv_gem_object *etnaviv_obj, struct vm_area_struct *vma) argument
164 struct vm_area_struct *vma = vmf->vma; local
694 etnaviv_gem_userptr_mmap_obj(struct etnaviv_gem_object *etnaviv_obj, struct vm_area_struct *vma) argument
[all...]
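etnaviv_gem_mmap_obj() performs the common GEM mmap setup: set VM_PFNMAP | VM_DONTEXPAND | VM_DONTDUMP, then derive the page protection from the object's caching flags (write-combined vs. uncached). The decision itself is a small flag-driven switch; a sketch with invented flag and mode names:

#include <stdio.h>

enum cache_mode { CACHE_WC, CACHE_UNCACHED, CACHE_DEFAULT };

#define OBJ_FLAG_WC       0x1u   /* illustrative flags, not etnaviv's */
#define OBJ_FLAG_UNCACHED 0x2u

/* Pick the mapping attributes from the buffer object's flags. */
static enum cache_mode pick_cache_mode(unsigned int obj_flags)
{
    if (obj_flags & OBJ_FLAG_WC)
        return CACHE_WC;         /* pgprot_writecombine() in the driver */
    if (obj_flags & OBJ_FLAG_UNCACHED)
        return CACHE_UNCACHED;   /* pgprot_noncached() */
    return CACHE_DEFAULT;        /* leave the inherited protection alone */
}

int main(void)
{
    printf("mode = %d\n", pick_cache_mode(OBJ_FLAG_WC));
    return 0;
}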
/linux-master/drivers/gpu/drm/i915/display/
intel_fbdev.c
59 struct i915_vma *vma; member in struct:intel_fbdev
128 static int intel_fbdev_mmap(struct fb_info *info, struct vm_area_struct *vma) argument
134 return i915_gem_fb_mmap(obj, vma);
149 intel_unpin_fb_vma(ifbdev->vma, ifbdev->vma_flags);
186 struct i915_vma *vma; local
226 /* Pin the GGTT vma for our access via info->screen_base.
230 vma = intel_pin_and_fence_fb_obj(&ifbdev->fb->base, false,
232 if (IS_ERR(vma)) {
233 ret = PTR_ERR(vma);
250 ret = intel_fbdev_fb_fill_info(dev_priv, info, obj, vma);
[all...]
/linux-master/arch/riscv/kvm/
mmu.c
490 struct vm_area_struct *vma = find_vma(current->mm, hva); local
493 if (!vma || vma->vm_start >= reg_end)
500 if (writable && !(vma->vm_flags & VM_WRITE)) {
506 vm_start = max(hva, vma->vm_start);
507 vm_end = min(reg_end, vma->vm_end);
509 if (vma->vm_flags & VM_PFNMAP) {
513 pa = (phys_addr_t)vma->vm_pgoff << PAGE_SHIFT;
514 pa += vm_start - vma->vm_start;
598 struct vm_area_struct *vma; local
[all...]
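For a VM_PFNMAP vma, the riscv mmu.c hits derive the backing physical address from the vma itself: pa = vm_pgoff << PAGE_SHIFT, advanced by the offset of the clamped start into the vma. The arithmetic in isolation, assuming 4 KiB pages:

#include <stdio.h>

#define PAGE_SHIFT 12   /* assume 4 KiB pages */

int main(void)
{
    unsigned long vm_pgoff  = 0x100;     /* first pfn backing the vma */
    unsigned long vma_start = 0x400000;  /* vma->vm_start */
    unsigned long vm_start  = 0x402000;  /* clamped start of the region */

    /* Physical base of the vma, then advance by the offset into it. */
    unsigned long pa = vm_pgoff << PAGE_SHIFT;
    pa += vm_start - vma_start;

    printf("pa = %#lx\n", pa);           /* 0x100000 + 0x2000 = 0x102000 */
    return 0;
}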

Completed in 484 milliseconds
