Searched refs:pte (Results 201 - 225 of 486) sorted by relevance


/linux-master/arch/powerpc/include/asm/book3s/64/
tlbflush-hash.h 18 real_pte_t pte[PPC64_TLB_BATCH_NR]; member in struct:ppc64_tlb_batch
62 extern void flush_hash_page(unsigned long vpn, real_pte_t pte, int psize,
hash.h 143 * pud comparison that will work with both pte and page table pointer.
161 pte_t *ptep, unsigned long pte, int huge);
225 static inline int hash__pte_none(pte_t pte) argument
227 return (pte_val(pte) & ~H_PTE_NONE_MASK) == 0;
239 pte_t *ptep, pte_t pte, int percpu)
245 *ptep = pte;
238 hash__set_pte_at(struct mm_struct *mm, unsigned long addr, pte_t *ptep, pte_t pte, int percpu) argument
/linux-master/arch/powerpc/include/asm/
pgalloc.h 38 static inline void pte_free_kernel(struct mm_struct *mm, pte_t *pte) argument
40 pte_fragment_free((unsigned long *)pte, 1);
pgtable.h 47 pte_t pte, unsigned int nr);
60 static inline unsigned long pte_pfn(pte_t pte) argument
62 return (pte_val(pte) & PTE_RPN_MASK) >> PTE_RPN_SHIFT;
68 static inline pgprot_t pte_pgprot(pte_t pte) argument
72 pte_flags = pte_val(pte) & ~PTE_RPN_MASK;
/linux-master/arch/arm/mm/
mm.h 25 static inline void set_top_pte(unsigned long va, pte_t pte) argument
28 set_pte_ext(ptep, pte, 0);
/linux-master/mm/
mapping_dirty_helpers.c 13 * @tlbflush_start: Address of first modified pte
14 * @tlbflush_end: Address of last modified pte + 1
25 * wp_pte - Write-protect a pte
26 * @pte: Pointer to the pte
31 * The function write-protects a pte and records the range in
34 static int wp_pte(pte_t *pte, unsigned long addr, unsigned long end, argument
38 pte_t ptent = ptep_get(pte);
41 pte_t old_pte = ptep_modify_prot_start(walk->vma, addr, pte);
44 ptep_modify_prot_commit(walk->vma, addr, pte, old_pt
89 clean_record_pte(pte_t *pte, unsigned long addr, unsigned long end, struct mm_walk *walk) argument
[all...]
internal.h 116 static inline pte_t __pte_batch_clear_ignored(pte_t pte, fpb_t flags) argument
119 pte = pte_mkclean(pte);
121 pte = pte_clear_soft_dirty(pte);
122 return pte_wrprotect(pte_mkold(pte));
130 * @pte: Page table entry for the first page.
153 pte_t *start_ptep, pte_t pte, int max_nr, fpb_t flags,
169 VM_WARN_ON_FOLIO(!pte_present(pte), folio);
171 VM_WARN_ON_FOLIO(page_folio(pfn_to_page(pte_pfn(pte))) !
152 folio_pte_batch(struct folio *folio, unsigned long addr, pte_t *start_ptep, pte_t pte, int max_nr, fpb_t flags, bool *any_writable, bool *any_young, bool *any_dirty) argument
221 pte_next_swp_offset(pte_t pte) argument
252 swap_pte_batch(pte_t *start_ptep, int max_nr, pte_t pte) argument
[all...]
hugetlb_vmemmap.c 26 * @nr_walked: the number of walked pte.
35 void (*remap_pte)(pte_t *pte, unsigned long addr,
64 pte_t entry, *pte; local
68 pte = pte_offset_kernel(&__pmd, addr);
69 set_pte_at(&init_mm, addr, pte, entry);
82 /* Make pte visible before pmd. See comment in pmd_install(). */
136 static int vmemmap_pte_entry(pte_t *pte, unsigned long addr, argument
146 vmemmap_walk->reuse_page = pte_page(ptep_get(pte));
148 vmemmap_walk->remap_pte(pte, addr, vmemmap_walk);
202 static void vmemmap_remap_pte(pte_t *pte, unsigne argument
250 vmemmap_restore_pte(pte_t *pte, unsigned long addr, struct vmemmap_remap_walk *walk) argument
[all...]
mincore.c 25 static int mincore_hugetlb(pte_t *pte, unsigned long hmask, unsigned long addr, argument
36 present = pte && !huge_pte_none_mostly(huge_ptep_get(pte));
59 * file will not get a swp_entry_t in its pte, but rather it is like
122 pte_t pte = ptep_get(ptep); local
124 /* We need to do cache lookup too for pte markers */
125 if (pte_none_mostly(pte))
128 else if (pte_present(pte))
130 else { /* pte is a swap entry */
131 swp_entry_t entry = pte_to_swp_entry(pte);
[all...]
/linux-master/drivers/gpu/drm/xe/
xe_pt_types.h 43 u64 (*pte_encode_vma)(u64 pte, struct xe_vma *vma,
54 u64 pte; member in struct:xe_pt_entry
/linux-master/arch/openrisc/include/asm/
page.h 49 unsigned long pte; member in struct:__anon33
59 #define pte_val(x) ((x).pte)
/linux-master/arch/nios2/include/asm/
page.h 61 typedef struct { unsigned long pte; } pte_t; member in struct:__anon30
65 #define pte_val(x) ((x).pte)
/linux-master/arch/arm64/include/asm/
pgtable-types.h 23 typedef struct { pteval_t pte; } pte_t; member in struct:__anon11
24 #define pte_val(x) ((x).pte)
pgtable-prot.h 23 * PTE_PRESENT_INVALID=1 & PTE_VALID=0 indicates that the pte's fields should be
123 #define pte_pi_index(pte) ( \
124 ((pte & BIT(PTE_PI_IDX_3)) >> (PTE_PI_IDX_3 - 3)) | \
125 ((pte & BIT(PTE_PI_IDX_2)) >> (PTE_PI_IDX_2 - 2)) | \
126 ((pte & BIT(PTE_PI_IDX_1)) >> (PTE_PI_IDX_1 - 1)) | \
127 ((pte & BIT(PTE_PI_IDX_0)) >> (PTE_PI_IDX_0 - 0)))
/linux-master/arch/riscv/include/asm/
hugetlb.h 26 unsigned long addr, pte_t *ptep, pte_t pte,
44 pte_t pte, int dirty);
/linux-master/drivers/gpu/drm/i915/gt/
intel_ggtt.c 262 gen8_pte_t pte = addr | GEN8_PAGE_PRESENT; local
267 pte |= GEN12_GGTT_PTE_LM;
270 pte |= MTL_GGTT_PTE_PAT0;
273 pte |= MTL_GGTT_PTE_PAT1;
275 return pte;
282 gen8_pte_t pte = addr | GEN8_PAGE_PRESENT; local
285 pte |= GEN12_GGTT_PTE_LM;
287 return pte;
331 const gen8_pte_t pte)
388 *cs++ = lower_32_bits(pte | add
329 gen8_ggtt_bind_ptes(struct i915_ggtt *ggtt, u32 offset, struct sg_table *pages, u32 num_entries, const gen8_pte_t pte) argument
431 gen8_set_pte(void __iomem *addr, gen8_pte_t pte) argument
443 gen8_pte_t __iomem *pte = local
456 gen8_pte_t pte; local
598 gen6_pte_t __iomem *pte = local
1304 gen6_pte_t pte = GEN6_PTE_ADDR_ENCODE(addr) | GEN6_PTE_VALID; local
1325 gen6_pte_t pte = GEN6_PTE_ADDR_ENCODE(addr) | GEN6_PTE_VALID; local
1348 gen6_pte_t pte = GEN6_PTE_ADDR_ENCODE(addr) | GEN6_PTE_VALID; local
1363 gen6_pte_t pte = HSW_PTE_ADDR_ENCODE(addr) | GEN6_PTE_VALID; local
1375 gen6_pte_t pte = HSW_PTE_ADDR_ENCODE(addr) | GEN6_PTE_VALID; local
[all...]
/linux-master/arch/powerpc/include/asm/nohash/64/
pgtable.h 74 #include <asm/nohash/pte-e500.h>
136 static inline pud_t pte_pud(pte_t pte) argument
138 return __pud(pte_val(pte));
195 #define __pte_to_swp_entry(pte) ((swp_entry_t) { pte_val((pte)) })
/linux-master/arch/x86/mm/
init_32.c 155 static pte_t *__init page_table_kmap_check(pte_t *pte, pmd_t *pmd, argument
161 * Something (early fixmap) may already have put a pte
178 set_pte(newpte + i, pte[i]);
185 pte = newpte;
189 && lastpte && lastpte + PTRS_PER_PTE != pte);
191 return pte;
210 pte_t *pte = NULL; local
227 pte = page_table_kmap_check(one_page_table_init(pmd),
228 pmd, vaddr, pte, &adr);
262 pte_t *pte; local
442 pte_t *pte; local
[all...]
pgtable.c 53 void ___pte_free_tlb(struct mmu_gather *tlb, struct page *pte) argument
55 pagetable_pte_dtor(page_ptdesc(pte));
56 paravirt_release_pte(page_to_pfn(pte));
57 paravirt_tlb_remove_table(tlb, pte);
499 * to also make the pte writeable at the same time the dirty bit is
564 (unsigned long *) &ptep->pte);
663 void __native_set_fixmap(enum fixed_addresses idx, pte_t pte) argument
680 set_pte_vaddr(address, pte);
819 pte_t *pte; local
840 pte
863 pte_t *pte; local
890 pte_mkwrite(pte_t pte, struct vm_area_struct *vma) argument
910 arch_check_zapped_pte(struct vm_area_struct *vma, pte_t pte) argument
[all...]
/linux-master/arch/x86/xen/
mmu_pv.c 22 * pte/pmd/pgd, it converts the passed pfn into an mfn. Conversely,
23 * when reading the content back with __(pgd|pmd|pte)_val, it converts
93 pteval_t xen_pte_val(pte_t pte);
98 pte_t xen_make_pte(pteval_t pte);
103 pte_t xen_make_pte_init(pteval_t pte);
145 pte_t *pte, ptev; local
149 pte = lookup_address(address, &level);
150 if (pte == NULL)
153 ptev = pte_wrprotect(*pte);
161 pte_t *pte, pte local
316 xen_ptep_modify_prot_commit(struct vm_area_struct *vma, unsigned long addr, pte_t *ptep, pte_t pte) argument
374 xen_pte_val(pte_t pte) argument
388 xen_make_pte(pteval_t pte) argument
1450 xen_set_pte_init(pte_t *ptep, pte_t pte) argument
1458 xen_make_pte_init(pteval_t pte) argument
1647 pte_t pte = pfn_pte(pfn, prot); local
1671 pte_t *pte = v; local
1852 pte_t pte; local
2032 pte_t pte; local
2423 pte_t pte = pte_mkspecial(mfn_pte(*rmd->pfn, rmd->prot)); local
[all...]
/linux-master/tools/testing/selftests/kvm/lib/aarch64/
processor.c 72 uint64_t pte; local
75 pte = pa & GENMASK(49, vm->page_shift);
76 pte |= FIELD_GET(GENMASK(51, 50), pa) << 8;
79 pte = pa & GENMASK(47, vm->page_shift);
81 pte |= FIELD_GET(GENMASK(51, 48), pa) << 12;
83 pte |= attrs;
85 return pte;
88 static uint64_t pte_addr(struct kvm_vm *vm, uint64_t pte) argument
93 pa = pte & GENMASK(49, vm->page_shift);
94 pa |= FIELD_GET(GENMASK(9, 8), pte) << 5
229 uint64_t pte, *ptep; local
[all...]
/linux-master/arch/s390/boot/
vmem.c 152 static bool kasan_pte_populate_zero_shadow(pte_t *pte, enum populate_mode mode) argument
155 set_pte(pte, pte_z);
190 static bool kasan_pte_populate_zero_shadow(pte_t *pte, enum populate_mode mode) argument
219 pte_t *pte; local
227 pte = pte_leftover + _PAGE_TABLE_SIZE;
228 __arch_set_page_dat(pte, 1);
230 pte = pte_leftover;
234 memset64((u64 *)pte, _PAGE_INVALID, PTRS_PER_PTE);
235 return pte;
291 pte_t *pte, entr local
315 pte_t *pte; local
[all...]
/linux-master/arch/microblaze/mm/
pgtable.c 201 pte_t *pte; local
210 pte = pte_offset_kernel(pmd, addr & PAGE_MASK);
211 if (pte) {
213 *ptep = pte;
227 pte_t *pte; local
239 if (get_pteptr(mm, addr, &pte))
240 pa = (pte_val(*pte) & PAGE_MASK) | (addr & ~PAGE_MASK);
/linux-master/arch/x86/include/asm/
pgtable_64.h 35 pr_err("%s:%d: bad pte %p(%016lx)\n", \
65 static inline void native_set_pte(pte_t *ptep, pte_t pte) argument
67 WRITE_ONCE(*ptep, pte);
76 static inline void native_set_pte_atomic(pte_t *ptep, pte_t pte) argument
78 native_set_pte(ptep, pte);
94 return native_make_pte(xchg(&xp->pte, 0));
239 #define __pte_to_swp_entry(pte) ((swp_entry_t) { pte_val((pte)) })
/linux-master/arch/sparc/mm/
tlb.c 151 pte_t *pte; local
153 pte = pte_offset_map(&pmd, vaddr);
154 if (!pte)
158 if (pte_val(*pte) & _PAGE_VALID) {
159 bool exec = pte_exec(*pte);
163 pte++;
166 pte_unmap(pte);

