Searched refs: PMD_SIZE (results 26 - 50 of 136), sorted by relevance


/linux-master/drivers/dax/
  dax-private.h
    106  if (align == PMD_SIZE && has_transparent_hugepage())
  device.c
    144  unsigned int fault_size = PMD_SIZE;
    149  if (dev_dax->align > PMD_SIZE) {
    162  (pmd_addr + PMD_SIZE) > vmf->vma->vm_end)
    166  phys = dax_pgoff_to_phys(dev_dax, pgoff, PMD_SIZE);
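
Note: the device.c hits above are the usual guards a dev-dax PMD fault takes before installing a 2 MiB mapping: the fault is only attempted when the device alignment does not exceed PMD_SIZE, and the PMD-aligned window around the faulting address has to fall entirely inside the VMA. Below is a minimal userspace sketch of that window check, assuming a 2 MiB PMD; pmd_fault_fits() and its vma_start/vma_end parameters are illustrative stand-ins for the kernel's vmf/vma fields, not real API.

    #include <stdio.h>
    #include <stdbool.h>

    #define PMD_SIZE  (2UL * 1024 * 1024)     /* assumption: 2 MiB PMD */
    #define PMD_MASK  (~(PMD_SIZE - 1))

    /* Can the fault at 'addr' be served by one PMD mapping inside [vma_start, vma_end)? */
    static bool pmd_fault_fits(unsigned long addr, unsigned long vma_start,
                               unsigned long vma_end)
    {
        unsigned long pmd_addr = addr & PMD_MASK;   /* round down to the PMD boundary */

        return pmd_addr >= vma_start && pmd_addr + PMD_SIZE <= vma_end;
    }

    int main(void)
    {
        printf("%d\n", pmd_fault_fits(0x40250000UL, 0x40200000UL, 0x40600000UL)); /* 1 */
        printf("%d\n", pmd_fault_fits(0x40250000UL, 0x40210000UL, 0x40600000UL)); /* 0 */
        return 0;
    }
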
/linux-master/arch/x86/kernel/
  vmlinux.lds.S
    66  #define ALIGN_ENTRY_TEXT_BEGIN . = ALIGN(PMD_SIZE);
    67  #define ALIGN_ENTRY_TEXT_END . = ALIGN(PMD_SIZE);
    78  . = ALIGN(PMD_SIZE); \
    83  . = ALIGN(PMD_SIZE); \
/linux-master/arch/x86/mm/
  mem_encrypt_identity.c
    96  static char sme_workarea[2 * PMD_SIZE] __section(".init.scratch");
    197  ppd->vaddr += PMD_SIZE;
    198  ppd->paddr += PMD_SIZE;
    224  ppd->vaddr_end = ALIGN(ppd->vaddr, PMD_SIZE);
    324  kernel_end = ALIGN((unsigned long)RIP_REL_REF(_end), PMD_SIZE);
    346  * intermediate copy buffer (PMD_SIZE)
    351  execute_end = execute_start + (PAGE_SIZE * 2) + PMD_SIZE;
    374  workarea_end = ALIGN(workarea_start + workarea_len, PMD_SIZE);
  mem_encrypt_amd.c
    169  vaddr += PMD_SIZE;
    170  paddr += PMD_SIZE;
    171  size = (size <= PMD_SIZE) ? 0 : size - PMD_SIZE;
  pti.c
    340  addr = round_up(addr + 1, PMD_SIZE);
    377  addr += PMD_SIZE;
    595  unsigned long end = ALIGN((unsigned long)_end, PMD_SIZE);
  kasan_init_64.c
    47  ((end - addr) == PMD_SIZE) &&
    48  IS_ALIGNED(addr, PMD_SIZE)) {
    49  p = early_alloc(PMD_SIZE, nid, false);
    52  memblock_free(p, PMD_SIZE);
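
Note: the kasan_init_64.c hits show the recurring huge-mapping test in early page-table setup: take a PMD-sized step only when the current address is PMD-aligned and a full PMD's worth of range remains, otherwise fall back to base pages. A minimal userspace sketch of that decision follows, assuming a 2 MiB PMD and 4 KiB pages; IS_ALIGNED is re-implemented and walk() is an illustrative name, not kernel code.

    #include <stdio.h>
    #include <stdbool.h>

    #define PAGE_SIZE  4096UL
    #define PMD_SIZE   (2UL * 1024 * 1024)          /* assumption: 2 MiB PMD */
    #define IS_ALIGNED(x, a)  (((x) & ((a) - 1)) == 0)

    /* Walk [addr, end) and report which step size each iteration would use. */
    static void walk(unsigned long addr, unsigned long end)
    {
        while (addr < end) {
            bool use_pmd = IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE;
            unsigned long step = use_pmd ? PMD_SIZE : PAGE_SIZE;

            printf("%#lx: %s step\n", addr, use_pmd ? "PMD" : "PTE");
            addr += step;
        }
    }

    int main(void)
    {
        /* Unaligned head: one PTE step, then two PMD steps, then a PTE tail. */
        walk(0x1ff000UL, 0x601000UL);
        return 0;
    }
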
/linux-master/arch/loongarch/kvm/
  mmu.c
    398  if (IS_ALIGNED(size, PMD_SIZE) && IS_ALIGNED(gpa_start, PMD_SIZE)
    399  && IS_ALIGNED(hva_start, PMD_SIZE))
    426  gpa_offset = gpa_start & (PMD_SIZE - 1);
    427  hva_offset = hva_start & (PMD_SIZE - 1);
    432  gpa_offset = PMD_SIZE;
    433  if ((size + gpa_offset) < (PMD_SIZE * 2))
    635  return (hva >= ALIGN(start, PMD_SIZE)) && (hva < ALIGN_DOWN(end, PMD_SIZE));
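
Note: the mmu.c hits implement KVM's huge-page eligibility test for a memslot: PMD-sized block mappings are only safe when the guest-physical base, the host-virtual base and the size are co-aligned to PMD_SIZE (the lines around 426 handle the looser case where both bases share the same offset within a PMD). A minimal userspace sketch of the strict form of that predicate, assuming a 2 MiB PMD; slot_supports_pmd_mappings() is an illustrative name, not the kernel's.

    #include <stdio.h>
    #include <stdbool.h>

    #define PMD_SIZE          (2UL * 1024 * 1024)   /* assumption: 2 MiB PMD */
    #define IS_ALIGNED(x, a)  (((x) & ((a) - 1)) == 0)

    /* True if [gpa, gpa+size) backed by host address hva can use PMD block mappings. */
    static bool slot_supports_pmd_mappings(unsigned long gpa, unsigned long hva,
                                           unsigned long size)
    {
        return IS_ALIGNED(size, PMD_SIZE) &&
               IS_ALIGNED(gpa, PMD_SIZE) &&
               IS_ALIGNED(hva, PMD_SIZE);
    }

    int main(void)
    {
        printf("%d\n", slot_supports_pmd_mappings(0x40000000UL, 0xb7200000UL, 0x400000UL)); /* 1 */
        printf("%d\n", slot_supports_pmd_mappings(0x40000000UL, 0xb7201000UL, 0x400000UL)); /* 0 */
        return 0;
    }
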
/linux-master/arch/riscv/mm/
  hugetlbpage.c
    56  if (sz == PMD_SIZE) {
    112  if (sz == PMD_SIZE)
    137  case PMD_SIZE:
    138  return PUD_SIZE - PMD_SIZE;
    140  return PMD_SIZE - napot_cont_size(NAPOT_CONT64KB_ORDER);
    238  else if (sz >= PMD_SIZE)
  pgtable.c
    119  flush_tlb_kernel_range(addr, addr + PMD_SIZE);
/linux-master/arch/powerpc/mm/book3s64/
  radix_pgtable.c
    100  if (map_page_size == PMD_SIZE) {
    163  if (map_page_size == PMD_SIZE) {
    323  } else if (IS_ALIGNED(addr, PMD_SIZE) && gap >= PMD_SIZE &&
    325  mapping_size = PMD_SIZE;
    676  unsigned long start = ALIGN_DOWN(addr, PMD_SIZE);
    678  return !vmemmap_populated(start, PMD_SIZE);
    771  if (IS_ALIGNED(addr, PMD_SIZE) &&
    772  IS_ALIGNED(next, PMD_SIZE)) {
    774  free_vmemmap_pages(pmd_page(*pmd), altmap, get_order(PMD_SIZE));
    [all...]
/linux-master/arch/arc/include/asm/
  pgtable-levels.h
    81  #define PMD_SIZE BIT(PMD_SHIFT)
    82  #define PMD_MASK (~(PMD_SIZE - 1))
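
Note: these two lines are the definition pattern every architecture in this list repeats: PMD_SIZE is the span covered by one PMD entry, derived from PMD_SHIFT, and PMD_MASK clears the offset bits below that span. A minimal userspace sketch, assuming PMD_SHIFT == 21 (the 2 MiB PMD of x86-64/arm64 with 4 KiB pages) and re-implementing BIT() locally.

    #include <stdio.h>

    /* Illustrative stand-ins for the kernel macros; PMD_SHIFT == 21 is an assumption. */
    #define BIT(n)     (1UL << (n))
    #define PMD_SHIFT  21
    #define PMD_SIZE   BIT(PMD_SHIFT)        /* 0x200000 == 2 MiB       */
    #define PMD_MASK   (~(PMD_SIZE - 1))     /* clears the low 21 bits  */

    int main(void)
    {
        unsigned long addr = 0x12345678UL;

        printf("PMD_SIZE        = %#lx\n", PMD_SIZE);
        printf("addr            = %#lx\n", addr);
        printf("addr & PMD_MASK = %#lx (start of the PMD covering addr)\n",
               addr & PMD_MASK);
        return 0;
    }
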
/linux-master/arch/xtensa/mm/
  kasan_init.c
    29  for (vaddr = 0; vaddr < KASAN_SHADOW_SIZE; vaddr += PMD_SIZE, ++pmd) {
/linux-master/arch/arm64/include/asm/
  hugetlb.h
    71  if (stride == PMD_SIZE)
  pgtable-hwdef.h
    50  #define PMD_SIZE (_AC(1, UL) << PMD_SHIFT)
    51  #define PMD_MASK (~(PMD_SIZE-1))
    91  #define CONT_PMD_SIZE (CONT_PMDS * PMD_SIZE)
/linux-master/arch/x86/boot/compressed/
  ident_map_64.c
    98  start = round_down(start, PMD_SIZE);
    99  end = round_up(end, PMD_SIZE);
    370  end = address + PMD_SIZE;
  sev.c
    216  if (IS_ALIGNED(pa, PMD_SIZE) && (pa_end - pa) >= PMD_SIZE) {
    218  pa += PMD_SIZE;
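
Note: the ident_map_64.c hits show the complementary rounding idiom used by the decompressor's identity mapping: widen a range outward to PMD boundaries so it can be covered entirely by 2 MiB mappings. A minimal userspace sketch, assuming a 2 MiB PMD; round_down()/round_up() are re-implemented here rather than taken from kernel headers.

    #include <stdio.h>

    #define PMD_SIZE          (2UL * 1024 * 1024)   /* assumption: 2 MiB PMD */
    #define round_down(x, a)  ((x) & ~((a) - 1))
    #define round_up(x, a)    (round_down((x) + (a) - 1, (a)))

    int main(void)
    {
        unsigned long start = 0x1234567UL, end = 0x2345678UL;

        /* Widen [start, end) so both ends sit on a PMD boundary. */
        start = round_down(start, PMD_SIZE);
        end   = round_up(end, PMD_SIZE);

        printf("mapped range: [%#lx, %#lx), %lu PMD entries\n",
               start, end, (end - start) / PMD_SIZE);
        return 0;
    }
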
/linux-master/arch/arm64/kvm/
  pkvm.c
    88  hyp_mem_base = memblock_phys_alloc(ALIGN(hyp_mem_size, PMD_SIZE),
    89  PMD_SIZE);
    93  hyp_mem_size = ALIGN(hyp_mem_size, PMD_SIZE);
/linux-master/arch/sparc/mm/
  hugetlbpage.c
    296  if (sz >= PMD_SIZE)
    340  else if (size >= PMD_SIZE)
    383  else if (size >= PMD_SIZE)
    502  addr += PMD_SIZE;
    512  end -= PMD_SIZE;
/linux-master/arch/loongarch/include/asm/
  pgtable.h
    29  #define PMD_SIZE (1UL << PMD_SHIFT)
    30  #define PMD_MASK (~(PMD_SIZE-1))
    34  #define PMD_SIZE (1UL << PMD_SHIFT)
    35  #define PMD_MASK (~(PMD_SIZE-1))
    96  min(PTRS_PER_PGD * PTRS_PER_PUD * PTRS_PER_PMD * PTRS_PER_PTE * PAGE_SIZE, (1UL << cpu_vabits)) - PMD_SIZE - VMEMMAP_SIZE - KFENCE_AREA_SIZE)
    100  min(PTRS_PER_PGD * PTRS_PER_PUD * PTRS_PER_PMD * PTRS_PER_PTE * PAGE_SIZE, (1UL << cpu_vabits) / 2) - PMD_SIZE - VMEMMAP_SIZE - KFENCE_AREA_SIZE)
    103  #define vmemmap ((struct page *)((VMALLOC_END + PMD_SIZE) & PMD_MASK))
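
Note: the last pgtable.h hit uses the add-then-mask idiom to place the struct page array at the first PMD boundary strictly above VMALLOC_END. A minimal userspace sketch of that arithmetic, assuming a 2 MiB PMD; the VMALLOC_END value below is a made-up placeholder, not the real configuration-dependent constant.

    #include <stdio.h>

    #define PMD_SIZE  (2ULL * 1024 * 1024)   /* assumption: 2 MiB PMD */
    #define PMD_MASK  (~(PMD_SIZE - 1))

    int main(void)
    {
        /* Placeholder address; the real VMALLOC_END depends on the configuration. */
        unsigned long long vmalloc_end  = 0xffff800000123456ULL;
        unsigned long long vmemmap_base = (vmalloc_end + PMD_SIZE) & PMD_MASK;

        printf("vmemmap array starts at %#llx\n", vmemmap_base);
        return 0;
    }
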
/linux-master/arch/arm/mm/
  mmu.c
    1125  next = (addr + PMD_SIZE - 1) & PMD_MASK;
    1217  if (!IS_ALIGNED(block_start, PMD_SIZE)) {
    1220  len = round_up(block_start, PMD_SIZE) - block_start;
    1253  if (!IS_ALIGNED(block_start, PMD_SIZE))
    1255  else if (!IS_ALIGNED(block_end, PMD_SIZE))
    1274  memblock_limit = round_down(memblock_limit, PMD_SIZE);
    1304  for (addr = 0; addr < KASAN_SHADOW_START; addr += PMD_SIZE)
    1312  for (addr = KASAN_SHADOW_END; addr < MODULES_VADDR; addr += PMD_SIZE)
    1315  for (addr = 0; addr < MODULES_VADDR; addr += PMD_SIZE)
    1321  addr = ((unsigned long)_exiprom + PMD_SIZE
    [all...]
/linux-master/arch/s390/mm/
  hugetlbpage.c
    137  size = PMD_SIZE;
    206  else if (sz == PMD_SIZE)
    238  if (MACHINE_HAS_EDAT1 && size == PMD_SIZE)
/linux-master/arch/m68k/mm/
  motorola.c
    308  if (!(virtaddr & (PMD_SIZE-1)))
    343  physaddr += PMD_SIZE;
    357  size -= PMD_SIZE;
    358  virtaddr += PMD_SIZE;
/linux-master/mm/kasan/
  init.c
    118  if (IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE) {
    388  if (IS_ALIGNED(addr, PMD_SIZE) &&
    389  IS_ALIGNED(next, PMD_SIZE)) {
/linux-master/arch/s390/boot/
  vmem.c
    145  IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE) {
    278  IS_ALIGNED(addr, PMD_SIZE) && (end - addr) >= PMD_SIZE;
