Lines matching refs: pmd

63  * When allocating pud or pmd pointers, we allocate a complete page
628 static void free_pte_table(pte_t *pte_start, pmd_t *pmd)
640 pmd_clear(pmd);
645 pmd_t *pmd;
649 pmd = pmd_start + i;
650 if (!pmd_none(*pmd))
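The 628–650 fragments are the teardown helpers that free an empty PTE table and scan a PMD directory; judging by the radix__* names further down, this listing is over the powerpc Book3S-64 radix page-table code. A minimal sketch of the pattern, assuming the usual kernel-internal headers (<linux/mm.h>, <asm/pgalloc.h>) for this and the sketches below, not the exact source:

static void free_pte_table(pte_t *pte_start, pmd_t *pmd)
{
	pte_t *pte;
	int i;

	/* If any entry is still live, the table cannot be freed. */
	for (i = 0; i < PTRS_PER_PTE; i++) {
		pte = pte_start + i;
		if (!pte_none(*pte))
			return;
	}

	/* Every slot is empty: release the page and unhook it. */
	pte_free_kernel(&init_mm, pte_start);
	pmd_clear(pmd);
}

The 645–650 fragments suggest a pmd-level twin of the same scan, walking PTRS_PER_PMD entries with pmd_none() before the directory page is freed.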
761 pmd_t *pmd;
763 pmd = pmd_start + pmd_index(addr);
764 for (; addr < end; addr = next, pmd++) {
767 if (!pmd_present(*pmd))
770 if (pmd_leaf(*pmd)) {
774 free_vmemmap_pages(pmd_page(*pmd), altmap, get_order(PMD_SIZE));
775 pte_clear(&init_mm, addr, (pte_t *)pmd);
780 free_vmemmap_pages(pmd_page(*pmd), altmap, get_order(PMD_SIZE));
781 pte_clear(&init_mm, addr, (pte_t *)pmd);
787 pte_base = (pte_t *)pmd_page_vaddr(*pmd);
789 free_pte_table(pte_base, pmd);
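Fragments 761–789 walk PMD entries while unmapping: a 2M leaf is handed back outright, otherwise the walk descends into the PTE table and then tries to free it. A condensed sketch (remove_pte_table() and the direct flag are assumptions; the real code also checks addr/next alignment before freeing a leaf, which is why 774 and 780 show two separate free_vmemmap_pages() calls):

static void remove_pmd_table(pmd_t *pmd_start, unsigned long addr,
			     unsigned long end, bool direct,
			     struct vmem_altmap *altmap)
{
	unsigned long next;
	pte_t *pte_base;
	pmd_t *pmd;

	pmd = pmd_start + pmd_index(addr);
	for (; addr < end; addr = next, pmd++) {
		next = pmd_addr_end(addr, end);

		if (!pmd_present(*pmd))
			continue;

		if (pmd_leaf(*pmd)) {
			/* A 2M leaf: free the whole block, wipe the entry. */
			free_vmemmap_pages(pmd_page(*pmd), altmap,
					   get_order(PMD_SIZE));
			pte_clear(&init_mm, addr, (pte_t *)pmd);
			continue;
		}

		/* A PTE table hangs here: empty it, then try to free it. */
		pte_base = (pte_t *)pmd_page_vaddr(*pmd);
		remove_pte_table(pte_base, addr, next, direct);
		free_pte_table(pte_base, pmd);
	}
}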
1017 pmd_t *pmd;
1022 pmd = early_alloc_pgtable(PAGE_SIZE, node, 0, 0);
1023 pud_populate(&init_mm, pudp, pmd);
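Fragments 1017–1023 hook a freshly allocated PMD directory into a PUD entry; per the comment at 63, the allocation is a complete page. A sketch of what vmemmap_pmd_alloc() plausibly looks like around these lines (the slab_is_available() split is an assumption):

static pmd_t * __meminit vmemmap_pmd_alloc(pud_t *pudp, int node,
					   unsigned long address)
{
	pmd_t *pmd;

	if (pud_none(*pudp)) {
		if (unlikely(!slab_is_available()))
			/* early boot: a full page, per the comment at 63 */
			pmd = early_alloc_pgtable(PAGE_SIZE, node, 0, 0);
		else
			pmd = pmd_alloc_one(&init_mm, address);
		if (!pmd)
			return NULL;
		pud_populate(&init_mm, pudp, pmd);
	}
	return pmd_offset(pudp, address);
}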
1056 pmd_t *pmd;
1067 pmd = vmemmap_pmd_alloc(pud, node, addr);
1068 if (!pmd)
1071 if (pmd_none(READ_ONCE(*pmd))) {
1077 * For us to use a pmd mapping, both addr and pfn should
1095 vmemmap_set_pmd(pmd, p, node, addr, next);
1107 } else if (vmemmap_check_pmd(pmd, node, addr, next)) {
1120 pte = vmemmap_pte_alloc(pmd, node, addr);
1124 pte = radix__vmemmap_pte_populate(pmd, addr, node, altmap, NULL);
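Fragments 1056–1124 are the vmemmap population loop: an empty PMD gets a 2M leaf via vmemmap_set_pmd() when the alignment condition hinted at by the 1077 comment holds, an already-populated PMD is validated with vmemmap_check_pmd(), and anything else falls back to base-page PTEs. A sketch under those assumptions (vmemmap_pud_alloc() and the exact error paths are guesses):

int __meminit radix__vmemmap_populate(unsigned long start, unsigned long end,
				      int node, struct vmem_altmap *altmap)
{
	unsigned long addr, next;
	pgd_t *pgd;
	p4d_t *p4d;
	pud_t *pud;
	pmd_t *pmd;
	pte_t *pte;

	for (addr = start; addr < end; addr = next) {
		next = pmd_addr_end(addr, end);

		pgd = pgd_offset_k(addr);
		p4d = p4d_offset(pgd, addr);
		pud = vmemmap_pud_alloc(p4d, node, addr);
		if (!pud)
			return -ENOMEM;
		pmd = vmemmap_pmd_alloc(pud, node, addr);
		if (!pmd)
			return -ENOMEM;

		if (pmd_none(READ_ONCE(*pmd))) {
			void *p = NULL;

			/*
			 * For a 2M leaf both the virtual address and the
			 * backing memory must be PMD-aligned (the 1077
			 * comment makes the same point for addr and pfn).
			 */
			if (IS_ALIGNED(addr, PMD_SIZE) &&
			    IS_ALIGNED(next, PMD_SIZE))
				p = vmemmap_alloc_block_buf(PMD_SIZE, node,
							    altmap);
			if (p) {
				vmemmap_set_pmd(pmd, p, node, addr, next);
				continue;
			}
		} else if (vmemmap_check_pmd(pmd, node, addr, next)) {
			/* an existing 2M leaf already covers this range */
			continue;
		}

		/* fall back to one base page and advance a page at a time */
		pte = vmemmap_pte_alloc(pmd, node, addr);
		if (!pte)
			return -ENOMEM;
		pte = radix__vmemmap_pte_populate(pmd, addr, node, altmap, NULL);
		if (!pte)
			return -ENOMEM;
		next = addr + PAGE_SIZE;
	}
	return 0;
}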
1141 pmd_t *pmd;
1149 pmd = vmemmap_pmd_alloc(pud, node, addr);
1150 if (!pmd)
1152 if (pmd_leaf(*pmd))
1158 pte = vmemmap_pte_alloc(pmd, node, addr);
1161 radix__vmemmap_pte_populate(pmd, addr, node, NULL, NULL);
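Fragments 1141–1161 populate a single vmemmap address, bailing out early when a 2M leaf already covers it. A sketch with an assumed name (vmemmap_populate_one is hypothetical):

static int __meminit vmemmap_populate_one(unsigned long addr, int node)
{
	pgd_t *pgd = pgd_offset_k(addr);
	p4d_t *p4d = p4d_offset(pgd, addr);
	pud_t *pud;
	pmd_t *pmd;
	pte_t *pte;

	pud = vmemmap_pud_alloc(p4d, node, addr);
	if (!pud)
		return -ENOMEM;
	pmd = vmemmap_pmd_alloc(pud, node, addr);
	if (!pmd)
		return -ENOMEM;
	if (pmd_leaf(*pmd))
		return 0;	/* a 2M leaf already covers addr */
	pte = vmemmap_pte_alloc(pmd, node, addr);
	if (!pte)
		return -ENOMEM;
	radix__vmemmap_pte_populate(pmd, addr, node, NULL, NULL);
	return 0;
}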
1173 pmd_t *pmd;
1184 pmd = vmemmap_pmd_alloc(pud, node, map_addr);
1185 if (!pmd)
1187 if (pmd_leaf(*pmd))
1193 pte = vmemmap_pte_alloc(pmd, node, map_addr);
1202 * It can fall in different pmd, hence
1211 pte = radix__vmemmap_pte_populate(pmd, map_addr, node, NULL, NULL);
1236 pmd_t *pmd;
1246 pmd = vmemmap_pmd_alloc(pud, node, addr);
1247 if (!pmd)
1250 if (pmd_leaf(READ_ONCE(*pmd))) {
1256 pte = vmemmap_pte_alloc(pmd, node, addr);
1279 pte = radix__vmemmap_pte_populate(pmd, addr, node, NULL, NULL);
1286 * It can fall in different pmd, hence
1304 pte = radix__vmemmap_pte_populate(pmd, addr, node, NULL, NULL);
1314 pte = radix__vmemmap_pte_populate(pmd, addr, node, NULL, pte_page(*tail_page_pte));
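Fragments 1236–1314, with the map_addr helper at 1173–1211, handle compound (devmap) pages: only the first vmemmap addresses get fresh backing memory, and every later tail address reuses one page via pte_page(*tail_page_pte). The "It can fall in different pmd" comments at 1202 and 1286 flag that the pte being reused may sit under a different PMD than the one currently being filled. A heavily condensed sketch of the reuse idea (names and structure are assumptions; the real code also handles existing 2M leaves and a multi-page head mapping):

static int __meminit vmemmap_populate_reuse_sketch(unsigned long start,
						   unsigned long end, int node)
{
	unsigned long addr;
	pte_t *tail_page_pte = NULL;
	pgd_t *pgd;
	p4d_t *p4d;
	pud_t *pud;
	pmd_t *pmd;
	pte_t *pte;

	for (addr = start; addr < end; addr += PAGE_SIZE) {
		pgd = pgd_offset_k(addr);
		p4d = p4d_offset(pgd, addr);
		pud = vmemmap_pud_alloc(p4d, node, addr);
		if (!pud)
			return -ENOMEM;
		pmd = vmemmap_pmd_alloc(pud, node, addr);
		if (!pmd)
			return -ENOMEM;
		if (pmd_leaf(READ_ONCE(*pmd)))
			continue;	/* already covered by a 2M leaf */
		pte = vmemmap_pte_alloc(pmd, node, addr);
		if (!pte)
			return -ENOMEM;

		if (!tail_page_pte) {
			/* first address: back it with fresh memory */
			pte = radix__vmemmap_pte_populate(pmd, addr, node,
							  NULL, NULL);
			tail_page_pte = pte;
		} else {
			/* later addresses all reuse that same backing page */
			pte = radix__vmemmap_pte_populate(pmd, addr, node, NULL,
						pte_page(*tail_page_pte));
		}
		if (!pte)
			return -ENOMEM;
	}
	return 0;
}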
1382 pmd_t pmd;
1388 * khugepaged calls this for normal pmd
1390 pmd = *pmdp;
1395 return pmd;
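Fragments 1382–1395 are the pmd collapse hook khugepaged uses when replacing a PTE table with a huge pmd: capture the old value, clear the slot, flush, and hand the old pmd back. A sketch, assuming the radix flush helper and omitting the debug assertions:

pmd_t radix__pmdp_collapse_flush(struct vm_area_struct *vma,
				 unsigned long address, pmd_t *pmdp)
{
	pmd_t pmd;

	/*
	 * khugepaged calls this for a normal (non-huge) pmd: the old
	 * value is returned so the caller can free the PTE table once
	 * the slot has been cleared and the walk caches flushed.
	 */
	pmd = *pmdp;
	pmd_clear(pmdp);

	radix__flush_tlb_collapsed_pmd(vma->vm_mm, address);

	return pmd;
}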
1553 pmd_t *pmd;
1556 pmd = pud_pgtable(*pud);
1562 if (!pmd_none(pmd[i])) {
1564 pte = (pte_t *)pmd_page_vaddr(pmd[i]);
1570 pmd_free(&init_mm, pmd);
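Fragments 1553–1570 tear down a PUD-sized kernel mapping: every non-empty PMD entry has its PTE table freed, then the PMD page itself goes. A sketch (the pud_clear()/TLB-flush ordering is an assumption):

int pud_free_pmd_page(pud_t *pud, unsigned long addr)
{
	pmd_t *pmd;
	pte_t *pte;
	int i;

	pmd = pud_pgtable(*pud);
	pud_clear(pud);

	/* no walker may still see the old tables after this */
	flush_tlb_kernel_range(addr, addr + PUD_SIZE);

	for (i = 0; i < PTRS_PER_PMD; i++) {
		if (!pmd_none(pmd[i])) {
			/* free the PTE table hanging off this entry */
			pte = (pte_t *)pmd_page_vaddr(pmd[i]);
			pte_free_kernel(&init_mm, pte);
		}
	}

	pmd_free(&init_mm, pmd);
	return 1;
}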
1575 int pmd_set_huge(pmd_t *pmd, phys_addr_t addr, pgprot_t prot)
1577 pte_t *ptep = (pte_t *)pmd;
1588 int pmd_clear_huge(pmd_t *pmd)
1590 if (pmd_leaf(*pmd)) {
1591 pmd_clear(pmd);
1598 int pmd_free_pte_page(pmd_t *pmd, unsigned long addr)
1602 pte = (pte_t *)pmd_page_vaddr(*pmd);
1603 pmd_clear(pmd);
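Fragments 1575–1603 are the huge-mapping hooks for kernel mappings: pmd_set_huge() writes a leaf pte into the pmd slot (hence the (pte_t *)pmd cast at 1577), pmd_clear_huge() wipes a leaf, and pmd_free_pte_page() unhooks and frees a PTE table. Sketches following the fragments, with the radix_enabled() gating and locking details omitted:

int pmd_set_huge(pmd_t *pmd, phys_addr_t addr, pgprot_t prot)
{
	pte_t *ptep = (pte_t *)pmd;

	/* a 2M leaf is just a huge pte written into the pmd slot */
	set_pte_at(&init_mm, 0, ptep, pfn_pte(addr >> PAGE_SHIFT, prot));
	return 1;
}

int pmd_clear_huge(pmd_t *pmd)
{
	if (pmd_leaf(*pmd)) {
		pmd_clear(pmd);
		return 1;
	}
	return 0;
}

int pmd_free_pte_page(pmd_t *pmd, unsigned long addr)
{
	pte_t *pte;

	pte = (pte_t *)pmd_page_vaddr(*pmd);
	pmd_clear(pmd);

	/* make sure no walker still sees the detached table */
	flush_tlb_kernel_range(addr, addr + PMD_SIZE);

	pte_free_kernel(&init_mm, pte);
	return 1;
}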