Lines Matching refs:pmap

32  * Dispatch MI pmap calls to the appropriate MMU implementation
69 * pmap globals
71 struct pmap kernel_pmap_store;
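
The globals group is only partially visible in the matches above; a minimal sketch of how it plausibly reads, where the mmu kobj handle name is taken from the MMU_*() calls below but its declaration is an assumption, not shown in the matches:

/*
 * pmap globals
 */
struct pmap kernel_pmap_store;

/*
 * Handle for the installed MMU implementation; every MMU_*() dispatch
 * below is invoked against this kobj instance.  (Declaration assumed;
 * only uses of mmu_obj appear in the matched lines.)
 */
static mmu_t mmu_obj;
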
98 pmap_advise(pmap_t pmap, vm_offset_t start, vm_offset_t end, int advice)
101 CTR5(KTR_PMAP, "%s(%p, %#x, %#x, %d)", __func__, pmap, start, end,
103 MMU_ADVISE(mmu_obj, pmap, start, end, advice);
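
Every wrapper in this file follows the two-step pattern visible in the pmap_advise() matches: record a KTR trace of the arguments, then forward to the corresponding MMU kobj method. A sketch of the full function, assuming the truncated CTR5 continuation line carries only the remaining advice argument:

void
pmap_advise(pmap_t pmap, vm_offset_t start, vm_offset_t end, int advice)
{

	/* Trace the call, then hand it to the installed MMU implementation. */
	CTR5(KTR_PMAP, "%s(%p, %#x, %#x, %d)", __func__, pmap, start, end,
	    advice);	/* continuation argument assumed */
	MMU_ADVISE(mmu_obj, pmap, start, end, advice);
}
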
143 pmap_enter(pmap_t pmap, vm_offset_t va, vm_page_t p, vm_prot_t prot,
147 CTR6(KTR_PMAP, "pmap_enter(%p, %#x, %p, %#x, %x, %d)", pmap, va,
149 return (MMU_ENTER(mmu_obj, pmap, va, p, prot, flags, psind));
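
pmap_enter() is the value-returning variant of the same pattern: the MMU method's status is passed straight back to the MI VM layer. A sketch assuming the elided parameters are the MI u_int flags / int8_t psind pair and that the truncated CTR6 line continues with the remaining arguments:

int
pmap_enter(pmap_t pmap, vm_offset_t va, vm_page_t p, vm_prot_t prot,
    u_int flags, int8_t psind)	/* flags/psind assumed from the MI interface */
{

	CTR6(KTR_PMAP, "pmap_enter(%p, %#x, %p, %#x, %x, %d)", pmap, va,
	    p, prot, flags, psind);	/* continuation arguments assumed */
	/* Return the MMU implementation's status (e.g. KERN_SUCCESS). */
	return (MMU_ENTER(mmu_obj, pmap, va, p, prot, flags, psind));
}
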
153 pmap_enter_object(pmap_t pmap, vm_offset_t start, vm_offset_t end,
157 CTR6(KTR_PMAP, "%s(%p, %#x, %#x, %p, %#x)", __func__, pmap, start,
159 MMU_ENTER_OBJECT(mmu_obj, pmap, start, end, m_start, prot);
163 pmap_enter_quick(pmap_t pmap, vm_offset_t va, vm_page_t m, vm_prot_t prot)
166 CTR5(KTR_PMAP, "%s(%p, %#x, %p, %#x)", __func__, pmap, va, m, prot);
167 MMU_ENTER_QUICK(mmu_obj, pmap, va, m, prot);
171 pmap_extract(pmap_t pmap, vm_offset_t va)
174 CTR3(KTR_PMAP, "%s(%p, %#x)", __func__, pmap, va);
175 return (MMU_EXTRACT(mmu_obj, pmap, va));
179 pmap_extract_and_hold(pmap_t pmap, vm_offset_t va, vm_prot_t prot)
182 CTR4(KTR_PMAP, "%s(%p, %#x, %#x)", __func__, pmap, va, prot);
183 return (MMU_EXTRACT_AND_HOLD(mmu_obj, pmap, va, prot));
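
The lookup wrappers differ only in return type: pmap_extract() hands back the vm_paddr_t from MMU_EXTRACT(), while pmap_extract_and_hold() returns the held vm_page_t from MMU_EXTRACT_AND_HOLD(). A sketch of the latter, whose matched lines are already complete:

vm_page_t
pmap_extract_and_hold(pmap_t pmap, vm_offset_t va, vm_prot_t prot)
{

	CTR4(KTR_PMAP, "%s(%p, %#x, %#x)", __func__, pmap, va, prot);
	/* The MMU method returns the page held, or NULL if none is mapped. */
	return (MMU_EXTRACT_AND_HOLD(mmu_obj, pmap, va, prot));
}
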
211 pmap_is_prefaultable(pmap_t pmap, vm_offset_t va)
214 CTR3(KTR_PMAP, "%s(%p, %#x)", __func__, pmap, va);
215 return (MMU_IS_PREFAULTABLE(mmu_obj, pmap, va));
244 pmap_object_init_pt(pmap_t pmap, vm_offset_t addr, vm_object_t object,
248 CTR6(KTR_PMAP, "%s(%p, %#x, %p, %u, %#x)", __func__, pmap, addr,
250 MMU_OBJECT_INIT_PT(mmu_obj, pmap, addr, object, pindex, size);
254 pmap_page_exists_quick(pmap_t pmap, vm_page_t m)
257 CTR3(KTR_PMAP, "%s(%p, %p)", __func__, pmap, m);
258 return (MMU_PAGE_EXISTS_QUICK(mmu_obj, pmap, m));
278 pmap_pinit(pmap_t pmap)
281 CTR2(KTR_PMAP, "%s(%p)", __func__, pmap);
282 MMU_PINIT(mmu_obj, pmap);
287 pmap_pinit0(pmap_t pmap)
290 CTR2(KTR_PMAP, "%s(%p)", __func__, pmap);
291 MMU_PINIT0(mmu_obj, pmap);
295 pmap_protect(pmap_t pmap, vm_offset_t start, vm_offset_t end, vm_prot_t prot)
298 CTR5(KTR_PMAP, "%s(%p, %#x, %#x, %#x)", __func__, pmap, start, end,
300 MMU_PROTECT(mmu_obj, pmap, start, end, prot);
320 pmap_release(pmap_t pmap)
323 CTR2(KTR_PMAP, "%s(%p)", __func__, pmap);
324 MMU_RELEASE(mmu_obj, pmap);
328 pmap_remove(pmap_t pmap, vm_offset_t start, vm_offset_t end)
331 CTR4(KTR_PMAP, "%s(%p, %#x, %#x)", __func__, pmap, start, end);
332 MMU_REMOVE(mmu_obj, pmap, start, end);
344 pmap_remove_pages(pmap_t pmap)
347 CTR2(KTR_PMAP, "%s(%p)", __func__, pmap);
348 MMU_REMOVE_PAGES(mmu_obj, pmap);
360 pmap_unwire(pmap_t pmap, vm_offset_t start, vm_offset_t end)
363 CTR4(KTR_PMAP, "%s(%p, %#x, %#x)", __func__, pmap, start, end);
364 MMU_UNWIRE(mmu_obj, pmap, start, end);
392 pmap_mincore(pmap_t pmap, vm_offset_t addr, vm_paddr_t *locked_pa)
395 CTR3(KTR_PMAP, "%s(%p, %#x)", __func__, pmap, addr);
396 return (MMU_MINCORE(mmu_obj, pmap, addr, locked_pa));
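
pmap_mincore() shows the out-parameter case: locked_pa is forwarded untouched so the MMU implementation can report a locked physical address back to the caller. A sketch of the full wrapper, assuming an int return as in the MI pmap interface:

int
pmap_mincore(pmap_t pmap, vm_offset_t addr, vm_paddr_t *locked_pa)
{

	CTR3(KTR_PMAP, "%s(%p, %#x)", __func__, pmap, addr);
	/* locked_pa is filled in by the MMU implementation as needed. */
	return (MMU_MINCORE(mmu_obj, pmap, addr, locked_pa));
}
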