/linux-master/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmmnv44.c
    27   nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, dma_addr_t *list, u32 ptei, u32 ptes)   [argument]
    38   while (ptes--) {
    73   nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)   [argument]
    79   const u32 pten = min(ptes, 4 - (ptei & 3));
    84   ptes -= pten;
    87   while (ptes >= 4) {
    94   ptes -= 4;
    97   if (ptes) {
    98   for (i = 0; i < ptes; i++, addr += 0x1000)
    100  nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, ptes);
    105  nv44_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    112  nv44_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    147  nv44_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    (additional matches not shown)
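The nv44 hits trace a head/body/tail split: NV44's page table is updated in naturally aligned groups of four 32-bit entries, so nv44_vmm_pgt_pte() peels off a leading partial group (line 79), streams whole groups of four (line 87), and hands any trailing remainder to nv44_vmm_pgt_fill() (lines 97-100). A minimal user-space model of that split; all names here (write_group, map_ptes) are illustrative, not the driver's:

#include <stdint.h>
#include <stdio.h>

/* Write 'count' values into one naturally aligned group of four
 * entries, starting at offset 'first' within the group at 'base'. */
static void write_group(uint32_t *pt, uint32_t base, const uint32_t *vals,
                        uint32_t first, uint32_t count)
{
    for (uint32_t i = 0; i < count; i++)
        pt[base + first + i] = vals[i];
}

/* Head/body/tail split over [ptei, ptei + ptes). */
static void map_ptes(uint32_t *pt, uint32_t ptei, uint32_t ptes,
                     const uint32_t *vals)
{
    /* Head: up to the next multiple-of-four boundary. */
    uint32_t pten = 4 - (ptei & 3);

    if (pten > ptes)
        pten = ptes;
    write_group(pt, ptei & ~3u, vals, ptei & 3, pten);
    ptei += pten;
    vals += pten;
    ptes -= pten;

    /* Body: whole aligned groups of four. */
    while (ptes >= 4) {
        write_group(pt, ptei, vals, 0, 4);
        ptei += 4;
        vals += 4;
        ptes -= 4;
    }

    /* Tail: any trailing partial group. */
    if (ptes)
        write_group(pt, ptei, vals, 0, ptes);
}

int main(void)
{
    uint32_t pt[16] = {0}, vals[9];

    for (uint32_t i = 0; i < 9; i++)
        vals[i] = 0x1000 * (i + 1);
    map_ptes(pt, 3, 9, vals);   /* head of 1, two full groups, no tail */
    for (int i = 0; i < 16; i++)
        printf("%2d: %#x\n", i, pt[i]);
    return 0;
}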
vmmnv41.c
    27   nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)   [argument]
    31   while (ptes--) {
    38   nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    41   VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
    45   nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    50   while (ptes--) {
    56   VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
    61   nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    64   VMM_FO032(pt, vmm, ptei * 4, 0, ptes);
vmmnv04.c
    28   nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)   [argument]
    32   while (ptes--) {
    39   nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    42   VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
    46   nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    51   while (ptes--)
    55   VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
    60   nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    63   VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes);
vmmgp100.c
    34   gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    41   while (ptes--) {
    55   gp100_vmm_pfn_clear(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    60   while (ptes--) {
    75   gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    82   for (; ptes; ptes--, map->pfn++) {
    115  gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)   [argument]
    120  map->type += ptes * map->ctag;
    122  while (ptes  (truncated)
    129  gp100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    136  gp100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    155  gp100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    162  gp100_vmm_pgt_sparse(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    182  gp100_vmm_lpt_invalid(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    198  gp100_vmm_pd0_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)   [argument]
    212  gp100_vmm_pd0_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    268  gp100_vmm_pd0_pfn_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    290  gp100_vmm_pd0_pfn_clear(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    312  gp100_vmm_pd0_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    (additional matches not shown)
vmmnv50.c
    32   nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)   [argument]
    39   map->type += ptes * map->ctag;
    41   while (ptes) {
    44   if (ptes >= pten && IS_ALIGNED(ptei, pten))
    50   ptes -= pten;
    58   nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    61   VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
    65   nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    69   VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
    71   while (ptes  (truncated)
    84   nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    91   nv50_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    (additional matches not shown)
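The condition at line 44 is the interesting hit: nv50 can emit one "big" PTE covering pten small pages, but only when the starting index is aligned to the block and at least pten entries remain. A tiny standalone rendering of just that test (can_use_block is a made-up name):

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

/* A run of PTEs may be written as one big block only when the starting
 * index is aligned to the block size and enough entries remain; 'pten'
 * plays the role of the block size in PTEs. */
static bool can_use_block(uint32_t ptei, uint32_t ptes, uint32_t pten)
{
    return ptes >= pten && (ptei % pten) == 0;
}

int main(void)
{
    /* e.g. 16 small pages per 64 KiB block */
    printf("%d\n", can_use_block(16, 32, 16));  /* 1: aligned, enough left */
    printf("%d\n", can_use_block(18, 32, 16));  /* 0: misaligned start */
    return 0;
}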
vmmgk104.c
    25   gk104_vmm_lpt_invalid(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    29   VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes);
vmmgf100.c
    32   gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)   [argument]
    39   while (ptes--) {
    48   map->type += ptes * map->ctag;
    50   while (ptes--) {
    58   gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    61   VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
    65   gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    69   VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
    71   while (ptes--) {
    80   VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, ma  (truncated)
    84   gf100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, struct nvkm_vmm_map *map)   [argument]
    91   gf100_vmm_pgt_unmap(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    (additional matches not shown)
vmm.c
    197  nvkm_vmm_unref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt, const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes)   [argument]
    209  for (lpti = ptei >> sptb; ptes; spti = 0, lpti++) {
    210  const u32 pten = min(sptn - spti, ptes);
    212  ptes -= pten;
    222  for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) {
    236  for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) {
    243  TRA(it, "LPTE %05x: U -> S %d PTEs", pteb, ptes);
    244  pair->func->sparse(vmm, pgt->pt[0], pteb, ptes);
    258  nvkm_vmm_unref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)   [argument]
    296  nvkm_vmm_ref_sptes(struct nvkm_vmm_iter *it, struct nvkm_vmm_pt *pgt, const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes)   [argument]
    364  nvkm_vmm_ref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)   [argument]
    381  nvkm_vmm_sparse_ptes(const struct nvkm_vmm_desc *desc, struct nvkm_vmm_pt *pgt, u32 ptei, u32 ptes)   [argument]
    394  nvkm_vmm_sparse_unref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)   [argument]
    406  nvkm_vmm_sparse_ref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)   [argument]
    423  u32 pteb, ptei, ptes;   [local]
    536  const u32 ptes = min_t(u64, it.cnt, pten - ptei);   [local]
    1915 nvkm_vmm_boot_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)   [argument]
    (additional matches not shown)
vmmgm200.c
    28   gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)   [argument]
    32   VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes);
vmm.h
    54   struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
    58   u32 ptei, u32 ptes, struct nvkm_vmm_map *);
    72   bool (*pfn_clear)(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
/linux-master/arch/x86/xen/ |
grant-table.c
    27   pte_t **ptes;   [member of struct gnttab_vm_area]
    45   set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i],
    67   set_pte_at(&init_mm, addr, gnttab_status_vm_area.ptes[i],
    77   pte_t **ptes;   [local]
    82   ptes = gnttab_status_vm_area.ptes;
    84   ptes = gnttab_shared_vm_area.ptes;
    89   set_pte_at(&init_mm, addr, ptes[i], __pte(0));
    98   area->ptes[are  (truncated)
    (additional matches not shown)
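The pattern behind these hits: the Xen grant-table code walks a mapped area once, saves a pte_t * for every page, and later rewrites or clears those PTEs directly with set_pte_at(). A hedged sketch of the capture step using apply_to_page_range() and its current callback signature; the collector struct and field names are illustrative, not the ones in grant-table.c:

#include <linux/mm.h>

/* Illustrative collector; grant-table.c keeps the same data in its
 * struct gnttab_vm_area. */
struct pte_collector {
    pte_t **ptes;
    unsigned int idx;
};

/* apply_to_page_range() invokes this once per PTE in the range. */
static int collect_pte(pte_t *pte, unsigned long addr, void *data)
{
    struct pte_collector *c = data;

    c->ptes[c->idx++] = pte;
    return 0;
}

static int collect_ptes(struct pte_collector *c, unsigned long addr,
                        unsigned long size)
{
    return apply_to_page_range(&init_mm, addr, size, collect_pte, c);
}

/* Later the saved pointers can be rewritten or torn down directly,
 * matching the hits at lines 45, 67 and 89:
 *     set_pte_at(&init_mm, addr, c->ptes[i], __pte(0));
 */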
/linux-master/block/partitions/ |
efi.c
    267  * Description: Returns ptes on success, NULL on error.
    330  * @ptes: PTEs ptr, filled on return.
    335  is_gpt_valid(struct parsed_partitions *state, u64 lba, gpt_header **gpt, gpt_entry **ptes)   [argument]
    341  if (!ptes)
    430  if (!(*ptes = alloc_read_gpt_entries(state, *gpt)))
    434  crc = efi_crc32((const unsigned char *) (*ptes), pt_size);
    445  kfree(*ptes);
    446  *ptes = NULL;
    569  * @ptes: PTEs ptr, filled on return.
    581  find_valid_gpt(struct parsed_partitions *state, gpt_header **gpt, gpt_entry **ptes)   [argument]
    716  gpt_entry *ptes = NULL;   [local]
    (additional matches not shown)
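Lines 430-446 show the validation order for the GPT partition-entry array: read it from disk, CRC the whole blob, compare against the header's recorded CRC, and throw the array away on mismatch. A condensed sketch of just that step, assuming the gpt_header field names from block/partitions/efi.h and the alloc_read_gpt_entries() helper visible above; error paths beyond the CRC check are elided:

/* Sketch only: condensed from the hits above, not the full is_gpt_valid(). */
static int entries_crc_ok(struct parsed_partitions *state,
                          gpt_header *gpt, gpt_entry **ptes)
{
    u64 pt_size;
    u32 crc;

    pt_size = (u64)le32_to_cpu(gpt->num_partition_entries) *
              le32_to_cpu(gpt->sizeof_partition_entry);

    if (!(*ptes = alloc_read_gpt_entries(state, gpt)))
        return 0;

    crc = efi_crc32((const unsigned char *)(*ptes), pt_size);
    if (crc != le32_to_cpu(gpt->partition_entry_array_crc32)) {
        kfree(*ptes);
        *ptes = NULL;   /* caller must not use a failed array */
        return 0;
    }
    return 1;
}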
/linux-master/arch/alpha/kernel/ |
pci_iommu.c
    79   arena->ptes = memblock_alloc(mem_size, align);
    80   if (!arena->ptes)
    108  unsigned long *ptes;   [local]
    118  /* Search forward for the first mask-aligned sequence of N free ptes */
    119  ptes = arena->ptes;
    131  if (ptes[p+i]) {
    164  unsigned long *ptes;   [local]
    170  /* Search for N empty ptes */
    171  ptes  (truncated)
    542  unsigned long *ptes;   [local]
    832  unsigned long *ptes;   [local]
    862  unsigned long *ptes;   [local]
    883  unsigned long *ptes;   [local]
    (additional matches not shown)
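The comment at line 118 describes the allocator's core: a linear scan of arena->ptes (nonzero means in use) for a mask-aligned run of N free entries, skipping past any busy slot and realigning. A simplified user-space model of that scan (find_free_run is a made-up name):

#include <stddef.h>
#include <stdio.h>

/* Find the first mask-aligned run of n free PTEs in a table where
 * nonzero means "in use".  Returns the starting index or -1. */
static long find_free_run(const unsigned long *ptes, size_t nent,
                          size_t n, unsigned long mask)
{
    size_t p = 0;   /* 'mask' aligns runs, e.g. 7 for 8-entry alignment */

    while (p + n <= nent) {
        size_t i;

        for (i = 0; i < n && !ptes[p + i]; i++)
            ;
        if (i == n)
            return (long)p;

        /* Skip past the busy entry and realign. */
        p = (p + i + 1 + mask) & ~mask;
    }
    return -1;
}

int main(void)
{
    unsigned long t[16] = { [3] = 1 };              /* entry 3 busy */
    printf("%ld\n", find_free_run(t, 16, 4, 3));    /* 4: skip, realign */
    return 0;
}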
pci_impl.h
    139  unsigned long *ptes;   [member of struct pci_iommu_arena]
core_titan.c
    331  port->tba[0].csr = virt_to_phys(hose->sg_isa->ptes);
    339  port->tba[2].csr = virt_to_phys(hose->sg_pci->ptes);
    463  unsigned long *ptes;   [local]
    467  unsigned long *ptes;
    522  ptes = hose->sg_pci->ptes;
    526  pfn = ptes[baddr >> PAGE_SHIFT];
    716  pte = aper->arena->ptes[baddr >> PAGE_SHIFT];
core_marvel.c
    299  csrs->POx_TBASE[0].csr = virt_to_phys(hose->sg_isa->ptes);
    316  csrs->POx_TBASE[2].csr = virt_to_phys(hose->sg_pci->ptes);
    690  unsigned long *ptes;   [local]
    699  unsigned long *ptes;
    754  ptes = hose->sg_pci->ptes;
    758  pfn = ptes[baddr >> PAGE_SHIFT];
    1012 pte = aper->arena->ptes[baddr >> PAGE_SHIFT];
core_cia.c
    465  arena->ptes[4] = pte0;
    489  arena->ptes[5] = pte0;
    525  arena->ptes[4] = 0;
    526  arena->ptes[5] = 0;
    740  *(vip)CIA_IOC_PCI_T0_BASE = virt_to_phys(hose->sg_isa->ptes) >> 2;
core_mcpcia.c
    380  *(vuip)MCPCIA_T0_BASE(mid) = virt_to_phys(hose->sg_isa->ptes) >> 8;
    384  *(vuip)MCPCIA_T1_BASE(mid) = virt_to_phys(hose->sg_pci->ptes) >> 8;
core_tsunami.c
    339  pchip->tba[0].csr = virt_to_phys(hose->sg_isa->ptes);
    343  pchip->tba[1].csr = virt_to_phys(hose->sg_pci->ptes);
/linux-master/arch/powerpc/mm/ptdump/ |
hashpagetable.c
    244  } ptes[4];   [local]
    262  lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes);
    267  if (HPTE_V_COMPARE(ptes[j].v, want_v) &&
    268  (ptes[j].v & HPTE_V_VALID)) {
    270  *v = ptes[j].v;
    271  *r = ptes[j].r;
/linux-master/arch/powerpc/include/asm/ |
plpar_wrappers.h
    173  * ptes must be 8*sizeof(unsigned long)
    175  plpar_pte_read_4(unsigned long flags, unsigned long ptex, unsigned long *ptes)   [argument]
    184  memcpy(ptes, retbuf, 8*sizeof(unsigned long));
    191  * ptes must be 8*sizeof(unsigned long)
    193  plpar_pte_read_4_raw(unsigned long flags, unsigned long ptex, unsigned long *ptes)   [argument]
    202  memcpy(ptes, retbuf, 8*sizeof(unsigned long));
    605  plpar_pte_read_4(unsigned long flags, unsigned long ptex, unsigned long *ptes)   [argument]
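These wrappers read four hashed page table entries per H_READ hypercall: the H_READ_4 flag makes the hcall return eight registers, one (v, r) pair per HPTE, which are copied into the caller's 8-long buffer, and the powerpc hits above (hashpagetable.c, lpar.c) then scan the group of four for a valid, matching entry. A sketch of both halves, reconstructed from the visible fragments; pte_read_group() and find_hpte() are illustrative names, the real wrappers being plpar_pte_read_4() and plpar_pte_read_4_raw(), and the includes assume a book3s-64 build:

#include <linux/string.h>
#include <asm/hvcall.h>
#include <asm/book3s/64/mmu-hash.h>
#include <asm/plpar_wrappers.h>

/* One H_READ hypercall with H_READ_4 returns eight registers: a (v, r)
 * doubleword pair for each of four consecutive HPTEs.  The caller's
 * buffer must hold 8*sizeof(unsigned long), per the comment at 173. */
static inline long pte_read_group(unsigned long flags, unsigned long ptex,
                                  unsigned long *ptes /* 8 longs */)
{
    unsigned long retbuf[PLPAR_HCALL9_BUFSIZE];
    long rc;

    rc = plpar_hcall9(H_READ, retbuf, flags | H_READ_4, ptex);
    memcpy(ptes, retbuf, 8 * sizeof(unsigned long));
    return rc;
}

/* Consumer shape from the hashpagetable.c hits: scan the group of four
 * for a valid entry whose v word matches want_v. */
static int find_hpte(unsigned long hpte_group, unsigned long want_v,
                     unsigned long *v, unsigned long *r)
{
    struct { unsigned long v, r; } ptes[4];
    int j;

    if (pte_read_group(0, hpte_group, (void *)ptes) != H_SUCCESS)
        return -1;

    for (j = 0; j < 4; j++) {
        if (HPTE_V_COMPARE(ptes[j].v, want_v) &&
            (ptes[j].v & HPTE_V_VALID)) {
            *v = ptes[j].v;
            *r = ptes[j].r;
            return j;
        }
    }
    return -1;
}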
/linux-master/arch/arm64/kernel/pi/ |
map_kernel.c
    190  static u8 ptes[INIT_IDMAP_FDT_SIZE] __initdata __aligned(PAGE_SIZE);
    192  u64 ptep = (u64)ptes;
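These two lines suggest the usual early-boot idiom: no allocator is up yet, so a static, page-aligned __initdata buffer is carved up linearly, with ptep acting as the bump pointer. A user-space model of that idiom, assuming the buffer really is consumed as a linear pool; sizes and names here are illustrative:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define PAGE_SIZE    4096
#define SCRATCH_SIZE (8 * PAGE_SIZE)

/* Static, page-aligned pool, consumed front to back. */
static uint8_t ptes[SCRATCH_SIZE] __attribute__((aligned(PAGE_SIZE)));
static uint8_t *ptep = ptes;

/* Hand out one zeroed, page-aligned chunk at a time; NULL when the
 * pool is exhausted. */
static void *alloc_pt_page(void)
{
    uint8_t *p = ptep;

    if (p + PAGE_SIZE > ptes + SCRATCH_SIZE)
        return NULL;
    ptep += PAGE_SIZE;
    memset(p, 0, PAGE_SIZE);
    return p;
}

int main(void)
{
    void *first = alloc_pt_page();
    void *second = alloc_pt_page();

    printf("%p %p\n", first, second);   /* PAGE_SIZE apart */
    return 0;
}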
/linux-master/arch/powerpc/platforms/pseries/ |
lpar.c
    854  } ptes[4];   [local]
    863  lpar_rc = plpar_pte_read_4_raw(0, i, (void *)ptes);
    870  if ((ptes[j].pteh & HPTE_V_VRMA_MASK) ==
    873  if (ptes[j].pteh & HPTE_V_VALID)
    875  &(ptes[j].pteh), &(ptes[j].ptel));
    965  } ptes[4];   [local]
    969  lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes);
    977  if (HPTE_V_COMPARE(ptes[j].pteh, want_v) &&
    978  (ptes[  (truncated)
    (additional matches not shown)
/linux-master/arch/powerpc/mm/ |
hugetlbpage.c
    258  void *ptes[];   [member of struct hugepd_freelist]
    270  kmem_cache_free(PGT_CACHE(PTE_T_ORDER), batch->ptes[i]);
    293  (*batchp)->ptes[(*batchp)->index++] = hugepte;
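The hugepd_freelist hits show deferred batching: instead of freeing each huge-page-table fragment immediately, pointers are accumulated in a batch (line 293) and released together later via kmem_cache_free() (line 270), in the kernel only after an RCU grace period. A user-space model of the batching itself; the RCU deferral is out of scope here and BATCH_MAX is illustrative:

#include <stdlib.h>

#define BATCH_MAX 8

/* Accumulate pointers to free, releasing the whole batch at once. */
struct freelist_batch {
    unsigned int index;
    void *ptes[BATCH_MAX];
};

static void flush_batch(struct freelist_batch *b)
{
    for (unsigned int i = 0; i < b->index; i++)
        free(b->ptes[i]);          /* kernel: kmem_cache_free() */
    b->index = 0;
}

static void defer_free(struct freelist_batch *b, void *hugepte)
{
    b->ptes[b->index++] = hugepte;
    if (b->index == BATCH_MAX)
        flush_batch(b);
}

int main(void)
{
    struct freelist_batch batch = { 0 };

    for (int i = 0; i < 10; i++)
        defer_free(&batch, malloc(64));
    flush_batch(&batch);            /* drain the remainder */
    return 0;
}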
/linux-master/arch/x86/kvm/mmu/ |
paging_tmpl.h
    84   pt_element_t ptes[PT_MAX_FULL_LEVELS];   [member of struct guest_walker]
    213  pte = orig_pte = walker->ptes[level - 1];
    254  walker->ptes[level - 1] = pte;
    425  walker->ptes[walker->level - 1] = pte;
    581  return r || curr_pte != gw->ptes[level - 1];
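The guest_walker hits sketch KVM's stale-walk defense: each level's guest PTE is cached in ptes[] during the walk (lines 254, 425), and line 581 re-reads the current PTE and compares it with the cached copy to detect concurrent guest modification. A standalone model of that recheck; the types and read_guest_pte() stub are illustrative:

#include <stdint.h>
#include <stdbool.h>

typedef uint64_t pt_element_t;
#define PT_MAX_FULL_LEVELS 5

/* Cached guest PTEs, one per level walked. */
struct guest_walker {
    int level;
    pt_element_t ptes[PT_MAX_FULL_LEVELS];
};

/* Stub standing in for a guest-memory read that can fail. */
static int read_guest_pte(int level, pt_element_t *out)
{
    (void)level;
    *out = 0;
    return 0;
}

/* Before trusting the cached walk, re-read the PTE for 'level' and
 * compare with the copy saved during the walk; a mismatch means the
 * guest changed its page tables underneath us. */
static bool walk_still_valid(const struct guest_walker *gw, int level)
{
    pt_element_t curr_pte;

    if (read_guest_pte(level, &curr_pte))
        return false;
    return curr_pte == gw->ptes[level - 1];
}

int main(void)
{
    struct guest_walker gw = { .level = 2, .ptes = { 0, 0 } };
    return walk_still_valid(&gw, 2) ? 0 : 1;
}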