Searched refs: VTD_PAGE_SIZE (results 1 - 6 of 6), sorted by last modified time

/linux-master/drivers/iommu/intel/
pasid.c
    165  clflush_cache_range(entries, VTD_PAGE_SIZE);

nested.c
    117  if (!IS_ALIGNED(inv_entry.addr, VTD_PAGE_SIZE) ||

iommu.h
     34  #define VTD_PAGE_SIZE (1UL << VTD_PAGE_SHIFT)
     36  #define VTD_PAGE_ALIGN(addr) (((addr) + VTD_PAGE_SIZE - 1) & VTD_PAGE_MASK)
    864  return IS_ALIGNED((unsigned long)pte, VTD_PAGE_SIZE);
    870  (struct dma_pte *)ALIGN((unsigned long)pte, VTD_PAGE_SIZE) - pte;

iommu.c
     35  #define ROOT_SIZE VTD_PAGE_SIZE
     36  #define CONTEXT_SIZE VTD_PAGE_SIZE
     69  #define ROOT_ENTRY_NR (VTD_PAGE_SIZE/sizeof(struct root_entry))
    854  domain_flush_cache(domain, tmp_page, VTD_PAGE_SIZE);
   1932  pteval += lvl_pages * VTD_PAGE_SIZE;
   2261  VTD_PAGE_SIZE);
   2313  __iommu_flush_cache(iommu, new_ce, VTD_PAGE_SIZE);
   3867  if (size < VTD_PAGE_SIZE << level_to_offset_bits(level))
   3868  size = VTD_PAGE_SIZE << level_to_offset_bits(level);

dmar.c
    894  addr = ioremap(drhd->address, VTD_PAGE_SIZE);
    896  addr = early_ioremap(drhd->address, VTD_PAGE_SIZE);
    908  early_iounmap(addr, VTD_PAGE_SIZE);
   1645  if (!IS_ALIGNED(addr, VTD_PAGE_SIZE << size_order))

cache.c
    254  return ALIGN_DOWN(start, VTD_PAGE_SIZE << mask);
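
Note: the iommu.h hits are the definition site of the macro family. Below is a minimal, standalone sketch of how those macros fit together, assuming VTD_PAGE_SHIFT is 12 (the 4 KiB VT-d base page, as in the driver header); the main() harness is illustrative only, not kernel code. The iommu.c ROOT_SIZE/CONTEXT_SIZE hits size the root and context tables to exactly one such page, which with 16-byte root entries makes ROOT_ENTRY_NR = 4096 / 16 = 256, one entry per PCI bus number.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Userspace mirror of the iommu.h macros (VTD_PAGE_SHIFT assumed to be 12). */
#define VTD_PAGE_SHIFT       12
#define VTD_PAGE_SIZE        (1UL << VTD_PAGE_SHIFT)
#define VTD_PAGE_MASK        (((uint64_t)-1) << VTD_PAGE_SHIFT)
#define VTD_PAGE_ALIGN(addr) (((addr) + VTD_PAGE_SIZE - 1) & VTD_PAGE_MASK)

int main(void)
{
	uint64_t addr = 0x12345;	/* arbitrary unaligned address */

	/* VTD_PAGE_ALIGN rounds up to the next 4 KiB VT-d page boundary. */
	printf("VTD_PAGE_SIZE = %#lx\n", VTD_PAGE_SIZE);
	printf("VTD_PAGE_ALIGN(%#" PRIx64 ") = %#" PRIx64 "\n",
	       addr, (uint64_t)VTD_PAGE_ALIGN(addr));	/* prints 0x13000 */
	return 0;
}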

Completed in 244 milliseconds
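
A side note on the dmar.c and cache.c hits: both scale VTD_PAGE_SIZE by a size order to form a power-of-two invalidation granule, which the invalidation address must respect. The following hedged userspace sketch shows that pattern with local stand-ins for the kernel's IS_ALIGNED and ALIGN_DOWN helpers; the size_order and address values are made up for illustration.

#include <stdint.h>
#include <stdio.h>

#define VTD_PAGE_SHIFT  12
#define VTD_PAGE_SIZE   (1UL << VTD_PAGE_SHIFT)

/* Local stand-ins for the kernel helpers used in dmar.c and cache.c
 * (valid for power-of-two alignments only). */
#define IS_ALIGNED(x, a)  (((x) & ((uint64_t)(a) - 1)) == 0)
#define ALIGN_DOWN(x, a)  ((x) & ~((uint64_t)(a) - 1))

int main(void)
{
	unsigned int size_order = 2;	/* 4 pages => 16 KiB granule */
	uint64_t granule = VTD_PAGE_SIZE << size_order;
	uint64_t addr = 0x25000;	/* illustrative address */

	/* dmar.c rejects invalidation addresses not aligned to the granule... */
	printf("aligned to %#llx: %s\n", (unsigned long long)granule,
	       IS_ALIGNED(addr, granule) ? "yes" : "no");

	/* ...while cache.c rounds the start of a flush range down to it. */
	printf("ALIGN_DOWN(%#llx) = %#llx\n", (unsigned long long)addr,
	       (unsigned long long)ALIGN_DOWN(addr, granule));
	return 0;
}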