VTD_PAGE_SIZE     864 drivers/iommu/dmar.c 		addr = ioremap(drhd->address, VTD_PAGE_SIZE);
VTD_PAGE_SIZE     866 drivers/iommu/dmar.c 		addr = early_ioremap(drhd->address, VTD_PAGE_SIZE);
VTD_PAGE_SIZE     878 drivers/iommu/dmar.c 		early_iounmap(addr, VTD_PAGE_SIZE);
VTD_PAGE_SIZE     940 drivers/iommu/dmar.c 	iommu->reg_size = VTD_PAGE_SIZE;
VTD_PAGE_SIZE      53 drivers/iommu/intel-iommu.c #define ROOT_SIZE		VTD_PAGE_SIZE
VTD_PAGE_SIZE      54 drivers/iommu/intel-iommu.c #define CONTEXT_SIZE		VTD_PAGE_SIZE
VTD_PAGE_SIZE     185 drivers/iommu/intel-iommu.c #define ROOT_ENTRY_NR (VTD_PAGE_SIZE/sizeof(struct root_entry))
VTD_PAGE_SIZE     914 drivers/iommu/intel-iommu.c 			domain_flush_cache(domain, tmp_page, VTD_PAGE_SIZE);
VTD_PAGE_SIZE    1795 drivers/iommu/intel-iommu.c 	init_iova_domain(&reserved_iova_list, VTD_PAGE_SIZE, IOVA_START_PFN);
VTD_PAGE_SIZE    1854 drivers/iommu/intel-iommu.c 	init_iova_domain(&domain->iovad, VTD_PAGE_SIZE, IOVA_START_PFN);
VTD_PAGE_SIZE    2310 drivers/iommu/intel-iommu.c 		pteval += lvl_pages * VTD_PAGE_SIZE;
VTD_PAGE_SIZE    3013 drivers/iommu/intel-iommu.c 						    VTD_PAGE_SIZE);
VTD_PAGE_SIZE    3083 drivers/iommu/intel-iommu.c 	__iommu_flush_cache(iommu, new_ce, VTD_PAGE_SIZE);
VTD_PAGE_SIZE    3811 drivers/iommu/intel-iommu.c 	size_t aligned_size = ALIGN(size, VTD_PAGE_SIZE);
VTD_PAGE_SIZE    3848 drivers/iommu/intel-iommu.c 	if (!IS_ALIGNED(paddr | size, VTD_PAGE_SIZE)) {
VTD_PAGE_SIZE    3897 drivers/iommu/intel-iommu.c 	size_t aligned_size = ALIGN(size, VTD_PAGE_SIZE);
VTD_PAGE_SIZE    5142 drivers/iommu/intel-iommu.c 	init_iova_domain(&domain->iovad, VTD_PAGE_SIZE, IOVA_START_PFN);
VTD_PAGE_SIZE    5491 drivers/iommu/intel-iommu.c 	if (size < VTD_PAGE_SIZE << level_to_offset_bits(level))
VTD_PAGE_SIZE    5492 drivers/iommu/intel-iommu.c 		size = VTD_PAGE_SIZE << level_to_offset_bits(level);
VTD_PAGE_SIZE      32 include/linux/intel-iommu.h #define VTD_PAGE_ALIGN(addr)	(((addr) + VTD_PAGE_SIZE - 1) & VTD_PAGE_MASK)
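
The clamp at intel-iommu.c:5491-5492 widens an invalidation size to the coverage of the page-table level that was actually unmapped. A minimal userspace sketch of that arithmetic follows; it assumes the conventional VT-d layout (4 KiB base pages, a 9-bit stride per level), and level_to_offset_bits() is restated here for illustration rather than taken verbatim from the driver.

/*
 * Illustration of the size clamp at intel-iommu.c:5491-5492.
 * Assumed layout: 4 KiB base pages and a 9-bit stride per level,
 * so level 1 covers 4 KiB, level 2 covers 2 MiB, level 3 covers 1 GiB.
 */
#include <stdio.h>

#define VTD_PAGE_SHIFT	12			/* assumed: 4 KiB VT-d pages */
#define VTD_PAGE_SIZE	(1UL << VTD_PAGE_SHIFT)
#define LEVEL_STRIDE	9			/* assumed 9-bit stride per level */

static int level_to_offset_bits(int level)
{
	return (level - 1) * LEVEL_STRIDE;	/* restated for illustration */
}

int main(void)
{
	/* A 16 KiB request that hit a level-2 (2 MiB) entry gets widened. */
	unsigned long size = 16 * 1024;
	int level = 2;

	if (size < VTD_PAGE_SIZE << level_to_offset_bits(level))
		size = VTD_PAGE_SIZE << level_to_offset_bits(level);

	printf("flush size: %lu bytes (%lu MiB)\n", size, size >> 20);
	return 0;
}

Run as-is this prints a 2 MiB flush size: rounding up to the level's coverage keeps the IOTLB invalidation aligned to what the hardware actually cached for that entry.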
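
The macro at intel-iommu.h:32 rounds an address up to the next VT-d page boundary using the usual add-then-mask idiom. The sketch below reproduces that arithmetic standalone; the companion constants (VTD_PAGE_SHIFT of 12, VTD_PAGE_SIZE and VTD_PAGE_MASK derived from it) are restated assumptions for illustration, not copied from the header.

/*
 * Standalone illustration of VTD_PAGE_ALIGN (intel-iommu.h:32).
 * Build with: cc -o vtd_align vtd_align.c
 */
#include <stdio.h>
#include <stdint.h>

#define VTD_PAGE_SHIFT		12		/* assumed: 4 KiB VT-d pages */
#define VTD_PAGE_SIZE		(1UL << VTD_PAGE_SHIFT)
#define VTD_PAGE_MASK		(((uint64_t)-1) << VTD_PAGE_SHIFT)
#define VTD_PAGE_ALIGN(addr)	(((addr) + VTD_PAGE_SIZE - 1) & VTD_PAGE_MASK)

int main(void)
{
	uint64_t addrs[] = { 0x0, 0x1, 0x1000, 0x1001, 0x1fff };

	for (size_t i = 0; i < sizeof(addrs) / sizeof(addrs[0]); i++)
		printf("%#10llx -> %#10llx\n",
		       (unsigned long long)addrs[i],
		       (unsigned long long)VTD_PAGE_ALIGN(addrs[i]));

	/* e.g. 0x1001 rounds up to 0x2000, while 0x1000 stays 0x1000. */
	return 0;
}

Adding VTD_PAGE_SIZE - 1 before masking means an already-aligned address is unchanged, while any partial page bumps to the next boundary; the same pattern underlies the ALIGN(size, VTD_PAGE_SIZE) calls at intel-iommu.c:3811 and 3897.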