dma_dom          1763 drivers/iommu/amd_iommu.c 					struct dma_ops_domain *dma_dom,
dma_dom          1771 drivers/iommu/amd_iommu.c 		pfn = alloc_iova_fast(&dma_dom->iovad, pages,
dma_dom          1775 drivers/iommu/amd_iommu.c 		pfn = alloc_iova_fast(&dma_dom->iovad, pages,
dma_dom          1781 drivers/iommu/amd_iommu.c static void dma_ops_free_iova(struct dma_ops_domain *dma_dom,
dma_dom          1788 drivers/iommu/amd_iommu.c 	free_iova_fast(&dma_dom->iovad, address, pages);
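
The two alloc_iova_fast() calls at lines 1771 and 1775 form a two-pass allocation: the driver first tries to place the IOVA below 4 GiB, and only retries against the device's full DMA mask if that range is exhausted. A minimal standalone sketch of that pattern; try_alloc_range() is an invented stand-in for alloc_iova_fast():

	#include <stdint.h>

	#define DEMO_PAGE_SHIFT 12
	#define DEMO_BIT_MASK32 0xffffffffULL

	/* invented stand-in for alloc_iova_fast(): returns a start pfn, 0 on failure */
	static unsigned long try_alloc_range(unsigned int pages, unsigned long limit_pfn)
	{
		return limit_pfn > pages ? limit_pfn - pages : 0;	/* toy behaviour */
	}

	static uint64_t alloc_iova_two_pass(unsigned int pages, uint64_t dma_mask)
	{
		unsigned long pfn = 0;

		/* first pass: prefer IOVAs below 4 GiB */
		if (dma_mask > DEMO_BIT_MASK32)
			pfn = try_alloc_range(pages, DEMO_BIT_MASK32 >> DEMO_PAGE_SHIFT);

		/* second pass: fall back to the device's full DMA mask */
		if (!pfn)
			pfn = try_alloc_range(pages, dma_mask >> DEMO_PAGE_SHIFT);

		return (uint64_t)pfn << DEMO_PAGE_SHIFT;	/* 0 signals failure */
	}

Keeping 64-bit-capable devices below 4 GiB first leaves the scarce low IOVA space available for devices that can only address 32 bits.
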
dma_dom          1912 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom;
dma_dom          1914 drivers/iommu/amd_iommu.c 	dma_dom = kzalloc(sizeof(struct dma_ops_domain), GFP_KERNEL);
dma_dom          1915 drivers/iommu/amd_iommu.c 	if (!dma_dom)
dma_dom          1918 drivers/iommu/amd_iommu.c 	if (protection_domain_init(&dma_dom->domain))
dma_dom          1921 drivers/iommu/amd_iommu.c 	dma_dom->domain.mode = PAGE_MODE_3_LEVEL;
dma_dom          1922 drivers/iommu/amd_iommu.c 	dma_dom->domain.pt_root = (void *)get_zeroed_page(GFP_KERNEL);
dma_dom          1923 drivers/iommu/amd_iommu.c 	dma_dom->domain.flags = PD_DMA_OPS_MASK;
dma_dom          1924 drivers/iommu/amd_iommu.c 	if (!dma_dom->domain.pt_root)
dma_dom          1927 drivers/iommu/amd_iommu.c 	init_iova_domain(&dma_dom->iovad, PAGE_SIZE, IOVA_START_PFN);
dma_dom          1929 drivers/iommu/amd_iommu.c 	if (init_iova_flush_queue(&dma_dom->iovad, iova_domain_flush_tlb, NULL))
dma_dom          1933 drivers/iommu/amd_iommu.c 	copy_reserved_iova(&reserved_iova_ranges, &dma_dom->iovad);
dma_dom          1935 drivers/iommu/amd_iommu.c 	return dma_dom;
dma_dom          1938 drivers/iommu/amd_iommu.c 	dma_ops_domain_free(dma_dom);
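
Lines 1912-1938 trace the dma_ops_domain constructor: a zeroed allocation, a series of init steps (protection domain, 3-level page-table root, IOVA domain, flush queue, reserved ranges), and a single error label that hands any partially built domain to dma_ops_domain_free(). A compilable sketch of that shape, with all types and helpers invented:

	#include <stdlib.h>

	/* invented analog of struct dma_ops_domain */
	struct demo_domain {
		void *pt_root;		/* stands in for domain.pt_root */
		int   iovad_ready;	/* stands in for the initialized iova_domain */
	};

	static int demo_iovad_init(struct demo_domain *d) { d->iovad_ready = 1; return 0; }

	static void demo_domain_free(struct demo_domain *d)
	{
		free(d->pt_root);	/* free(NULL) is safe, like the kernel helpers */
		free(d);
	}

	static struct demo_domain *demo_domain_alloc(void)
	{
		struct demo_domain *d = calloc(1, sizeof(*d));

		if (!d)
			return NULL;

		d->pt_root = calloc(1, 4096);	/* one zeroed page for the table root */
		if (!d->pt_root)
			goto out_free;

		if (demo_iovad_init(d))
			goto out_free;

		return d;

	out_free:
		/* single label: teardown must cope with partially-initialized state */
		demo_domain_free(d);
		return NULL;
	}

The one-label design only works because the free path tolerates whatever subset of fields was initialized, which is why the allocation is zeroed up front.
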
dma_dom          2411 drivers/iommu/amd_iommu.c 			       struct dma_ops_domain *dma_dom,
dma_dom          2427 drivers/iommu/amd_iommu.c 	address = dma_ops_alloc_iova(dev, dma_dom, pages, dma_mask);
dma_dom          2435 drivers/iommu/amd_iommu.c 		ret = iommu_map_page(&dma_dom->domain, start, paddr,
dma_dom          2445 drivers/iommu/amd_iommu.c 	domain_flush_np_cache(&dma_dom->domain, address, size);
dma_dom          2454 drivers/iommu/amd_iommu.c 		iommu_unmap_page(&dma_dom->domain, start, PAGE_SIZE);
dma_dom          2457 drivers/iommu/amd_iommu.c 	spin_lock_irqsave(&dma_dom->domain.lock, flags);
dma_dom          2458 drivers/iommu/amd_iommu.c 	domain_flush_tlb(&dma_dom->domain);
dma_dom          2459 drivers/iommu/amd_iommu.c 	domain_flush_complete(&dma_dom->domain);
dma_dom          2460 drivers/iommu/amd_iommu.c 	spin_unlock_irqrestore(&dma_dom->domain.lock, flags);
dma_dom          2462 drivers/iommu/amd_iommu.c 	dma_ops_free_iova(dma_dom, address, pages);
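
Lines 2411-2462 show __map_single's core loop: map the range one page at a time into the domain's page table, and on any failure walk back over exactly the pages that were mapped (the out_unmap path at line 2454), flush the TLB under the domain lock, and return the IOVA to the allocator. A self-contained sketch of the map-then-rollback loop; map_one() and unmap_one() are invented stubs:

	#include <stdint.h>

	#define DEMO_PAGE_SIZE 4096u

	/* invented stubs standing in for iommu_map_page()/iommu_unmap_page() */
	static int map_one(uint64_t iova, uint64_t paddr) { (void)iova; (void)paddr; return 0; }
	static void unmap_one(uint64_t iova) { (void)iova; }

	static int map_range(uint64_t iova, uint64_t paddr, unsigned int pages)
	{
		unsigned int i;

		for (i = 0; i < pages; ++i) {
			if (map_one(iova + (uint64_t)i * DEMO_PAGE_SIZE,
				    paddr + (uint64_t)i * DEMO_PAGE_SIZE))
				goto rollback;
		}
		return 0;

	rollback:
		while (i--)		/* undo only the i pages that succeeded */
			unmap_one(iova + (uint64_t)i * DEMO_PAGE_SIZE);
		return -1;	/* caller then flushes the TLB and frees the IOVA */
	}
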
dma_dom          2471 drivers/iommu/amd_iommu.c static void __unmap_single(struct dma_ops_domain *dma_dom,
dma_dom          2484 drivers/iommu/amd_iommu.c 		iommu_unmap_page(&dma_dom->domain, start, PAGE_SIZE);
dma_dom          2491 drivers/iommu/amd_iommu.c 		spin_lock_irqsave(&dma_dom->domain.lock, flags);
dma_dom          2492 drivers/iommu/amd_iommu.c 		domain_flush_tlb(&dma_dom->domain);
dma_dom          2493 drivers/iommu/amd_iommu.c 		domain_flush_complete(&dma_dom->domain);
dma_dom          2494 drivers/iommu/amd_iommu.c 		spin_unlock_irqrestore(&dma_dom->domain.lock, flags);
dma_dom          2495 drivers/iommu/amd_iommu.c 		dma_ops_free_iova(dma_dom, dma_addr, pages);
dma_dom          2498 drivers/iommu/amd_iommu.c 		queue_iova(&dma_dom->iovad, dma_addr >> PAGE_SHIFT, pages, 0);
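
Lines 2471-2498 show the two unmap policies: under strict invalidation the TLB is flushed immediately under the domain lock and the IOVA freed on the spot (lines 2491-2495), otherwise the range is handed to the flush queue (queue_iova at line 2498) and only becomes reusable after a batched flush. A sketch of that branch; the flag and helpers are invented names for what the excerpt calls domain_flush_tlb(), dma_ops_free_iova(), and queue_iova():

	#include <stdint.h>
	#include <stdbool.h>

	static bool strict_unmap_flush;		/* stands in for the driver's strict-flush mode */

	static void flush_tlb(void) { /* immediate IOTLB flush */ }
	static void free_range(uint64_t pfn, unsigned int pages) { (void)pfn; (void)pages; }
	static void queue_lazy_free(uint64_t pfn, unsigned int pages) { (void)pfn; (void)pages; }

	static void release_iova(uint64_t pfn, unsigned int pages)
	{
		if (strict_unmap_flush) {
			/* strict: the device must lose the translation right now */
			flush_tlb();
			free_range(pfn, pages);
		} else {
			/* lazy: defer the free until a batched flush has run, so
			 * the IOVA cannot be reused while still IOTLB-resident */
			queue_lazy_free(pfn, pages);
		}
	}

Lazy mode trades a window in which stale translations survive for far fewer invalidation commands on the unmap fast path.
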
dma_dom          2512 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom;
dma_dom          2522 drivers/iommu/amd_iommu.c 	dma_dom = to_dma_ops_domain(domain);
dma_dom          2524 drivers/iommu/amd_iommu.c 	return __map_single(dev, dma_dom, paddr, size, dir, dma_mask);
dma_dom          2534 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom;
dma_dom          2540 drivers/iommu/amd_iommu.c 	dma_dom = to_dma_ops_domain(domain);
dma_dom          2542 drivers/iommu/amd_iommu.c 	__unmap_single(dma_dom, dma_addr, size, dir);
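
The to_dma_ops_domain() calls in these wrappers (lines 2522 and 2540, and again below) are a container_of()-style downcast: the dma_ops_domain embeds a protection_domain, so the outer structure is recovered from a pointer to the embedded member. The pattern in isolation, with illustrative types:

	#include <stddef.h>

	struct inner { int id; };

	struct outer {
		struct inner base;	/* embedded, like domain inside dma_ops_domain */
		int extra;
	};

	#define demo_container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	static struct outer *to_outer(struct inner *i)
	{
		/* same shape as to_dma_ops_domain(): member pointer -> outer struct */
		return demo_container_of(i, struct outer, base);
	}
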
dma_dom          2581 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom;
dma_dom          2591 drivers/iommu/amd_iommu.c 	dma_dom  = to_dma_ops_domain(domain);
dma_dom          2596 drivers/iommu/amd_iommu.c 	address = dma_ops_alloc_iova(dev, dma_dom, npages, dma_mask);
dma_dom          2656 drivers/iommu/amd_iommu.c 	free_iova_fast(&dma_dom->iovad, address >> PAGE_SHIFT, npages);
dma_dom          2671 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom;
dma_dom          2680 drivers/iommu/amd_iommu.c 	dma_dom   = to_dma_ops_domain(domain);
dma_dom          2683 drivers/iommu/amd_iommu.c 	__unmap_single(dma_dom, startaddr, npages << PAGE_SHIFT, dir);
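
In the scatter-gather paths (lines 2581-2683) a single IOVA window is sized for every page of the list up front (the npages passed to dma_ops_alloc_iova at line 2596), each segment is mapped at a running offset inside it, and the whole window is released in one call on error (line 2656) or unmap (line 2683). A sketch of the bookkeeping; struct seg and the page math are invented, and the real sg_num_pages() also accounts for sub-page segment offsets:

	#include <stdint.h>

	#define DEMO_PAGE_SIZE  4096u
	#define DEMO_PAGE_SHIFT 12

	/* invented segment type; the real code walks struct scatterlist */
	struct seg { uint64_t paddr; uint32_t len; uint64_t dma_addr; };

	static unsigned int pages_for(const struct seg *s, int n)
	{
		unsigned int pages = 0;

		for (int i = 0; i < n; ++i)
			pages += (s[i].len + DEMO_PAGE_SIZE - 1) / DEMO_PAGE_SIZE;
		return pages;	/* size of the single IOVA window to allocate */
	}

	static void assign_addresses(struct seg *s, int n, uint64_t window)
	{
		uint64_t cur = window;	/* window start from the allocator */

		for (int i = 0; i < n; ++i) {
			s[i].dma_addr = cur;	/* device-visible address of segment i */
			cur += (uint64_t)((s[i].len + DEMO_PAGE_SIZE - 1)
					  / DEMO_PAGE_SIZE) << DEMO_PAGE_SHIFT;
		}
	}

One window per list keeps allocation and teardown O(1) in IOVA-allocator operations regardless of how many segments the list has.
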
dma_dom          2695 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom;
dma_dom          2706 drivers/iommu/amd_iommu.c 	dma_dom   = to_dma_ops_domain(domain);
dma_dom          2726 drivers/iommu/amd_iommu.c 	*dma_addr = __map_single(dev, dma_dom, page_to_phys(page),
dma_dom          2750 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom;
dma_dom          2760 drivers/iommu/amd_iommu.c 	dma_dom = to_dma_ops_domain(domain);
dma_dom          2762 drivers/iommu/amd_iommu.c 	__unmap_single(dma_dom, dma_addr, size, DMA_BIDIRECTIONAL);
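
The coherent-buffer pair at lines 2695-2762 backs the buffer with ordinary pages and keeps it mapped bidirectionally for its entire lifetime (__map_single at line 2726, and __unmap_single with DMA_BIDIRECTIONAL at line 2762), tearing down in reverse order on free. A user-space-flavoured sketch with invented demo_map()/demo_unmap() helpers; size is assumed page-aligned:

	#include <stdlib.h>
	#include <stdint.h>

	struct coherent_buf { void *cpu_addr; uint64_t dma_addr; size_t size; };

	/* invented stand-ins for __map_single()/__unmap_single() */
	static uint64_t demo_map(void *cpu, size_t size)  { (void)size; return (uint64_t)(uintptr_t)cpu; }
	static void demo_unmap(uint64_t dma, size_t size) { (void)dma; (void)size; }

	static int coherent_alloc(struct coherent_buf *b, size_t size)
	{
		b->cpu_addr = aligned_alloc(4096, size);	/* backing pages */
		if (!b->cpu_addr)
			return -1;

		b->dma_addr = demo_map(b->cpu_addr, size);	/* mapped for its lifetime */
		if (!b->dma_addr) {
			free(b->cpu_addr);
			return -1;
		}
		b->size = size;
		return 0;
	}

	static void coherent_free(struct coherent_buf *b)
	{
		demo_unmap(b->dma_addr, b->size);	/* unmap before freeing the pages */
		free(b->cpu_addr);
	}
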
dma_dom          3002 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom;
dma_dom          3017 drivers/iommu/amd_iommu.c 		dma_dom = to_dma_ops_domain(domain);
dma_dom          3018 drivers/iommu/amd_iommu.c 		dma_ops_domain_free(dma_dom);
dma_dom          3241 drivers/iommu/amd_iommu.c 	struct dma_ops_domain *dma_dom = to_dma_ops_domain(to_pdomain(domain));
dma_dom          3247 drivers/iommu/amd_iommu.c 	WARN_ON_ONCE(reserve_iova(&dma_dom->iovad, start, end) == NULL);
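
The reserve at line 3247 punches reserved regions (and the WARN_ON_ONCE makes a failed reservation loud) out of the IOVA space before the allocator can hand them to a device. reserve_iova() takes an inclusive pfn range, so converting a byte range needs a length - 1 before the shift. A minimal runnable sketch; the example window values are an assumption for illustration only:

	#include <inttypes.h>
	#include <stdio.h>

	#define DEMO_PAGE_SHIFT 12

	static void byte_range_to_pfns(uint64_t start, uint64_t length,
				       uint64_t *first_pfn, uint64_t *last_pfn)
	{
		*first_pfn = start >> DEMO_PAGE_SHIFT;
		/* address of the region's last byte, so the end pfn is inclusive */
		*last_pfn = (start + length - 1) >> DEMO_PAGE_SHIFT;
	}

	int main(void)
	{
		uint64_t first, last;

		/* example values only: a 1 MiB window starting at 0xfee00000 */
		byte_range_to_pfns(0xfee00000ULL, 0x100000ULL, &first, &last);
		printf("reserve pfns 0x%" PRIx64 "-0x%" PRIx64 "\n", first, last);
		return 0;
	}
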