Lines Matching refs:entry
41 unsigned long entry; /* Index into IOTSB. */ member
50 static inline void iommu_batch_start(struct device *dev, unsigned long prot, unsigned long entry) in iommu_batch_start() argument
56 p->entry = entry; in iommu_batch_start()
66 unsigned long entry = p->entry; in iommu_batch_flush() local
73 num = pci_sun4v_iommu_map(devhandle, HV_PCI_TSBID(0, entry), in iommu_batch_flush()
80 devhandle, HV_PCI_TSBID(0, entry), in iommu_batch_flush()
85 entry += num; in iommu_batch_flush()
90 p->entry = entry; in iommu_batch_flush()
96 static inline void iommu_batch_new_entry(unsigned long entry) in iommu_batch_new_entry() argument
100 if (p->entry + p->npages == entry) in iommu_batch_new_entry()
102 if (p->entry != ~0UL) in iommu_batch_new_entry()
104 p->entry = entry; in iommu_batch_new_entry()
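The iommu_batch_* hits above show how mappings are staged: the batch records a starting IOTSB index (entry) plus a page count, and iommu_batch_new_entry() only flushes the pending batch when the requested index does not extend it. Below is a minimal user-space model of that contiguity test; the names (tsb_batch, batch_new_entry, batch_flush) are invented for the sketch, and a printf stands in for the hypervisor map call that the real flush issues.

#include <stdio.h>

#define BATCH_EMPTY (~0UL)

struct tsb_batch {
	unsigned long entry;   /* first IOTSB index of the pending batch */
	unsigned long npages;  /* pages queued behind that index */
};

/* Stand-in for the hypervisor map call issued by the real flush. */
static void batch_flush(struct tsb_batch *b)
{
	if (b->entry != BATCH_EMPTY && b->npages)
		printf("map entries %lu..%lu\n", b->entry, b->entry + b->npages - 1);
	b->npages = 0;
}

/* Mirrors the test above: keep accumulating while the requested index
 * extends the pending batch, otherwise flush and start a new one. */
static void batch_new_entry(struct tsb_batch *b, unsigned long entry)
{
	if (b->entry + b->npages == entry)
		return;
	if (b->entry != BATCH_EMPTY)
		batch_flush(b);
	b->entry = entry;
}

int main(void)
{
	struct tsb_batch b = { .entry = BATCH_EMPTY, .npages = 0 };

	batch_new_entry(&b, 100); b.npages += 4;   /* entries 100..103 */
	batch_new_entry(&b, 104); b.npages += 2;   /* contiguous, same batch */
	batch_new_entry(&b, 300); b.npages += 1;   /* gap: flushes 100..105 first */
	batch_flush(&b);                           /* flushes 300 */
	return 0;
}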
139 long entry; in dma_4v_alloc_coherent() local
159 entry = iommu_tbl_range_alloc(dev, &iommu->tbl, npages, NULL, in dma_4v_alloc_coherent()
162 if (unlikely(entry == IOMMU_ERROR_CODE)) in dma_4v_alloc_coherent()
165 *dma_addrp = (iommu->tbl.table_map_base + (entry << IO_PAGE_SHIFT)); in dma_4v_alloc_coherent()
174 entry); in dma_4v_alloc_coherent()
197 static void dma_4v_iommu_demap(void *demap_arg, unsigned long entry, in dma_4v_iommu_demap() argument
206 HV_PCI_TSBID(0, entry), in dma_4v_iommu_demap()
209 entry += num; in dma_4v_iommu_demap()
220 unsigned long order, npages, entry; in dma_4v_free_coherent() local
227 entry = ((dvma - iommu->tbl.table_map_base) >> IO_PAGE_SHIFT); in dma_4v_free_coherent()
228 dma_4v_iommu_demap(&devhandle, entry, npages); in dma_4v_free_coherent()
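dma_4v_alloc_coherent() and dma_4v_free_coherent() convert between an IOTSB index and a bus address with the same shift in both directions: dma = table_map_base + (entry << IO_PAGE_SHIFT) on allocation, entry = (dvma - table_map_base) >> IO_PAGE_SHIFT on free. A self-contained sketch of that arithmetic, assuming 8 KiB I/O pages and a made-up table_map_base (the driver takes both from the per-device iommu table):

#include <assert.h>
#include <stdio.h>

#define IO_PAGE_SHIFT 13UL    /* assumed 8 KiB I/O pages for this example */

static unsigned long entry_to_dma(unsigned long base, unsigned long entry)
{
	return base + (entry << IO_PAGE_SHIFT);    /* alloc/map direction */
}

static unsigned long dma_to_entry(unsigned long base, unsigned long dma)
{
	return (dma - base) >> IO_PAGE_SHIFT;      /* free/unmap direction */
}

int main(void)
{
	unsigned long table_map_base = 0x80000000UL;  /* made up for the demo */
	unsigned long entry = 42;
	unsigned long dma = entry_to_dma(table_map_base, entry);

	assert(dma_to_entry(table_map_base, dma) == entry);
	printf("entry %lu <-> dma %#lx\n", entry, dma);
	return 0;
}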
245 long entry; in dma_4v_map_page() local
256 entry = iommu_tbl_range_alloc(dev, &iommu->tbl, npages, NULL, in dma_4v_map_page()
259 if (unlikely(entry == IOMMU_ERROR_CODE)) in dma_4v_map_page()
262 bus_addr = (iommu->tbl.table_map_base + (entry << IO_PAGE_SHIFT)); in dma_4v_map_page()
271 iommu_batch_start(dev, prot, entry); in dma_4v_map_page()
302 long entry; in dma_4v_unmap_page() local
318 entry = (bus_addr - iommu->tbl.table_map_base) >> IO_PAGE_SHIFT; in dma_4v_unmap_page()
319 dma_4v_iommu_demap(&devhandle, entry, npages); in dma_4v_unmap_page()
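dma_4v_unmap_page(), like dma_4v_free_coherent() above, recovers the index from the bus address and hands it to dma_4v_iommu_demap(), which loops because the hypervisor demap call may retire fewer pages than requested, advancing entry by the returned count each time. A toy model of that loop; fake_hv_demap() is invented here to stand in for the real pci_sun4v_iommu_demap() call:

#include <stdio.h>

/* Pretend hypervisor that retires at most 3 pages per call. */
static unsigned long fake_hv_demap(unsigned long entry, unsigned long npages)
{
	unsigned long num = npages < 3 ? npages : 3;

	printf("demap entries %lu..%lu\n", entry, entry + num - 1);
	return num;
}

/* Same advance-and-retry shape as dma_4v_iommu_demap(). */
static void demap_range(unsigned long entry, unsigned long npages)
{
	do {
		unsigned long num = fake_hv_demap(entry, npages);

		entry += num;
		npages -= num;
	} while (npages != 0);
}

int main(void)
{
	demap_range(40, 7);   /* 40..42, then 43..45, then 46 */
	return 0;
}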
364 unsigned long paddr, npages, entry, out_entry = 0, slen; in dma_4v_map_sg() local
375 entry = iommu_tbl_range_alloc(dev, &iommu->tbl, npages, in dma_4v_map_sg()
379 if (unlikely(entry == IOMMU_ERROR_CODE)) { in dma_4v_map_sg()
386 iommu_batch_new_entry(entry); in dma_4v_map_sg()
389 dma_addr = iommu->tbl.table_map_base + (entry << IO_PAGE_SHIFT); in dma_4v_map_sg()
423 out_entry = entry; in dma_4v_map_sg()
474 unsigned long flags, entry; in dma_4v_unmap_sg() local
496 entry = ((dma_handle - tbl->table_map_base) >> shift); in dma_4v_unmap_sg()
497 dma_4v_iommu_demap(&devhandle, entry, npages); in dma_4v_unmap_sg()
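dma_4v_unmap_sg() applies the same conversion per scatterlist segment: each segment's bus address is turned back into a starting entry with the table shift, and its length into a page count, before the demap loop runs. A rough illustration with made-up, page-aligned segments (the real driver rounds using the page-aligned end address rather than this simple round-up):

#include <stdio.h>

#define IO_PAGE_SHIFT 13UL                 /* assumed 8 KiB I/O pages */
#define IO_PAGE_SIZE  (1UL << IO_PAGE_SHIFT)

struct seg { unsigned long dma_address; unsigned long dma_length; };

int main(void)
{
	unsigned long table_map_base = 0x80000000UL;   /* made up for the demo */
	struct seg segs[] = {
		{ table_map_base + (100UL << IO_PAGE_SHIFT), 3 * IO_PAGE_SIZE },
		{ table_map_base + (300UL << IO_PAGE_SHIFT), 1 * IO_PAGE_SIZE },
	};

	for (unsigned int i = 0; i < sizeof(segs) / sizeof(segs[0]); i++) {
		unsigned long entry  = (segs[i].dma_address - table_map_base)
				       >> IO_PAGE_SHIFT;
		unsigned long npages = (segs[i].dma_length + IO_PAGE_SIZE - 1)
				       >> IO_PAGE_SHIFT;

		printf("segment %u: entry %lu, %lu pages\n", i, entry, npages);
	}
	return 0;
}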