Lines Matching refs:dma_ops

1656 struct ib_dma_mapping_ops *dma_ops; member
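
The dma_ops member is a per-device table of DMA mapping callbacks that a driver may install on its ib_device. A minimal sketch of struct ib_dma_mapping_ops, reconstructed only from the call sites listed below (the exact field order and signatures in the header may differ), assuming the listing comes from include/rdma/ib_verbs.h:

	/* sketch: callback table consulted by the ib_dma_* wrappers below */
	struct ib_dma_mapping_ops {
		int   (*mapping_error)(struct ib_device *dev, u64 dma_addr);
		u64   (*map_single)(struct ib_device *dev, void *ptr, size_t size,
		                    enum dma_data_direction direction);
		void  (*unmap_single)(struct ib_device *dev, u64 addr, size_t size,
		                      enum dma_data_direction direction);
		u64   (*map_page)(struct ib_device *dev, struct page *page,
		                  unsigned long offset, size_t size,
		                  enum dma_data_direction direction);
		void  (*unmap_page)(struct ib_device *dev, u64 addr, size_t size,
		                    enum dma_data_direction direction);
		int   (*map_sg)(struct ib_device *dev, struct scatterlist *sg,
		                int nents, enum dma_data_direction direction);
		void  (*unmap_sg)(struct ib_device *dev, struct scatterlist *sg,
		                  int nents, enum dma_data_direction direction);
		void  (*sync_single_for_cpu)(struct ib_device *dev, u64 dma_handle,
		                             size_t size, enum dma_data_direction dir);
		void  (*sync_single_for_device)(struct ib_device *dev, u64 dma_handle,
		                                size_t size, enum dma_data_direction dir);
		void *(*alloc_coherent)(struct ib_device *dev, size_t size,
		                        u64 *dma_handle, gfp_t flag);
		void  (*free_coherent)(struct ib_device *dev, size_t size,
		                       void *cpu_addr, u64 dma_handle);
	};
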
2145 if (dev->dma_ops) in ib_dma_mapping_error()
2146 return dev->dma_ops->mapping_error(dev, dma_addr); in ib_dma_mapping_error()
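
These two lines show the dispatch pattern every ib_dma_* wrapper in this listing repeats: check dev->dma_ops and call through the driver-installed table if present, otherwise fall back to the generic DMA API on dev->dma_device. A sketch of ib_dma_mapping_error; the fallback branch is not in the listing and is assumed:

	static inline int ib_dma_mapping_error(struct ib_device *dev, u64 dma_addr)
	{
		if (dev->dma_ops)
			return dev->dma_ops->mapping_error(dev, dma_addr);
		/* assumed fallback: generic DMA API on the underlying struct device */
		return dma_mapping_error(dev->dma_device, dma_addr);
	}
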
2161 if (dev->dma_ops) in ib_dma_map_single()
2162 return dev->dma_ops->map_single(dev, cpu_addr, size, direction); in ib_dma_map_single()
2177 if (dev->dma_ops) in ib_dma_unmap_single()
2178 dev->dma_ops->unmap_single(dev, addr, size, direction); in ib_dma_unmap_single()
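
The single-buffer pair follows the same shape; a sketch assuming dma_map_single()/dma_unmap_single() as the fallbacks:

	static inline u64 ib_dma_map_single(struct ib_device *dev, void *cpu_addr,
					    size_t size,
					    enum dma_data_direction direction)
	{
		if (dev->dma_ops)
			return dev->dma_ops->map_single(dev, cpu_addr, size, direction);
		return dma_map_single(dev->dma_device, cpu_addr, size, direction);
	}

	static inline void ib_dma_unmap_single(struct ib_device *dev, u64 addr,
					       size_t size,
					       enum dma_data_direction direction)
	{
		if (dev->dma_ops)
			dev->dma_ops->unmap_single(dev, addr, size, direction);
		else
			dma_unmap_single(dev->dma_device, addr, size, direction);
	}
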
2215 if (dev->dma_ops) in ib_dma_map_page()
2216 return dev->dma_ops->map_page(dev, page, offset, size, direction); in ib_dma_map_page()
2231 if (dev->dma_ops) in ib_dma_unmap_page()
2232 dev->dma_ops->unmap_page(dev, addr, size, direction); in ib_dma_unmap_page()
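
The page-based variants take a struct page plus offset instead of a kernel virtual address; a sketch, again with the generic-API fallbacks assumed:

	static inline u64 ib_dma_map_page(struct ib_device *dev, struct page *page,
					  unsigned long offset, size_t size,
					  enum dma_data_direction direction)
	{
		if (dev->dma_ops)
			return dev->dma_ops->map_page(dev, page, offset, size, direction);
		return dma_map_page(dev->dma_device, page, offset, size, direction);
	}

	static inline void ib_dma_unmap_page(struct ib_device *dev, u64 addr,
					     size_t size,
					     enum dma_data_direction direction)
	{
		if (dev->dma_ops)
			dev->dma_ops->unmap_page(dev, addr, size, direction);
		else
			dma_unmap_page(dev->dma_device, addr, size, direction);
	}
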
2248 if (dev->dma_ops) in ib_dma_map_sg()
2249 return dev->dma_ops->map_sg(dev, sg, nents, direction); in ib_dma_map_sg()
2264 if (dev->dma_ops) in ib_dma_unmap_sg()
2265 dev->dma_ops->unmap_sg(dev, sg, nents, direction); in ib_dma_unmap_sg()
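
The scatterlist variants map nents entries at once; map_sg returns the number of entries actually mapped (0 on failure), whether it goes through dma_ops or the assumed dma_map_sg() fallback:

	static inline int ib_dma_map_sg(struct ib_device *dev,
					struct scatterlist *sg, int nents,
					enum dma_data_direction direction)
	{
		if (dev->dma_ops)
			return dev->dma_ops->map_sg(dev, sg, nents, direction);
		return dma_map_sg(dev->dma_device, sg, nents, direction);
	}

	static inline void ib_dma_unmap_sg(struct ib_device *dev,
					   struct scatterlist *sg, int nents,
					   enum dma_data_direction direction)
	{
		if (dev->dma_ops)
			dev->dma_ops->unmap_sg(dev, sg, nents, direction);
		else
			dma_unmap_sg(dev->dma_device, sg, nents, direction);
	}
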
2325 if (dev->dma_ops) in ib_dma_sync_single_for_cpu()
2326 dev->dma_ops->sync_single_for_cpu(dev, addr, size, dir); in ib_dma_sync_single_for_cpu()
2343 if (dev->dma_ops) in ib_dma_sync_single_for_device()
2344 dev->dma_ops->sync_single_for_device(dev, addr, size, dir); in ib_dma_sync_single_for_device()
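
The CPU/device sync helpers are void and dispatch the same way; a sketch assuming dma_sync_single_for_cpu()/dma_sync_single_for_device() as the fallbacks:

	static inline void ib_dma_sync_single_for_cpu(struct ib_device *dev, u64 addr,
						      size_t size,
						      enum dma_data_direction dir)
	{
		if (dev->dma_ops)
			dev->dma_ops->sync_single_for_cpu(dev, addr, size, dir);
		else
			dma_sync_single_for_cpu(dev->dma_device, addr, size, dir);
	}

	static inline void ib_dma_sync_single_for_device(struct ib_device *dev, u64 addr,
							 size_t size,
							 enum dma_data_direction dir)
	{
		if (dev->dma_ops)
			dev->dma_ops->sync_single_for_device(dev, addr, size, dir);
		else
			dma_sync_single_for_device(dev->dma_device, addr, size, dir);
	}
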
2361 if (dev->dma_ops) in ib_dma_alloc_coherent()
2362 return dev->dma_ops->alloc_coherent(dev, size, dma_handle, flag); in ib_dma_alloc_coherent()
2384 if (dev->dma_ops) in ib_dma_free_coherent()
2385 dev->dma_ops->free_coherent(dev, size, cpu_addr, dma_handle); in ib_dma_free_coherent()
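
The coherent-allocation pair is the one wrapper whose fallback needs a small translation step, since the generic dma_alloc_coherent() fills a dma_addr_t while the ib_dma_* interface hands back a u64 handle; a sketch, with both fallback branches assumed:

	static inline void *ib_dma_alloc_coherent(struct ib_device *dev, size_t size,
						  u64 *dma_handle, gfp_t flag)
	{
		if (dev->dma_ops)
			return dev->dma_ops->alloc_coherent(dev, size, dma_handle, flag);
		else {
			dma_addr_t handle;
			void *ret;

			ret = dma_alloc_coherent(dev->dma_device, size, &handle, flag);
			*dma_handle = handle;	/* widen dma_addr_t into the u64 handle */
			return ret;
		}
	}

	static inline void ib_dma_free_coherent(struct ib_device *dev, size_t size,
						void *cpu_addr, u64 dma_handle)
	{
		if (dev->dma_ops)
			dev->dma_ops->free_coherent(dev, size, cpu_addr, dma_handle);
		else
			dma_free_coherent(dev->dma_device, size, cpu_addr, dma_handle);
	}
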