Lines matching refs:dma_ops (the struct ib_device member and the ib_dma_* inline wrappers in include/rdma/ib_verbs.h)

1803 	struct ib_dma_mapping_ops   *dma_ops;  member
2596 if (dev->dma_ops) in ib_dma_mapping_error()
2597 return dev->dma_ops->mapping_error(dev, dma_addr); in ib_dma_mapping_error()
2612 if (dev->dma_ops) in ib_dma_map_single()
2613 return dev->dma_ops->map_single(dev, cpu_addr, size, direction); in ib_dma_map_single()
2628 if (dev->dma_ops) in ib_dma_unmap_single()
2629 dev->dma_ops->unmap_single(dev, addr, size, direction); in ib_dma_unmap_single()
2666 if (dev->dma_ops) in ib_dma_map_page()
2667 return dev->dma_ops->map_page(dev, page, offset, size, direction); in ib_dma_map_page()
2682 if (dev->dma_ops) in ib_dma_unmap_page()
2683 dev->dma_ops->unmap_page(dev, addr, size, direction); in ib_dma_unmap_page()
2699 if (dev->dma_ops) in ib_dma_map_sg()
2700 return dev->dma_ops->map_sg(dev, sg, nents, direction); in ib_dma_map_sg()
2715 if (dev->dma_ops) in ib_dma_unmap_sg()
2716 dev->dma_ops->unmap_sg(dev, sg, nents, direction); in ib_dma_unmap_sg()
2776 if (dev->dma_ops) in ib_dma_sync_single_for_cpu()
2777 dev->dma_ops->sync_single_for_cpu(dev, addr, size, dir); in ib_dma_sync_single_for_cpu()
2794 if (dev->dma_ops) in ib_dma_sync_single_for_device()
2795 dev->dma_ops->sync_single_for_device(dev, addr, size, dir); in ib_dma_sync_single_for_device()
2812 if (dev->dma_ops) in ib_dma_alloc_coherent()
2813 return dev->dma_ops->alloc_coherent(dev, size, dma_handle, flag); in ib_dma_alloc_coherent()
2835 if (dev->dma_ops) in ib_dma_free_coherent()
2836 dev->dma_ops->free_coherent(dev, size, cpu_addr, dma_handle); in ib_dma_free_coherent()
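Every ib_dma_* helper above dispatches through the same optional ops table. Below is a minimal sketch of what struct ib_dma_mapping_ops must look like; the member signatures are inferred from the call sites in the listing (u64 DMA addresses, enum dma_data_direction arguments) rather than copied from the header, and the real definition may carry additional members.

/* Sketch of the ops table behind dev->dma_ops; signatures inferred
 * from the ib_dma_* call sites above, not copied verbatim. */
struct ib_dma_mapping_ops {
	int  (*mapping_error)(struct ib_device *dev, u64 dma_addr);
	u64  (*map_single)(struct ib_device *dev, void *ptr, size_t size,
			   enum dma_data_direction direction);
	void (*unmap_single)(struct ib_device *dev, u64 addr, size_t size,
			     enum dma_data_direction direction);
	u64  (*map_page)(struct ib_device *dev, struct page *page,
			 unsigned long offset, size_t size,
			 enum dma_data_direction direction);
	void (*unmap_page)(struct ib_device *dev, u64 addr, size_t size,
			   enum dma_data_direction direction);
	int  (*map_sg)(struct ib_device *dev, struct scatterlist *sg,
		       int nents, enum dma_data_direction direction);
	void (*unmap_sg)(struct ib_device *dev, struct scatterlist *sg,
			 int nents, enum dma_data_direction direction);
	void (*sync_single_for_cpu)(struct ib_device *dev, u64 dma_handle,
				    size_t size, enum dma_data_direction dir);
	void (*sync_single_for_device)(struct ib_device *dev, u64 dma_handle,
				       size_t size, enum dma_data_direction dir);
	void *(*alloc_coherent)(struct ib_device *dev, size_t size,
				u64 *dma_handle, gfp_t flag);
	void (*free_coherent)(struct ib_device *dev, size_t size,
			      void *cpu_addr, u64 dma_handle);
};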
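Each wrapper follows the same two-branch pattern: if the provider installed dev->dma_ops, dispatch to it; otherwise fall back to the generic DMA API on the device's underlying struct device. A sketch of one wrapper, ib_dma_map_single(), assuming the conventional fallback to dma_map_single() on dev->dma_device (the fallback branch is not shown in the listing above):

static inline u64 ib_dma_map_single(struct ib_device *dev,
				    void *cpu_addr, size_t size,
				    enum dma_data_direction direction)
{
	if (dev->dma_ops)
		/* Provider-specific path, e.g. a software RDMA device
		 * with no real DMA engine. */
		return dev->dma_ops->map_single(dev, cpu_addr, size,
						direction);
	/* Default path: ordinary streaming DMA mapping on the
	 * underlying struct device (assumed here to be dev->dma_device). */
	return dma_map_single(dev->dma_device, cpu_addr, size, direction);
}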
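From a consumer's point of view the dispatch is invisible: a ULP simply calls the ib_dma_* helpers and checks the result with ib_dma_mapping_error(). A hypothetical caller (example_map_send_buf() is illustrative, not taken from the source):

/* Hypothetical helper: map a buffer for a send and verify the mapping.
 * The caller is expected to ib_dma_unmap_single() it once the send
 * completes. */
static int example_map_send_buf(struct ib_device *ibdev, void *buf,
				size_t len, u64 *dma_addr)
{
	*dma_addr = ib_dma_map_single(ibdev, buf, len, DMA_TO_DEVICE);
	if (ib_dma_mapping_error(ibdev, *dma_addr))
		return -ENOMEM;
	return 0;
}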