Lines matching refs:ops (references to the dma_map_ops dispatch table in the generic DMA mapping wrappers)

15 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_single_attrs() local
20 addr = ops->map_page(dev, virt_to_page(ptr), offset_in_page(ptr), size, dir, attrs); in dma_map_single_attrs()
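
Every wrapper in this listing follows the same pattern: fetch the architecture's dma_map_ops with get_dma_ops(dev), then dispatch to the matching hook. As a minimal sketch of the provider side, assuming a kernel of the struct dma_attrs era (which the attrs parameters above suggest), a no-IOMMU arch might wire up map_page like this; the mydrv_* names are hypothetical:

    static dma_addr_t mydrv_map_page(struct device *dev, struct page *page,
                                     unsigned long offset, size_t size,
                                     enum dma_data_direction dir,
                                     struct dma_attrs *attrs)
    {
            /* trivial 1:1 mapping: bus address == physical address */
            return page_to_phys(page) + offset;
    }

    static struct dma_map_ops mydrv_dma_ops = {
            .map_page = mydrv_map_page,
            /* hooks left NULL are simply skipped; note the
             * "if (ops->unmap_page)" style checks in the listing */
    };
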
34 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_single_attrs() local
37 if (ops->unmap_page) in dma_unmap_single_attrs()
38 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_single_attrs()
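
On the consumer side, dma_map_single() and dma_unmap_single() (the attrs == NULL convenience forms of the two functions above) bracket a streaming transfer. A hedged sketch; the mydrv_tx() name and the transfer itself are assumptions:

    static int mydrv_tx(struct device *dev, void *buf, size_t len)
    {
            dma_addr_t handle;

            handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, handle))
                    return -ENOMEM;         /* mapping can fail: always check */
            /* ... point the device at 'handle', wait for completion ... */
            dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
            return 0;
    }
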
50 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_sg_attrs() local
57 ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
68 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_sg_attrs() local
72 if (ops->unmap_sg) in dma_unmap_sg_attrs()
73 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
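
dma_map_sg() may coalesce entries, so a driver programs the device with the returned count but passes the original nents back to dma_unmap_sg(). A sketch assuming a pages[] array and a hypothetical NPAGES constant:

    static int mydrv_map_pages(struct device *dev, struct page **pages)
    {
            struct scatterlist sgl[NPAGES];
            int i, mapped;

            sg_init_table(sgl, NPAGES);
            for (i = 0; i < NPAGES; i++)
                    sg_set_page(&sgl[i], pages[i], PAGE_SIZE, 0);

            mapped = dma_map_sg(dev, sgl, NPAGES, DMA_FROM_DEVICE);
            if (!mapped)
                    return -ENOMEM;
            /* program 'mapped' (possibly merged) entries, run the I/O, then: */
            dma_unmap_sg(dev, sgl, NPAGES, DMA_FROM_DEVICE);
            return 0;
    }
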
80 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_page() local
85 addr = ops->map_page(dev, page, offset, size, dir, NULL); in dma_map_page()
94 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_page() local
97 if (ops->unmap_page) in dma_unmap_page()
98 ops->unmap_page(dev, addr, size, dir, NULL); in dma_unmap_page()
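
dma_map_page() is the primitive underneath dma_map_single(); as the listing shows, dma_map_single_attrs() itself goes through ops->map_page() via virt_to_page(). The page form also covers highmem pages that have no kernel virtual address. A brief sketch, fragment only:

    dma_addr_t handle;

    handle = dma_map_page(dev, page, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);
    if (dma_mapping_error(dev, handle))
            return -ENOMEM;
    /* ... device reads and writes the page ... */
    dma_unmap_page(dev, handle, PAGE_SIZE, DMA_BIDIRECTIONAL);
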
106 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_cpu() local
109 if (ops->sync_single_for_cpu) in dma_sync_single_for_cpu()
110 ops->sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
118 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_device() local
121 if (ops->sync_single_for_device) in dma_sync_single_for_device()
122 ops->sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
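
The sync_single pair lets a driver keep one streaming mapping alive and pass ownership back and forth instead of remapping every transfer: sync_for_cpu before the CPU touches the buffer, sync_for_device before handing it back to the hardware. Sketch, with process_rx() a hypothetical helper:

    dma_sync_single_for_cpu(dev, handle, len, DMA_FROM_DEVICE);
    process_rx(buf, len);           /* CPU owns the buffer here */
    dma_sync_single_for_device(dev, handle, len, DMA_FROM_DEVICE);
    /* the device may now DMA into the same mapping again */
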
132 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_range_for_cpu() local
135 if (ops->sync_single_for_cpu) in dma_sync_single_range_for_cpu()
136 ops->sync_single_for_cpu(dev, addr + offset, size, dir); in dma_sync_single_range_for_cpu()
146 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_range_for_device() local
149 if (ops->sync_single_for_device) in dma_sync_single_range_for_device()
150 ops->sync_single_for_device(dev, addr + offset, size, dir); in dma_sync_single_range_for_device()
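
The _range variants, implemented above as plain addr + offset on the same sync_single hooks, avoid syncing a whole buffer when only part of it changed, e.g. a short packet landing in a large ring slot. Sketch with hypothetical hdr_len/payload_len:

    dma_sync_single_range_for_cpu(dev, handle, hdr_len, payload_len,
                                  DMA_FROM_DEVICE);
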
158 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_cpu() local
161 if (ops->sync_sg_for_cpu) in dma_sync_sg_for_cpu()
162 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
170 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_device() local
173 if (ops->sync_sg_for_device) in dma_sync_sg_for_device()
174 ops->sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
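
The scatter-gather sync calls follow the same ownership handshake; as with dma_unmap_sg(), nelems is the count originally passed to dma_map_sg(), not its return value. Sketch:

    dma_sync_sg_for_cpu(dev, sgl, NPAGES, DMA_FROM_DEVICE);
    /* ... CPU reads or writes the pages ... */
    dma_sync_sg_for_device(dev, sgl, NPAGES, DMA_FROM_DEVICE);
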
213 struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_attrs() local
214 BUG_ON(!ops); in dma_mmap_attrs()
215 if (ops->mmap) in dma_mmap_attrs()
216 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
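
dma_mmap_attrs(), usually reached through the dma_mmap_coherent() wrapper, maps a coherent allocation into user space; when the arch supplies no ops->mmap hook it falls back to the generic dma_common_mmap(). A sketch of a driver mmap file operation; struct mydrv and its fields are hypothetical:

    static int mydrv_mmap(struct file *file, struct vm_area_struct *vma)
    {
            struct mydrv *priv = file->private_data;

            return dma_mmap_coherent(priv->dev, vma, priv->cpu_addr,
                                     priv->dma_handle, priv->size);
    }
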
230 struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_sgtable_attrs() local
231 BUG_ON(!ops); in dma_get_sgtable_attrs()
232 if (ops->get_sgtable) in dma_get_sgtable_attrs()
233 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
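
dma_get_sgtable_attrs(), or dma_get_sgtable() in its attrs == NULL form, builds an sg_table describing a coherent buffer, which dma-buf exporters can hand to an importing device. A closing sketch, fragment only:

    struct sg_table sgt;
    int ret;

    ret = dma_get_sgtable(dev, &sgt, cpu_addr, dma_handle, size);
    if (ret)
            return ret;
    /* ... pass &sgt along; later sg_free_table(&sgt) ... */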