Lines matching refs: ops (struct dma_map_ops dispatch in the kernel's generic DMA-mapping helpers)
16 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_single_attrs() local
21 addr = ops->map_page(dev, virt_to_page(ptr), in dma_map_single_attrs()
35 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_single_attrs() local
38 if (ops->unmap_page) in dma_unmap_single_attrs()
39 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_single_attrs()
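Every helper in this listing follows the same pattern: fetch the device's dma_map_ops with get_dma_ops() and dispatch to the matching hook. For dma_map_single_attrs() that hook is ops->map_page, since a virtually contiguous buffer is just a page plus an offset. A minimal driver-side sketch of the streaming single-buffer API built on these wrappers; dev, buf, and len are placeholders, not taken from the listing:

    #include <linux/dma-mapping.h>

    /* Map one kmalloc'ed buffer for a device-to-memory transfer, then
     * unmap it once the DMA has completed.  Checking with
     * dma_mapping_error() is mandatory: ops->map_page may fail. */
    static int example_single_dma(struct device *dev, void *buf, size_t len)
    {
            dma_addr_t handle;

            handle = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
            if (dma_mapping_error(dev, handle))
                    return -ENOMEM;

            /* ... program the device with 'handle', wait for completion ... */

            dma_unmap_single(dev, handle, len, DMA_FROM_DEVICE);
            return 0;
    }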
51 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_sg_attrs() local
58 ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
69 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_sg_attrs() local
73 if (ops->unmap_sg) in dma_unmap_sg_attrs()
74 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
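dma_map_sg_attrs() hands the whole scatterlist to ops->map_sg, which may coalesce entries (an IOMMU can merge adjacent segments), so the returned count can be smaller than nents. A hedged usage sketch; all names are placeholders:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Map a scatterlist and hand each mapped segment to the hardware.
     * Iterate over the returned count, not the original nents. */
    static int example_sg_dma(struct device *dev, struct scatterlist *sgl,
                              int nents)
    {
            struct scatterlist *sg;
            int i, count;

            count = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
            if (!count)
                    return -ENOMEM;

            for_each_sg(sgl, sg, count, i) {
                    /* program one hardware descriptor per segment */
                    pr_debug("seg %d: %pad + %u\n", i,
                             &sg_dma_address(sg), sg_dma_len(sg));
            }

            /* Unmap with the ORIGINAL nents, not the mapped count. */
            dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
            return 0;
    }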
81 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_page() local
86 addr = ops->map_page(dev, page, offset, size, dir, NULL); in dma_map_page()
95 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_page() local
98 if (ops->unmap_page) in dma_unmap_page()
99 ops->unmap_page(dev, addr, size, dir, NULL); in dma_unmap_page()
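dma_map_page() and dma_unmap_page() reach the same ops->map_page and ops->unmap_page hooks as the single-buffer calls, but take a struct page directly and, as the matched lines show, pass NULL attrs. A short sketch under the same placeholder assumptions:

    #include <linux/dma-mapping.h>

    /* Page-based variant: map 'size' bytes of 'page' starting at
     * 'offset', without the virt_to_page() conversion that
     * dma_map_single() performs. */
    static int example_page_dma(struct device *dev, struct page *page,
                                unsigned long offset, size_t size)
    {
            dma_addr_t handle;

            handle = dma_map_page(dev, page, offset, size, DMA_BIDIRECTIONAL);
            if (dma_mapping_error(dev, handle))
                    return -ENOMEM;

            /* ... device reads and writes the region ... */

            dma_unmap_page(dev, handle, size, DMA_BIDIRECTIONAL);
            return 0;
    }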
107 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_cpu() local
110 if (ops->sync_single_for_cpu) in dma_sync_single_for_cpu()
111 ops->sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
119 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_device() local
122 if (ops->sync_single_for_device) in dma_sync_single_for_device()
123 ops->sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
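The sync pair transfers ownership of a streaming mapping between CPU and device without unmapping it, through the optional ops->sync_single_for_{cpu,device} hooks (cache-coherent architectures leave them NULL, hence the if checks above). A sketch of the hand-off, assuming a mapping made earlier with dma_map_single(); all names are placeholders:

    #include <linux/dma-mapping.h>

    /* Reuse one long-lived streaming mapping across transfers: give
     * the buffer back to the CPU to inspect it, then to the device. */
    static void example_sync_single(struct device *dev, dma_addr_t handle,
                                    void *buf, size_t len)
    {
            dma_sync_single_for_cpu(dev, handle, len, DMA_FROM_DEVICE);
            /* CPU may now safely read 'buf' */
            dma_sync_single_for_device(dev, handle, len, DMA_FROM_DEVICE);
            /* device may now DMA into the buffer again */
    }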
133 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_range_for_cpu() local
136 if (ops->sync_single_for_cpu) in dma_sync_single_range_for_cpu()
137 ops->sync_single_for_cpu(dev, addr + offset, size, dir); in dma_sync_single_range_for_cpu()
147 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_range_for_device() local
150 if (ops->sync_single_for_device) in dma_sync_single_range_for_device()
151 ops->sync_single_for_device(dev, addr + offset, size, dir); in dma_sync_single_range_for_device()
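The _range variants matched here simply add offset to the DMA address before calling the same single-buffer sync hooks, which makes partial syncs cheap. A sketch, assuming a received frame of rx_len bytes at the start of a larger mapping:

    /* Sync only the bytes the device actually wrote, not the whole
     * mapped buffer.  'handle' and 'rx_len' are placeholders. */
    static void example_sync_range(struct device *dev, dma_addr_t handle,
                                   size_t rx_len)
    {
            dma_sync_single_range_for_cpu(dev, handle, 0, rx_len,
                                          DMA_FROM_DEVICE);
            /* ... consume the frame ... */
            dma_sync_single_range_for_device(dev, handle, 0, rx_len,
                                             DMA_FROM_DEVICE);
    }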
159 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_cpu() local
162 if (ops->sync_sg_for_cpu) in dma_sync_sg_for_cpu()
163 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
171 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_device() local
174 if (ops->sync_sg_for_device) in dma_sync_sg_for_device()
175 ops->sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
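dma_sync_sg_for_cpu() and dma_sync_sg_for_device() are the scatterlist counterparts, dispatching to ops->sync_sg_for_{cpu,device}. A sketch with placeholder names:

    /* Scatter-gather equivalent of the single-buffer syncs; these
     * take the original nelems passed to dma_map_sg(). */
    static void example_sync_sg(struct device *dev, struct scatterlist *sgl,
                                int nents)
    {
            dma_sync_sg_for_cpu(dev, sgl, nents, DMA_BIDIRECTIONAL);
            /* ... CPU reads or updates the pages ... */
            dma_sync_sg_for_device(dev, sgl, nents, DMA_BIDIRECTIONAL);
    }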
214 struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_attrs() local
215 BUG_ON(!ops); in dma_mmap_attrs()
216 if (ops->mmap) in dma_mmap_attrs()
217 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
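dma_mmap_attrs() lets a driver map a coherent allocation into user space through ops->mmap; the BUG_ON(!ops) shown at line 215 indicates this path requires a full ops table. A sketch of a file_operations .mmap handler using the attrs-less dma_mmap_coherent() wrapper; example_ctx and its fields are hypothetical:

    #include <linux/dma-mapping.h>
    #include <linux/fs.h>
    #include <linux/mm.h>

    struct example_ctx {                    /* hypothetical driver state */
            struct device *dev;
            void *vaddr;                    /* from dma_alloc_coherent() */
            dma_addr_t handle;
            size_t size;
    };

    /* Expose the coherent buffer to user space. */
    static int example_mmap(struct file *file, struct vm_area_struct *vma)
    {
            struct example_ctx *ctx = file->private_data;

            return dma_mmap_coherent(ctx->dev, vma, ctx->vaddr,
                                     ctx->handle, ctx->size);
    }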
231 struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_sgtable_attrs() local
232 BUG_ON(!ops); in dma_get_sgtable_attrs()
233 if (ops->get_sgtable) in dma_get_sgtable_attrs()
234 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, in dma_get_sgtable_attrs()
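dma_get_sgtable_attrs() builds an sg_table describing a coherent buffer via ops->get_sgtable (the matched call is truncated above because the statement continues on the next source line). A sketch of the common dma-buf-exporter-style use, with placeholder names:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Re-describe a coherent buffer as an sg_table so another device
     * or subsystem can map it.  vaddr/handle/size come from a prior
     * dma_alloc_coherent(). */
    static int example_export(struct device *dev, void *vaddr,
                              dma_addr_t handle, size_t size)
    {
            struct sg_table sgt;
            int ret;

            ret = dma_get_sgtable(dev, &sgt, vaddr, handle, size);
            if (ret)
                    return ret;
            /* ... hand sgt to the importer ... */
            sg_free_table(&sgt);
            return 0;
    }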
249 struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_attrs() local
252 BUG_ON(!ops); in dma_alloc_attrs()
259 if (!ops->alloc) in dma_alloc_attrs()
262 cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
271 struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_attrs() local
273 BUG_ON(!ops); in dma_free_attrs()
279 if (!ops->free) in dma_free_attrs()
283 ops->free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
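dma_alloc_attrs() and dma_free_attrs() wrap ops->alloc and ops->free, first checking that the hook exists (the !ops->alloc and !ops->free tests above). A sketch of the usual attrs-less wrappers for a descriptor ring; names are placeholders:

    #include <linux/dma-mapping.h>

    /* Coherent (consistent) allocation: the buffer stays mapped for
     * the lifetime of the allocation, so no sync calls are needed.
     * Typical for descriptor rings. */
    static void *example_alloc_ring(struct device *dev, size_t size,
                                    dma_addr_t *handle)
    {
            return dma_alloc_coherent(dev, size, handle, GFP_KERNEL);
    }

    static void example_free_ring(struct device *dev, size_t size,
                                  void *vaddr, dma_addr_t handle)
    {
            dma_free_coherent(dev, size, vaddr, handle);
    }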
333 struct dma_map_ops *ops = get_dma_ops(dev); in dma_supported() local
335 if (!ops) in dma_supported()
337 if (!ops->dma_supported) in dma_supported()
339 return ops->dma_supported(dev, mask); in dma_supported()
346 struct dma_map_ops *ops = get_dma_ops(dev); in dma_set_mask() local
348 if (ops->set_dma_mask) in dma_set_mask()
349 return ops->set_dma_mask(dev, mask); in dma_set_mask()
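dma_supported() and dma_set_mask() close the table: the former queries ops->dma_supported, the latter prefers ops->set_dma_mask when an architecture provides one. A probe-time sketch of mask negotiation, a standard driver pattern rather than code from this header:

    #include <linux/dma-mapping.h>

    /* Ask for 64-bit addressing, fall back to 32-bit.  dma_set_mask()
     * returns 0 when the platform can address the requested range. */
    static int example_set_mask(struct device *dev)
    {
            if (!dma_set_mask(dev, DMA_BIT_MASK(64)))
                    return 0;               /* 64-bit DMA OK */
            if (!dma_set_mask(dev, DMA_BIT_MASK(32))) {
                    dev_info(dev, "falling back to 32-bit DMA\n");
                    return 0;
            }
            return -EIO;                    /* no usable DMA addressing */
    }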