Lines Matching refs:dir

76 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_dma_map_page() argument
80 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_dma_map_page()
85 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_coherent_dma_map_page() argument
106 size_t size, enum dma_data_direction dir, in arm_dma_unmap_page() argument
111 handle & ~PAGE_MASK, size, dir); in arm_dma_unmap_page()
115 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_dma_sync_single_for_cpu() argument
119 __dma_page_dev_to_cpu(page, offset, size, dir); in arm_dma_sync_single_for_cpu()
123 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_dma_sync_single_for_device() argument
127 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_dma_sync_single_for_device()
783 size_t size, enum dma_data_direction dir, in dma_cache_maint_page() argument
810 op(vaddr + offset, len, dir); in dma_cache_maint_page()
815 op(vaddr + offset, len, dir); in dma_cache_maint_page()
821 op(vaddr, len, dir); in dma_cache_maint_page()
836 size_t size, enum dma_data_direction dir) in __dma_page_cpu_to_dev() argument
840 dma_cache_maint_page(page, off, size, dir, dmac_map_area); in __dma_page_cpu_to_dev()
843 if (dir == DMA_FROM_DEVICE) { in __dma_page_cpu_to_dev()
852 size_t size, enum dma_data_direction dir) in __dma_page_dev_to_cpu() argument
858 if (dir != DMA_TO_DEVICE) { in __dma_page_dev_to_cpu()
861 dma_cache_maint_page(page, off, size, dir, dmac_unmap_area); in __dma_page_dev_to_cpu()
867 if (dir != DMA_TO_DEVICE && size >= PAGE_SIZE) { in __dma_page_dev_to_cpu()
902 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_dma_map_sg() argument
913 s->length, dir, attrs); in arm_dma_map_sg()
921 ops->unmap_page(dev, sg_dma_address(s), sg_dma_len(s), dir, attrs); in arm_dma_map_sg()
936 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_dma_unmap_sg() argument
944 ops->unmap_page(dev, sg_dma_address(s), sg_dma_len(s), dir, attrs); in arm_dma_unmap_sg()
955 int nents, enum dma_data_direction dir) in arm_dma_sync_sg_for_cpu() argument
963 dir); in arm_dma_sync_sg_for_cpu()
974 int nents, enum dma_data_direction dir) in arm_dma_sync_sg_for_device() argument
982 dir); in arm_dma_sync_sg_for_device()
1468 static int __dma_direction_to_prot(enum dma_data_direction dir) in __dma_direction_to_prot() argument
1472 switch (dir) { in __dma_direction_to_prot()
1494 enum dma_data_direction dir, struct dma_attrs *attrs, in __map_sg_chunk() argument
1517 __dma_page_cpu_to_dev(sg_page(s), s->offset, s->length, dir); in __map_sg_chunk()
1519 prot = __dma_direction_to_prot(dir); in __map_sg_chunk()
1537 enum dma_data_direction dir, struct dma_attrs *attrs, in __iommu_map_sg() argument
1554 dir, attrs, is_coherent) < 0) in __iommu_map_sg()
1567 if (__map_sg_chunk(dev, start, size, &dma->dma_address, dir, attrs, in __iommu_map_sg()
1595 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_coherent_iommu_map_sg() argument
1597 return __iommu_map_sg(dev, sg, nents, dir, attrs, true); in arm_coherent_iommu_map_sg()
1613 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_iommu_map_sg() argument
1615 return __iommu_map_sg(dev, sg, nents, dir, attrs, false); in arm_iommu_map_sg()
1619 int nents, enum dma_data_direction dir, struct dma_attrs *attrs, in __iommu_unmap_sg() argument
1632 s->length, dir); in __iommu_unmap_sg()
1647 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_coherent_iommu_unmap_sg() argument
1649 __iommu_unmap_sg(dev, sg, nents, dir, attrs, true); in arm_coherent_iommu_unmap_sg()
1663 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_iommu_unmap_sg() argument
1665 __iommu_unmap_sg(dev, sg, nents, dir, attrs, false); in arm_iommu_unmap_sg()
1676 int nents, enum dma_data_direction dir) in arm_iommu_sync_sg_for_cpu() argument
1682 __dma_page_dev_to_cpu(sg_page(s), s->offset, s->length, dir); in arm_iommu_sync_sg_for_cpu()
1694 int nents, enum dma_data_direction dir) in arm_iommu_sync_sg_for_device() argument
1700 __dma_page_cpu_to_dev(sg_page(s), s->offset, s->length, dir); in arm_iommu_sync_sg_for_device()
1715 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_coherent_iommu_map_page() argument
1726 prot = __dma_direction_to_prot(dir); in arm_coherent_iommu_map_page()
1749 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_iommu_map_page() argument
1753 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_iommu_map_page()
1755 return arm_coherent_iommu_map_page(dev, page, offset, size, dir, attrs); in arm_iommu_map_page()
1768 size_t size, enum dma_data_direction dir, in arm_coherent_iommu_unmap_page() argument
1793 size_t size, enum dma_data_direction dir, in arm_iommu_unmap_page() argument
1806 __dma_page_dev_to_cpu(page, offset, size, dir); in arm_iommu_unmap_page()
1813 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_iommu_sync_single_for_cpu() argument
1823 __dma_page_dev_to_cpu(page, offset, size, dir); in arm_iommu_sync_single_for_cpu()
1827 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_iommu_sync_single_for_device() argument
1837 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_iommu_sync_single_for_device()
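
Every match above is the same `enum dma_data_direction dir` parameter flowing through the ARM DMA-mapping callbacks. As a minimal sketch of how a driver-supplied direction reaches these callbacks, the hypothetical snippet below (example_rx, dev, len and buf are illustrative names, not part of the file indexed above) uses the generic streaming DMA API, which on ARM dispatches to the arm_dma_* ops listed here; it is an assumption-laden illustration, not code from this file.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>

static int example_rx(struct device *dev, size_t len)
{
	void *buf = kmalloc(len, GFP_KERNEL);
	dma_addr_t handle;

	if (!buf)
		return -ENOMEM;

	/*
	 * DMA_FROM_DEVICE: the device writes, the CPU reads afterwards.
	 * On ARM, dma_map_single() ends up in arm_dma_map_page(), which
	 * calls __dma_page_cpu_to_dev() to do the cache maintenance that
	 * this direction requires before the hardware starts writing.
	 */
	handle = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, handle)) {
		kfree(buf);
		return -EIO;
	}

	/* ... program the device with `handle` and wait for completion ... */

	/*
	 * If the CPU needs to inspect the buffer while the mapping stays
	 * alive, ownership is handed back with dma_sync_single_for_cpu()
	 * (arm_dma_sync_single_for_cpu() -> __dma_page_dev_to_cpu()) and
	 * returned to the device with dma_sync_single_for_device()
	 * (arm_dma_sync_single_for_device() -> __dma_page_cpu_to_dev()).
	 */
	dma_sync_single_for_cpu(dev, handle, len, DMA_FROM_DEVICE);
	/* ... CPU looks at buf ... */
	dma_sync_single_for_device(dev, handle, len, DMA_FROM_DEVICE);

	/*
	 * The direction passed at unmap time must match the one used at
	 * map time; arm_dma_unmap_page() calls __dma_page_dev_to_cpu()
	 * so the CPU sees the data the device wrote.
	 */
	dma_unmap_single(dev, handle, len, DMA_FROM_DEVICE);

	kfree(buf);
	return 0;
}

The same `dir` value is also what the scatterlist (arm_dma_map_sg and the arm_iommu_* variants) and IOMMU paths above forward per segment, and what __dma_direction_to_prot() translates into IOMMU page-protection bits.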