Lines Matching refs:dir

77 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_dma_map_page() argument
81 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_dma_map_page()
86 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_coherent_dma_map_page() argument
107 size_t size, enum dma_data_direction dir, in arm_dma_unmap_page() argument
112 handle & ~PAGE_MASK, size, dir); in arm_dma_unmap_page()
116 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_dma_sync_single_for_cpu() argument
120 __dma_page_dev_to_cpu(page, offset, size, dir); in arm_dma_sync_single_for_cpu()
124 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_dma_sync_single_for_device() argument
128 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_dma_sync_single_for_device()
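The callbacks above are the streaming-DMA entry points of the default (non-IOMMU) dma_map_ops; the dir argument arrives from a driver through the generic DMA API. A minimal, hypothetical driver fragment (not part of the listing; the function and buffer names are made up) showing how dir reaches them:

#include <linux/dma-mapping.h>

/* Receive-path sketch: the device writes into buf, so dir is DMA_FROM_DEVICE. */
static int example_rx(struct device *dev, void *buf, size_t len)
{
	dma_addr_t handle;

	handle = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, handle))
		return -ENOMEM;

	/* ... start the transfer and wait for it to complete ... */

	/* on a non-coherent ARM system this lands in arm_dma_sync_single_for_cpu() */
	dma_sync_single_for_cpu(dev, handle, len, DMA_FROM_DEVICE);
	/* ... CPU consumes buf ... */

	dma_unmap_single(dev, handle, len, DMA_FROM_DEVICE);
	return 0;
}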
793 size_t size, enum dma_data_direction dir, in dma_cache_maint_page() argument
820 op(vaddr + offset, len, dir); in dma_cache_maint_page()
825 op(vaddr + offset, len, dir); in dma_cache_maint_page()
831 op(vaddr, len, dir); in dma_cache_maint_page()
846 size_t size, enum dma_data_direction dir) in __dma_page_cpu_to_dev() argument
850 dma_cache_maint_page(page, off, size, dir, dmac_map_area); in __dma_page_cpu_to_dev()
853 if (dir == DMA_FROM_DEVICE) { in __dma_page_cpu_to_dev()
862 size_t size, enum dma_data_direction dir) in __dma_page_dev_to_cpu() argument
868 if (dir != DMA_TO_DEVICE) { in __dma_page_dev_to_cpu()
871 dma_cache_maint_page(page, off, size, dir, dmac_unmap_area); in __dma_page_dev_to_cpu()
877 if (dir != DMA_TO_DEVICE && size >= PAGE_SIZE) { in __dma_page_dev_to_cpu()
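__dma_page_cpu_to_dev() and __dma_page_dev_to_cpu() are where dir actually selects the cache maintenance. A simplified, lowmem-only sketch of the cpu-to-dev half, assuming the dmac_map_area()/outer_*_range() helpers of this kernel generation (the real code goes through dma_cache_maint_page() and also handles highmem):

static void sketch_page_cpu_to_dev(struct page *page, unsigned long off,
				   size_t size, enum dma_data_direction dir)
{
	phys_addr_t paddr = page_to_phys(page) + off;

	/* inner cache: clean and/or invalidate according to dir */
	dmac_map_area(page_address(page) + off, size, dir);

	if (dir == DMA_FROM_DEVICE)
		outer_inv_range(paddr, paddr + size);	/* device will write */
	else
		outer_clean_range(paddr, paddr + size);	/* push CPU writes out */
}

The dev-to-cpu mirror (lines 862-877 above) does the inverse: for any direction other than DMA_TO_DEVICE it invalidates again via dmac_unmap_area(), so the CPU cannot read stale cache lines over data the device wrote.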
912 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_dma_map_sg() argument
923 s->length, dir, attrs); in arm_dma_map_sg()
931 ops->unmap_page(dev, sg_dma_address(s), sg_dma_len(s), dir, attrs); in arm_dma_map_sg()
946 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_dma_unmap_sg() argument
954 ops->unmap_page(dev, sg_dma_address(s), sg_dma_len(s), dir, attrs); in arm_dma_unmap_sg()
965 int nents, enum dma_data_direction dir) in arm_dma_sync_sg_for_cpu() argument
973 dir); in arm_dma_sync_sg_for_cpu()
984 int nents, enum dma_data_direction dir) in arm_dma_sync_sg_for_device() argument
992 dir); in arm_dma_sync_sg_for_device()
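The scatter-gather variants above simply loop over the list and forward the one dir value to the per-page callbacks. A hypothetical caller (names made up) showing where that value comes from:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int example_map_sg(struct device *dev, struct scatterlist *sgl, int nents)
{
	struct scatterlist *s;
	int mapped, i;

	/* dir = DMA_TO_DEVICE: the CPU filled the buffers, the device reads them */
	mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (!mapped)
		return -ENOMEM;

	for_each_sg(sgl, s, mapped, i) {
		/* program sg_dma_address(s) / sg_dma_len(s) into the device */
	}

	/* the same dir must be passed back so the matching cache ops run on unmap */
	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}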
1480 static int __dma_direction_to_prot(enum dma_data_direction dir) in __dma_direction_to_prot() argument
1484 switch (dir) { in __dma_direction_to_prot()
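__dma_direction_to_prot() (line 1480) is the glue between dma_data_direction and IOMMU page-table permissions; the listing only shows its switch statement. A plausible body, assuming the IOMMU_READ/IOMMU_WRITE flags from <linux/iommu.h> (hedged reconstruction, not copied from the file):

#include <linux/dma-direction.h>
#include <linux/iommu.h>

static int sketch_direction_to_prot(enum dma_data_direction dir)
{
	switch (dir) {
	case DMA_BIDIRECTIONAL:
		return IOMMU_READ | IOMMU_WRITE;
	case DMA_TO_DEVICE:
		return IOMMU_READ;	/* device only reads from memory */
	case DMA_FROM_DEVICE:
		return IOMMU_WRITE;	/* device only writes to memory */
	default:
		return 0;		/* DMA_NONE: no access */
	}
}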
1506 enum dma_data_direction dir, struct dma_attrs *attrs, in __map_sg_chunk() argument
1529 __dma_page_cpu_to_dev(sg_page(s), s->offset, s->length, dir); in __map_sg_chunk()
1531 prot = __dma_direction_to_prot(dir); in __map_sg_chunk()
1549 enum dma_data_direction dir, struct dma_attrs *attrs, in __iommu_map_sg() argument
1566 dir, attrs, is_coherent) < 0) in __iommu_map_sg()
1579 if (__map_sg_chunk(dev, start, size, &dma->dma_address, dir, attrs, in __iommu_map_sg()
1607 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_coherent_iommu_map_sg() argument
1609 return __iommu_map_sg(dev, sg, nents, dir, attrs, true); in arm_coherent_iommu_map_sg()
1625 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_iommu_map_sg() argument
1627 return __iommu_map_sg(dev, sg, nents, dir, attrs, false); in arm_iommu_map_sg()
1631 int nents, enum dma_data_direction dir, struct dma_attrs *attrs, in __iommu_unmap_sg() argument
1644 s->length, dir); in __iommu_unmap_sg()
1659 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_coherent_iommu_unmap_sg() argument
1661 __iommu_unmap_sg(dev, sg, nents, dir, attrs, true); in arm_coherent_iommu_unmap_sg()
1675 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_iommu_unmap_sg() argument
1677 __iommu_unmap_sg(dev, sg, nents, dir, attrs, false); in arm_iommu_unmap_sg()
1688 int nents, enum dma_data_direction dir) in arm_iommu_sync_sg_for_cpu() argument
1694 __dma_page_dev_to_cpu(sg_page(s), s->offset, s->length, dir); in arm_iommu_sync_sg_for_cpu()
1706 int nents, enum dma_data_direction dir) in arm_iommu_sync_sg_for_device() argument
1712 __dma_page_cpu_to_dev(sg_page(s), s->offset, s->length, dir); in arm_iommu_sync_sg_for_device()
1727 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_coherent_iommu_map_page() argument
1738 prot = __dma_direction_to_prot(dir); in arm_coherent_iommu_map_page()
1761 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_iommu_map_page() argument
1765 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_iommu_map_page()
1767 return arm_coherent_iommu_map_page(dev, page, offset, size, dir, attrs); in arm_iommu_map_page()
1780 size_t size, enum dma_data_direction dir, in arm_coherent_iommu_unmap_page() argument
1805 size_t size, enum dma_data_direction dir, in arm_iommu_unmap_page() argument
1818 __dma_page_dev_to_cpu(page, offset, size, dir); in arm_iommu_unmap_page()
1825 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_iommu_sync_single_for_cpu() argument
1835 __dma_page_dev_to_cpu(page, offset, size, dir); in arm_iommu_sync_single_for_cpu()
1839 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_iommu_sync_single_for_device() argument
1849 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_iommu_sync_single_for_device()
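The arm_iommu_* callbacks above are only reached once a device has been attached to an IOMMU mapping. A hypothetical setup sketch (the IOVA base and window size are made up; exact signatures vary between kernel versions):

#include <linux/err.h>
#include <linux/platform_device.h>
#include <linux/sizes.h>
#include <asm/dma-iommu.h>

static int example_attach_iommu(struct device *dev)
{
	struct dma_iommu_mapping *mapping;

	/* carve out a 256 MiB IOVA window starting at 0x80000000 */
	mapping = arm_iommu_create_mapping(&platform_bus_type, 0x80000000, SZ_256M);
	if (IS_ERR(mapping))
		return PTR_ERR(mapping);

	/* from here on, dma_map_page()/dma_map_sg() on this device end up in
	 * arm_iommu_map_page()/arm_iommu_map_sg() with the caller's dir value */
	return arm_iommu_attach_device(dev, mapping);
}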