Lines matching refs:attrs (references to the struct dma_attrs *attrs argument in the ARM DMA-mapping implementation)
77 struct dma_attrs *attrs) in arm_dma_map_page() argument
79 if (!dma_get_attr(DMA_ATTR_SKIP_CPU_SYNC, attrs)) in arm_dma_map_page()
86 struct dma_attrs *attrs) in arm_coherent_dma_map_page() argument
107 struct dma_attrs *attrs) in arm_dma_unmap_page() argument
109 if (!dma_get_attr(DMA_ATTR_SKIP_CPU_SYNC, attrs)) in arm_dma_unmap_page()
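
The three functions above are the streaming map/unmap paths: arm_dma_map_page() and arm_dma_unmap_page() perform CPU cache maintenance unless the caller passed DMA_ATTR_SKIP_CPU_SYNC, while the coherent variant skips it unconditionally. A minimal caller-side sketch, using the pre-4.8 struct dma_attrs API this file is written against (the function name is hypothetical):

	#include <linux/dma-mapping.h>

	/* Hypothetical helper: map a buffer without the cache clean that
	 * arm_dma_map_page() would otherwise do. Only safe when the caller
	 * handles cache maintenance itself. */
	static dma_addr_t map_no_sync(struct device *dev, void *buf, size_t len)
	{
		DEFINE_DMA_ATTRS(attrs);

		dma_set_attr(DMA_ATTR_SKIP_CPU_SYNC, &attrs);
		return dma_map_single_attrs(dev, buf, len, DMA_TO_DEVICE, &attrs);
	}
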
148 dma_addr_t *handle, gfp_t gfp, struct dma_attrs *attrs);
150 dma_addr_t handle, struct dma_attrs *attrs);
573 static inline pgprot_t __get_dma_pgprot(struct dma_attrs *attrs, pgprot_t prot) in __get_dma_pgprot() argument
575 prot = dma_get_attr(DMA_ATTR_WRITE_COMBINE, attrs) ? in __get_dma_pgprot()
587 #define __get_dma_pgprot(attrs, prot) __pgprot(0) argument
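
Lines 573-587 are the page-protection helper: with an MMU, DMA_ATTR_WRITE_COMBINE selects a write-combining protection instead of the default DMA-coherent one; the nommu build stubs the helper out. Reconstructed from kernels of this vintage (not verbatim), the MMU version reads roughly:

	static inline pgprot_t __get_dma_pgprot(struct dma_attrs *attrs, pgprot_t prot)
	{
		prot = dma_get_attr(DMA_ATTR_WRITE_COMBINE, attrs) ?
				pgprot_writecombine(prot) :
				pgprot_dmacoherent(prot);
		return prot;
	}
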
613 struct dma_attrs *attrs, const void *caller) in __dma_alloc() argument
646 want_vaddr = !dma_get_attr(DMA_ATTR_NO_KERNEL_MAPPING, attrs); in __dma_alloc()
668 gfp_t gfp, struct dma_attrs *attrs) in arm_dma_alloc() argument
670 pgprot_t prot = __get_dma_pgprot(attrs, PAGE_KERNEL); in arm_dma_alloc()
677 attrs, __builtin_return_address(0)); in arm_dma_alloc()
681 dma_addr_t *handle, gfp_t gfp, struct dma_attrs *attrs) in arm_coherent_dma_alloc() argument
683 pgprot_t prot = __get_dma_pgprot(attrs, PAGE_KERNEL); in arm_coherent_dma_alloc()
690 attrs, __builtin_return_address(0)); in arm_coherent_dma_alloc()
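
Lines 613-690 are the coherent allocators; the want_vaddr test at line 646 shows that DMA_ATTR_NO_KERNEL_MAPPING suppresses the kernel mapping entirely, useful for large buffers only the device and userspace ever touch. Caller-side sketch (dev, size and the surrounding context are hypothetical):

	DEFINE_DMA_ATTRS(attrs);
	dma_addr_t handle;
	void *cookie;

	dma_set_attr(DMA_ATTR_NO_KERNEL_MAPPING, &attrs);
	cookie = dma_alloc_attrs(dev, size, &handle, GFP_KERNEL, &attrs);
	/* 'cookie' is not dereferenceable here; it is only a token to hand
	 * back to dma_mmap_attrs() and dma_free_attrs(). */
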
698 struct dma_attrs *attrs) in arm_dma_mmap() argument
707 vma->vm_page_prot = __get_dma_pgprot(attrs, vma->vm_page_prot); in arm_dma_mmap()
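
arm_dma_mmap() (line 698) reapplies __get_dma_pgprot() to the VMA, so the same attrs decide how userspace sees the buffer. A hypothetical driver mmap handler (mydrv and its fields are made-up names):

	static int mydrv_mmap(struct file *file, struct vm_area_struct *vma)
	{
		struct mydrv *priv = file->private_data;	/* hypothetical */
		DEFINE_DMA_ATTRS(attrs);

		dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
		return dma_mmap_attrs(priv->dev, vma, priv->cpu_addr,
				      priv->handle, priv->size, &attrs);
	}
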
727 dma_addr_t handle, struct dma_attrs *attrs, in __arm_dma_free() argument
731 bool want_vaddr = !dma_get_attr(DMA_ATTR_NO_KERNEL_MAPPING, attrs); in __arm_dma_free()
756 dma_addr_t handle, struct dma_attrs *attrs) in arm_dma_free() argument
758 __arm_dma_free(dev, size, cpu_addr, handle, attrs, false); in arm_dma_free()
762 dma_addr_t handle, struct dma_attrs *attrs) in arm_coherent_dma_free() argument
764 __arm_dma_free(dev, size, cpu_addr, handle, attrs, true); in arm_coherent_dma_free()
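
The free paths at lines 727-764 re-derive want_vaddr from attrs, so a buffer must be freed with the same attrs it was allocated with; otherwise __arm_dma_free() may try to tear down a kernel mapping that was never created. Continuing the NO_KERNEL_MAPPING sketch above:

	/* Same attrs as at allocation time. */
	dma_free_attrs(dev, size, cookie, handle, &attrs);
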
769 struct dma_attrs *attrs) in arm_dma_get_sgtable() argument
902 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_dma_map_sg() argument
913 s->length, dir, attrs); in arm_dma_map_sg()
921 ops->unmap_page(dev, sg_dma_address(s), sg_dma_len(s), dir, attrs); in arm_dma_map_sg()
936 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_dma_unmap_sg() argument
944 ops->unmap_page(dev, sg_dma_address(s), sg_dma_len(s), dir, attrs); in arm_dma_unmap_sg()
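
arm_dma_map_sg()/arm_dma_unmap_sg() (lines 902-944) simply fan attrs out to the per-page ops for every segment, unwinding already-mapped segments if one fails. From the caller's side the attrs travel through dma_map_sg_attrs(); a sketch under the same assumptions as above:

	DEFINE_DMA_ATTRS(attrs);
	int count;

	dma_set_attr(DMA_ATTR_SKIP_CPU_SYNC, &attrs);
	count = dma_map_sg_attrs(dev, sgl, nents, DMA_FROM_DEVICE, &attrs);
	if (!count)
		return -ENOMEM;
	/* ... DMA runs ... */
	/* Note: unmap with the original nents, not the returned count. */
	dma_unmap_sg_attrs(dev, sgl, nents, DMA_FROM_DEVICE, &attrs);
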
1116 gfp_t gfp, struct dma_attrs *attrs) in __iommu_alloc_buffer() argument
1130 if (dma_get_attr(DMA_ATTR_FORCE_CONTIGUOUS, attrs)) in __iommu_alloc_buffer()
1201 size_t size, struct dma_attrs *attrs) in __iommu_free_buffer() argument
1207 if (dma_get_attr(DMA_ATTR_FORCE_CONTIGUOUS, attrs)) { in __iommu_free_buffer()
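
In the IOMMU buffer helpers (lines 1116-1207), DMA_ATTR_FORCE_CONTIGUOUS steers __iommu_alloc_buffer() toward one physically contiguous allocation instead of gathering scattered pages, which matters when another agent without IOMMU access shares the buffer. Hypothetical caller:

	DEFINE_DMA_ATTRS(attrs);

	dma_set_attr(DMA_ATTR_FORCE_CONTIGUOUS, &attrs);
	buf = dma_alloc_attrs(dev, size, &handle, GFP_KERNEL, &attrs);
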
1300 static struct page **__iommu_get_pages(void *cpu_addr, struct dma_attrs *attrs) in __iommu_get_pages() argument
1307 if (dma_get_attr(DMA_ATTR_NO_KERNEL_MAPPING, attrs)) in __iommu_get_pages()
1345 dma_addr_t *handle, gfp_t gfp, struct dma_attrs *attrs) in arm_iommu_alloc_attrs() argument
1347 pgprot_t prot = __get_dma_pgprot(attrs, PAGE_KERNEL); in arm_iommu_alloc_attrs()
1366 pages = __iommu_alloc_buffer(dev, size, gfp, attrs); in arm_iommu_alloc_attrs()
1374 if (dma_get_attr(DMA_ATTR_NO_KERNEL_MAPPING, attrs)) in arm_iommu_alloc_attrs()
1387 __iommu_free_buffer(dev, pages, size, attrs); in arm_iommu_alloc_attrs()
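
arm_iommu_alloc_attrs() (lines 1345-1387) only runs once the device sits behind an ARM IOMMU mapping. The usual setup uses the arm_iommu_* helpers from asm/dma-iommu.h; a sketch assuming a platform device (the base address and window size below are made up):

	#include <asm/dma-iommu.h>
	#include <linux/platform_device.h>
	#include <linux/sizes.h>

	struct dma_iommu_mapping *mapping;
	int err;

	mapping = arm_iommu_create_mapping(&platform_bus_type,
					   0x80000000, SZ_256M);
	if (IS_ERR(mapping))
		return PTR_ERR(mapping);

	err = arm_iommu_attach_device(dev, mapping);
	if (err)
		arm_iommu_release_mapping(mapping);
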
1393 struct dma_attrs *attrs) in arm_iommu_mmap_attrs() argument
1397 struct page **pages = __iommu_get_pages(cpu_addr, attrs); in arm_iommu_mmap_attrs()
1401 vma->vm_page_prot = __get_dma_pgprot(attrs, vma->vm_page_prot); in arm_iommu_mmap_attrs()
1429 dma_addr_t handle, struct dma_attrs *attrs) in arm_iommu_free_attrs() argument
1439 pages = __iommu_get_pages(cpu_addr, attrs); in arm_iommu_free_attrs()
1445 if (!dma_get_attr(DMA_ATTR_NO_KERNEL_MAPPING, attrs)) { in arm_iommu_free_attrs()
1451 __iommu_free_buffer(dev, pages, size, attrs); in arm_iommu_free_attrs()
1456 size_t size, struct dma_attrs *attrs) in arm_iommu_get_sgtable() argument
1459 struct page **pages = __iommu_get_pages(cpu_addr, attrs); in arm_iommu_get_sgtable()
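
arm_iommu_get_sgtable() (line 1456) rebuilds an sg_table from the pages array that __iommu_get_pages() recovers from the cpu_addr/attrs pair; exporters such as dma-buf reach it through the generic wrapper. Sketch, assuming the same attrs used at allocation:

	struct sg_table sgt;
	int ret;

	ret = dma_get_sgtable_attrs(dev, &sgt, cpu_addr, handle, size, &attrs);
	if (ret)
		return ret;
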
1494 enum dma_data_direction dir, struct dma_attrs *attrs, in __map_sg_chunk() argument
1516 !dma_get_attr(DMA_ATTR_SKIP_CPU_SYNC, attrs)) in __map_sg_chunk()
1537 enum dma_data_direction dir, struct dma_attrs *attrs, in __iommu_map_sg() argument
1554 dir, attrs, is_coherent) < 0) in __iommu_map_sg()
1567 if (__map_sg_chunk(dev, start, size, &dma->dma_address, dir, attrs, in __iommu_map_sg()
1595 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_coherent_iommu_map_sg() argument
1597 return __iommu_map_sg(dev, sg, nents, dir, attrs, true); in arm_coherent_iommu_map_sg()
1613 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_iommu_map_sg() argument
1615 return __iommu_map_sg(dev, sg, nents, dir, attrs, false); in arm_iommu_map_sg()
1619 int nents, enum dma_data_direction dir, struct dma_attrs *attrs, in __iommu_unmap_sg() argument
1630 !dma_get_attr(DMA_ATTR_SKIP_CPU_SYNC, attrs)) in __iommu_unmap_sg()
1647 int nents, enum dma_data_direction dir, struct dma_attrs *attrs) in arm_coherent_iommu_unmap_sg() argument
1649 __iommu_unmap_sg(dev, sg, nents, dir, attrs, true); in arm_coherent_iommu_unmap_sg()
1663 enum dma_data_direction dir, struct dma_attrs *attrs) in arm_iommu_unmap_sg() argument
1665 __iommu_unmap_sg(dev, sg, nents, dir, attrs, false); in arm_iommu_unmap_sg()
1716 struct dma_attrs *attrs) in arm_coherent_iommu_map_page() argument
1750 struct dma_attrs *attrs) in arm_iommu_map_page() argument
1752 if (!dma_get_attr(DMA_ATTR_SKIP_CPU_SYNC, attrs)) in arm_iommu_map_page()
1755 return arm_coherent_iommu_map_page(dev, page, offset, size, dir, attrs); in arm_iommu_map_page()
1769 struct dma_attrs *attrs) in arm_coherent_iommu_unmap_page() argument
1794 struct dma_attrs *attrs) in arm_iommu_unmap_page() argument
1805 if (!dma_get_attr(DMA_ATTR_SKIP_CPU_SYNC, attrs)) in arm_iommu_unmap_page()
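
The page-level IOMMU ops at the end (lines 1716-1805) show the pattern that runs through the whole file: the non-coherent op is the coherent op plus conditional cache maintenance, gated on DMA_ATTR_SKIP_CPU_SYNC. Reconstructed from the fragments above (kernels of this era; not verbatim):

	static dma_addr_t arm_iommu_map_page(struct device *dev, struct page *page,
		     unsigned long offset, size_t size, enum dma_data_direction dir,
		     struct dma_attrs *attrs)
	{
		/* Clean/invalidate the CPU cache unless the caller opted out. */
		if (!dma_get_attr(DMA_ATTR_SKIP_CPU_SYNC, attrs))
			__dma_page_cpu_to_dev(page, offset, size, dir);

		/* The coherent variant does the IOVA allocation and mapping. */
		return arm_coherent_iommu_map_page(dev, page, offset, size, dir, attrs);
	}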