Lines matching refs:prot
295 pgprot_t prot, struct page **ret_page,
299 pgprot_t prot, struct page **ret_page,
303 __dma_alloc_remap(struct page *page, size_t size, gfp_t gfp, pgprot_t prot, in __dma_alloc_remap() argument
312 prot, caller); in __dma_alloc_remap()
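
These matches appear to come from arch/arm/mm/dma-mapping.c. The lines at 303 and 312 are the head and tail of a small wrapper; a hedged sketch of how __dma_alloc_remap() plausibly reads in full (the unmatched middle is reconstructed, not quoted):

static void *__dma_alloc_remap(struct page *page, size_t size, gfp_t gfp,
			       pgprot_t prot, const void *caller)
{
	/*
	 * Map the already-allocated pages into a fresh vmalloc-area
	 * mapping with the requested protection; VM_USERMAP marks the
	 * area as eligible for a later mmap() to user space.
	 */
	return dma_common_contiguous_remap(page, size,
			VM_ARM_DMA_CONSISTENT | VM_USERMAP,
			prot, caller);
}
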
353 pgprot_t prot = pgprot_dmacoherent(PAGE_KERNEL); in atomic_pool_init() local
363 ptr = __alloc_from_contiguous(NULL, atomic_pool_size, prot, in atomic_pool_init()
366 ptr = __alloc_remap_buffer(NULL, atomic_pool_size, gfp, prot, in atomic_pool_init()
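
Lines 363 and 366 are the two arms of the atomic-pool setup; a sketch of the branch around them, assuming the usual CMA-availability check (the condition and the continuation arguments are not part of the match output):

	if (dev_get_cma_area(NULL))
		ptr = __alloc_from_contiguous(NULL, atomic_pool_size, prot,
					      &page, atomic_pool_init, true);
	else
		ptr = __alloc_remap_buffer(NULL, atomic_pool_size, gfp, prot,
					   &page, atomic_pool_init, true);
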
457 pgprot_t prot = *(pgprot_t *)data; in __dma_update_pte() local
459 set_pte_ext(pte, mk_pte(page, prot), 0); in __dma_update_pte()
463 static void __dma_remap(struct page *page, size_t size, pgprot_t prot) in __dma_remap() argument
468 apply_to_page_range(&init_mm, start, size, __dma_update_pte, &prot); in __dma_remap()
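
Lines 457-468 show the remapping machinery: __dma_remap() walks the buffer's kernel page tables and rewrites each PTE with the new protection. A sketch of both functions, with the unmatched lines reconstructed as assumptions:

static int __dma_update_pte(pte_t *pte, pgtable_t token, unsigned long addr,
			    void *data)
{
	struct page *page = virt_to_page(addr);
	pgprot_t prot = *(pgprot_t *)data;

	/* rewrite this PTE with the caller-supplied protection */
	set_pte_ext(pte, mk_pte(page, prot), 0);
	return 0;
}

static void __dma_remap(struct page *page, size_t size, pgprot_t prot)
{
	unsigned long start = (unsigned long) page_address(page);

	apply_to_page_range(&init_mm, start, size, __dma_update_pte, &prot);
	/* stale TLB entries would still carry the old attributes */
	flush_tlb_kernel_range(start, start + size);
}
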
473 pgprot_t prot, struct page **ret_page, in __alloc_remap_buffer() argument
484 ptr = __dma_alloc_remap(page, size, gfp, prot, caller); in __alloc_remap_buffer()
532 pgprot_t prot, struct page **ret_page, in __alloc_from_contiguous() argument
550 ptr = __dma_alloc_remap(page, size, GFP_KERNEL, prot, caller); in __alloc_from_contiguous()
556 __dma_remap(page, size, prot); in __alloc_from_contiguous()
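
The matches at 550 and 556 are the two arms of the highmem/lowmem split inside __alloc_from_contiguous(); the surrounding condition is an assumption:

	if (PageHighMem(page)) {
		/* no permanent kernel mapping exists: create one */
		ptr = __dma_alloc_remap(page, size, GFP_KERNEL, prot, caller);
	} else {
		/* lowmem: retarget the existing linear-map PTEs instead */
		__dma_remap(page, size, prot);
		ptr = page_address(page);
	}
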
577 static inline pgprot_t __get_dma_pgprot(struct dma_attrs *attrs, pgprot_t prot) in __get_dma_pgprot() argument
579 prot = dma_get_attr(DMA_ATTR_WRITE_COMBINE, attrs) ? in __get_dma_pgprot()
580 pgprot_writecombine(prot) : in __get_dma_pgprot()
581 pgprot_dmacoherent(prot); in __get_dma_pgprot()
582 return prot; in __get_dma_pgprot()
591 #define __get_dma_pgprot(attrs, prot) __pgprot(0) argument
592 #define __alloc_remap_buffer(dev, size, gfp, prot, ret, c, wv) NULL argument
594 #define __alloc_from_contiguous(dev, size, prot, ret, c, wv) NULL argument
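
The stubs at 591-594 presumably sit on the !CONFIG_MMU side of a conditional whose #ifdef/#else lines do not themselves match "prot"; the arrangement is roughly:

#ifdef CONFIG_MMU
/* the __get_dma_pgprot()/__alloc_remap_buffer()/__alloc_from_contiguous()
 * implementations matched above */
#else	/* !CONFIG_MMU: remapping is meaningless without an MMU */
#define __get_dma_pgprot(attrs, prot)				__pgprot(0)
#define __alloc_remap_buffer(dev, size, gfp, prot, ret, c, wv)	NULL
#define __alloc_from_contiguous(dev, size, prot, ret, c, wv)	NULL
#endif
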
616 gfp_t gfp, pgprot_t prot, bool is_coherent, in __dma_alloc() argument
655 addr = __alloc_from_contiguous(dev, size, prot, &page, in __dma_alloc()
662 addr = __alloc_remap_buffer(dev, size, gfp, prot, &page, in __dma_alloc()
678 pgprot_t prot = __get_dma_pgprot(attrs, PAGE_KERNEL); in arm_dma_alloc() local
680 return __dma_alloc(dev, size, handle, gfp, prot, false, in arm_dma_alloc()
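
Lines 678-680 show the entry point tying the pieces together: arm_dma_alloc() derives the pgprot from the allocation attributes and hands it down. A sketch, with the body between the two matched lines assumed:

void *arm_dma_alloc(struct device *dev, size_t size, dma_addr_t *handle,
		    gfp_t gfp, struct dma_attrs *attrs)
{
	pgprot_t prot = __get_dma_pgprot(attrs, PAGE_KERNEL);

	/* is_coherent=false: this is the non-coherent allocator */
	return __dma_alloc(dev, size, handle, gfp, prot, false,
			   __builtin_return_address(0));
}
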
1236 __iommu_alloc_remap(struct page **pages, size_t size, gfp_t gfp, pgprot_t prot, in __iommu_alloc_remap() argument
1240 VM_ARM_DMA_CONSISTENT | VM_USERMAP, prot, caller); in __iommu_alloc_remap()
1359 pgprot_t prot = __get_dma_pgprot(attrs, PAGE_KERNEL); in arm_iommu_alloc_attrs() local
1389 addr = __iommu_alloc_remap(pages, size, gfp, prot, in arm_iommu_alloc_attrs()
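
The IOMMU variant at 1236-1240 mirrors __dma_alloc_remap() but takes an array of pages rather than one physically contiguous range; a sketch with the unmatched middle reconstructed:

static void *
__iommu_alloc_remap(struct page **pages, size_t size, gfp_t gfp, pgprot_t prot,
		    const void *caller)
{
	return dma_common_pages_remap(pages, size,
			VM_ARM_DMA_CONSISTENT | VM_USERMAP, prot, caller);
}
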
1482 int prot; in __dma_direction_to_prot() local
1486 prot = IOMMU_READ | IOMMU_WRITE; in __dma_direction_to_prot()
1489 prot = IOMMU_READ; in __dma_direction_to_prot()
1492 prot = IOMMU_WRITE; in __dma_direction_to_prot()
1495 prot = 0; in __dma_direction_to_prot()
1498 return prot; in __dma_direction_to_prot()
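
Lines 1482-1498 are the body of __dma_direction_to_prot() with its case labels stripped by the match; restoring them (the labels are assumptions, though only one assignment per direction fits):

static int __dma_direction_to_prot(enum dma_data_direction dir)
{
	int prot;

	switch (dir) {
	case DMA_BIDIRECTIONAL:
		prot = IOMMU_READ | IOMMU_WRITE;
		break;
	case DMA_TO_DEVICE:
		/* device only reads memory the CPU wrote */
		prot = IOMMU_READ;
		break;
	case DMA_FROM_DEVICE:
		/* device only writes memory the CPU will read */
		prot = IOMMU_WRITE;
		break;
	default:
		prot = 0;
	}

	return prot;
}
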
1514 int prot; in __map_sg_chunk() local
1531 prot = __dma_direction_to_prot(dir); in __map_sg_chunk()
1533 ret = iommu_map(mapping->domain, iova, phys, len, prot); in __map_sg_chunk()
1732 int ret, prot, len = PAGE_ALIGN(size + offset); in arm_coherent_iommu_map_page() local
1738 prot = __dma_direction_to_prot(dir); in arm_coherent_iommu_map_page()
1740 ret = iommu_map(mapping->domain, dma_addr, page_to_phys(page), len, prot); in arm_coherent_iommu_map_page()
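
Both __map_sg_chunk() (1531-1533) and arm_coherent_iommu_map_page() (1738-1740) follow the same two-step pattern: convert the streaming-DMA direction into IOMMU permission bits, then install the IOVA-to-physical mapping with those bits. In outline (surrounding code assumed):

	int prot = __dma_direction_to_prot(dir);

	/* install the IOVA->phys translation with direction-derived perms */
	ret = iommu_map(mapping->domain, iova, phys, len, prot);
	if (ret < 0)
		goto fail;	/* hypothetical error path */
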