Lines Matching refs:size
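(These hits appear to come from arch/arm64/mm/dma-mapping.c in a pre-v4.10 Linux kernel, where the arm64 DMA path combined a gen_pool-backed atomic pool, the CMA contiguous allocator and swiotlb; the trailing "in ...() argument" tags mark references that sit in a function's parameter list.)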
54 static void *__alloc_from_pool(size_t size, struct page **ret_page, gfp_t flags) in __alloc_from_pool() argument
64 val = gen_pool_alloc(atomic_pool, size); in __alloc_from_pool()
70 memset(ptr, 0, size); in __alloc_from_pool()
76 static bool __in_atomic_pool(void *start, size_t size) in __in_atomic_pool() argument
78 return addr_in_gen_pool(atomic_pool, (unsigned long)start, size); in __in_atomic_pool()
81 static int __free_from_pool(void *start, size_t size) in __free_from_pool() argument
83 if (!__in_atomic_pool(start, size)) in __free_from_pool()
86 gen_pool_free(atomic_pool, (unsigned long)start, size); in __free_from_pool()
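The three pool helpers above (__alloc_from_pool(), __in_atomic_pool(), __free_from_pool()) implement the atomic allocation path: carve memory out of a preallocated atomic_pool via gen_pool_alloc(), zero it, and on free only hand back addresses that addr_in_gen_pool() confirms belong to the pool. A minimal userspace model of that pattern follows; pool_alloc(), pool_contains() and pool_free() are hypothetical stand-ins, and the fixed-chunk bitmap is far simpler than the kernel's genalloc.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define POOL_SIZE 4096
#define CHUNK     256
#define NCHUNKS   (POOL_SIZE / CHUNK)

static unsigned char pool_mem[POOL_SIZE];
static bool pool_used[NCHUNKS];

/* First-fit over fixed chunks; gen_pool_alloc() is far more general. */
static void *pool_alloc(size_t size)
{
    size_t need = (size + CHUNK - 1) / CHUNK;

    for (size_t i = 0; i + need <= NCHUNKS; i++) {
        size_t j;

        for (j = 0; j < need && !pool_used[i + j]; j++)
            ;
        if (j < need)
            continue;
        for (j = 0; j < need; j++)
            pool_used[i + j] = true;
        memset(pool_mem + i * CHUNK, 0, size); /* mirrors memset(ptr, 0, size) */
        return pool_mem + i * CHUNK;
    }
    return NULL;
}

/* Mirrors __in_atomic_pool(): is [start, start + size) inside the pool? */
static bool pool_contains(void *start, size_t size)
{
    uintptr_t s = (uintptr_t)start;

    return s >= (uintptr_t)pool_mem &&
           s + size <= (uintptr_t)pool_mem + POOL_SIZE;
}

/* Mirrors __free_from_pool(): refuse addresses the pool does not own. */
static bool pool_free(void *start, size_t size)
{
    size_t first, need;

    if (!pool_contains(start, size))
        return false;
    first = ((uintptr_t)start - (uintptr_t)pool_mem) / CHUNK;
    need = (size + CHUNK - 1) / CHUNK;
    for (size_t i = 0; i < need; i++)
        pool_used[first + i] = false;
    return true;
}

int main(void)
{
    void *p = pool_alloc(300);        /* spans two 256-byte chunks */
    return pool_free(p, 300) ? 0 : 1; /* succeeds only for pool memory */
}

The guarded free mirrors __dma_free() further down, which gives the pool first refusal on every address and only falls through to the coherent path when the pool disowns it.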
91 static void *__dma_alloc_coherent(struct device *dev, size_t size, in __dma_alloc_coherent() argument
107 page = dma_alloc_from_contiguous(dev, size >> PAGE_SHIFT, in __dma_alloc_coherent()
108 get_order(size)); in __dma_alloc_coherent()
114 memset(addr, 0, size); in __dma_alloc_coherent()
117 return swiotlb_alloc_coherent(dev, size, dma_handle, flags); in __dma_alloc_coherent()
121 static void __dma_free_coherent(struct device *dev, size_t size, in __dma_free_coherent() argument
135 size >> PAGE_SHIFT); in __dma_free_coherent()
137 swiotlb_free_coherent(dev, size, vaddr, dma_handle); in __dma_free_coherent()
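__dma_alloc_coherent() converts the byte count two different ways when it calls dma_alloc_from_contiguous(): size >> PAGE_SHIFT for the number of pages and get_order(size) for the alignment order, falling back to swiotlb_alloc_coherent() when the CMA path is not taken; the free side releases by page count. A small userspace check of that arithmetic, assuming 4 KiB pages; order_of() is a hypothetical stand-in for the kernel's get_order().

#include <stdio.h>
#include <stddef.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

/* Stand-in for get_order(): smallest order with (PAGE_SIZE << order) >= size. */
static unsigned int order_of(size_t size)
{
    unsigned int order = 0;

    while ((PAGE_SIZE << order) < size)
        order++;
    return order;
}

int main(void)
{
    size_t sizes[] = { 4096, 8192, 12288, 65536 };

    for (size_t i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++)
        printf("size=%zu pages=%zu order=%u\n",
               sizes[i], sizes[i] >> PAGE_SHIFT, order_of(sizes[i]));
    return 0;
}

The two conversions diverge for sizes that are not powers of two: 12288 bytes is three pages, but already needs an order-2 (four-page) block.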
140 static void *__dma_alloc(struct device *dev, size_t size, in __dma_alloc() argument
148 size = PAGE_ALIGN(size); in __dma_alloc()
152 void *addr = __alloc_from_pool(size, &page, flags); in __dma_alloc()
160 ptr = __dma_alloc_coherent(dev, size, dma_handle, flags, attrs); in __dma_alloc()
169 __dma_flush_range(ptr, ptr + size); in __dma_alloc()
173 coherent_ptr = dma_common_contiguous_remap(page, size, VM_USERMAP, in __dma_alloc()
183 __dma_free_coherent(dev, size, ptr, *dma_handle, attrs); in __dma_alloc()
189 static void __dma_free(struct device *dev, size_t size, in __dma_free() argument
195 size = PAGE_ALIGN(size); in __dma_free()
198 if (__free_from_pool(vaddr, size)) in __dma_free()
202 __dma_free_coherent(dev, size, swiotlb_addr, dma_handle, attrs); in __dma_free()
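__dma_alloc() and __dma_free() both round the request up with PAGE_ALIGN(size) and then dispatch: atomic (non-blocking) requests go to the pool, everything else to the coherent allocator, after which the non-coherent path flushes the range (__dma_flush_range) and remaps it for userspace, freeing again if the remap fails (as the last __dma_alloc line above shows). A skeleton of that dispatch, with hypothetical stubs in place of the kernel helpers; the toy pool sketched earlier would slot in here.

#include <stdbool.h>
#include <stddef.h>
#include <stdlib.h>

#define PAGE_SIZE 4096UL
#define PAGE_ALIGN(s) (((s) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

/* Hypothetical stubs, not the kernel functions. */
static void *pool_alloc(size_t size)             { (void)size; return NULL; }
static bool  pool_free(void *p, size_t size)     { (void)p; (void)size; return false; }
static void *coherent_alloc(size_t size)         { return malloc(size); }
static void  coherent_free(void *p, size_t size) { (void)size; free(p); }

/* Mirrors __dma_alloc(): align first, then pick a path by context. */
static void *toy_dma_alloc(size_t size, bool atomic)
{
    size = PAGE_ALIGN(size);      /* mirrors size = PAGE_ALIGN(size) above */
    if (atomic)
        return pool_alloc(size);  /* may not sleep: atomic pool only */
    return coherent_alloc(size);  /* may sleep; kernel also flushes/remaps */
}

/* Mirrors __dma_free(): the pool gets first refusal on every address. */
static void toy_dma_free(void *vaddr, size_t size)
{
    size = PAGE_ALIGN(size);
    if (pool_free(vaddr, size))
        return;
    coherent_free(vaddr, size);
}

int main(void)
{
    void *p = toy_dma_alloc(100, false); /* rounds up to one 4096-byte page */
    toy_dma_free(p, 100);
    return 0;
}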
206 unsigned long offset, size_t size, in __swiotlb_map_page() argument
212 dev_addr = swiotlb_map_page(dev, page, offset, size, dir, attrs); in __swiotlb_map_page()
214 __dma_map_area(phys_to_virt(dma_to_phys(dev, dev_addr)), size, dir); in __swiotlb_map_page()
221 size_t size, enum dma_data_direction dir, in __swiotlb_unmap_page() argument
225 __dma_unmap_area(phys_to_virt(dma_to_phys(dev, dev_addr)), size, dir); in __swiotlb_unmap_page()
226 swiotlb_unmap_page(dev, dev_addr, size, dir, attrs); in __swiotlb_unmap_page()
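__swiotlb_map_page() and __swiotlb_unmap_page() bracket the device's access with cache maintenance on the kernel alias of the buffer, recovered via phys_to_virt(dma_to_phys(dev, dev_addr)). A userspace sketch of the direction logic such maintenance is generally understood to need (clean before the device reads, invalidate when the device may have written); cache_clean() and cache_invalidate() are hypothetical stubs, and the exact per-direction behaviour of __dma_map_area()/__dma_unmap_area() is an assumption here.

#include <stddef.h>
#include <stdio.h>

enum dma_data_direction { DMA_BIDIRECTIONAL, DMA_TO_DEVICE, DMA_FROM_DEVICE };

/* Stubs standing in for real cache maintenance instructions. */
static void cache_clean(void *va, size_t size)      /* write dirty lines out */
{ printf("clean %p + %zu\n", va, size); }
static void cache_invalidate(void *va, size_t size) /* discard cached lines */
{ printf("inval %p + %zu\n", va, size); }

/* Model of __dma_map_area(): make RAM current before the device looks. */
static void map_area(void *va, size_t size, enum dma_data_direction dir)
{
    if (dir == DMA_FROM_DEVICE)
        cache_invalidate(va, size); /* device will overwrite: drop stale lines */
    else
        cache_clean(va, size);      /* device will read: push CPU writes out */
}

/* Model of __dma_unmap_area(): make the CPU view current afterwards. */
static void unmap_area(void *va, size_t size, enum dma_data_direction dir)
{
    if (dir != DMA_TO_DEVICE)
        cache_invalidate(va, size); /* device may have written behind the cache */
}

int main(void)
{
    char buf[64];

    map_area(buf, sizeof(buf), DMA_TO_DEVICE);
    unmap_area(buf, sizeof(buf), DMA_FROM_DEVICE);
    return 0;
}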
261 dma_addr_t dev_addr, size_t size, in __swiotlb_sync_single_for_cpu() argument
265 __dma_unmap_area(phys_to_virt(dma_to_phys(dev, dev_addr)), size, dir); in __swiotlb_sync_single_for_cpu()
266 swiotlb_sync_single_for_cpu(dev, dev_addr, size, dir); in __swiotlb_sync_single_for_cpu()
270 dma_addr_t dev_addr, size_t size, in __swiotlb_sync_single_for_device() argument
273 swiotlb_sync_single_for_device(dev, dev_addr, size, dir); in __swiotlb_sync_single_for_device()
275 __dma_map_area(phys_to_virt(dma_to_phys(dev, dev_addr)), size, dir); in __swiotlb_sync_single_for_device()
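Note the asymmetric ordering in the two sync paths above: for_cpu does the cache maintenance before the swiotlb call, for_device after it. When dev_addr points at a bounce buffer, the swiotlb sync is a CPU copy to or from that buffer, so the maintenance plausibly has to land on the correct side of the copy. The sketch below is a hedged reading of that ordering; all four helpers are hypothetical stubs.

#include <stddef.h>

static void cache_clean(void *va, size_t size)           { (void)va; (void)size; }
static void cache_invalidate(void *va, size_t size)      { (void)va; (void)size; }
static void bounce_copy_to_cpu(void *va, size_t size)    { (void)va; (void)size; }
static void bounce_copy_to_device(void *va, size_t size) { (void)va; (void)size; }

/* Like __swiotlb_sync_single_for_cpu(): maintenance first, copy second,
 * so the copy does not work against stale cached lines. */
static void sync_for_cpu(void *va, size_t size)
{
    cache_invalidate(va, size);
    bounce_copy_to_cpu(va, size);
}

/* Like __swiotlb_sync_single_for_device(): copy first, clean second,
 * so the freshly staged bytes reach RAM before the device reads them. */
static void sync_for_device(void *va, size_t size)
{
    bounce_copy_to_device(va, size);
    cache_clean(va, size);
}

int main(void)
{
    char buf[64];

    sync_for_device(buf, sizeof(buf));
    sync_for_cpu(buf, sizeof(buf));
    return 0;
}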
308 void *cpu_addr, dma_addr_t dma_addr, size_t size) in __dma_common_mmap() argument
313 unsigned long nr_pages = PAGE_ALIGN(size) >> PAGE_SHIFT; in __dma_common_mmap()
317 if (dma_mmap_from_coherent(dev, vma, cpu_addr, size, &ret)) in __dma_common_mmap()
332 void *cpu_addr, dma_addr_t dma_addr, size_t size, in __swiotlb_mmap() argument
337 return __dma_common_mmap(dev, vma, cpu_addr, dma_addr, size); in __swiotlb_mmap()
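__dma_common_mmap() sizes the user mapping in whole pages, nr_pages = PAGE_ALIGN(size) >> PAGE_SHIFT, and (after the dma_mmap_from_coherent() short-circuit) the common pattern is to reject requests whose page offset plus length would run past the buffer. A userspace check of that arithmetic; mmap_fits() and its values are made up for illustration.

#include <stdbool.h>
#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_ALIGN(s) (((s) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

/* Does a request for 'want' pages at page offset 'off' fit a 'size'-byte buffer? */
static bool mmap_fits(unsigned long size, unsigned long off, unsigned long want)
{
    unsigned long nr_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;

    return off < nr_pages && want <= nr_pages - off;
}

int main(void)
{
    /* A 10000-byte allocation is backed by three 4 KiB pages: */
    printf("%d\n", mmap_fits(10000, 0, 3)); /* fits: prints 1 */
    printf("%d\n", mmap_fits(10000, 2, 2)); /* offset 2 + 2 pages > 3: prints 0 */
    return 0;
}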