Lines matching references to size in the xtensa DMA mapping code, grouped by containing function:
dma_cache_sync()
   28  void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
   33      __flush_invalidate_dcache_range((unsigned long)vaddr, size);
   37      __invalidate_dcache_range((unsigned long)vaddr, size);
   41      __flush_dcache_range((unsigned long)vaddr, size);
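The three cache calls above (lines 33, 37 and 41) suggest that dma_cache_sync() simply dispatches on the DMA direction. A minimal sketch of that dispatch, assuming the truncated final parameter on line 28 is the usual enum dma_data_direction; the surrounding code is not part of this listing, so treat it as a reconstruction:

    #include <linux/dma-mapping.h>
    #include <asm/cacheflush.h>

    /* Sketch only: direction dispatch implied by lines 33/37/41 above. */
    void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
                        enum dma_data_direction dir)
    {
            switch (dir) {
            case DMA_BIDIRECTIONAL:
                    /* CPU may have read and written: write back, then discard. */
                    __flush_invalidate_dcache_range((unsigned long)vaddr, size);
                    break;
            case DMA_FROM_DEVICE:
                    /* Device wrote the buffer: drop stale CPU cache lines. */
                    __invalidate_dcache_range((unsigned long)vaddr, size);
                    break;
            case DMA_TO_DEVICE:
                    /* CPU wrote the buffer: push dirty lines out to memory. */
                    __flush_dcache_range((unsigned long)vaddr, size);
                    break;
            default:
                    break;
            }
    }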
do_cache_op()
   51  static void do_cache_op(dma_addr_t dma_handle, size_t size,
   59      fn((unsigned long)bus_to_virt(dma_handle), size);
   61      while (size > 0) {
   62          size_t sz = min_t(size_t, size, PAGE_SIZE - off);
   69          size -= sz;
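do_cache_op() evidently either runs the cache routine over the whole buffer through the linear mapping (line 59) or walks it in chunks of at most PAGE_SIZE - off bytes (lines 61 to 69). A sketch of how those fragments plausibly fit together; the PageHighMem() test and the kmap_atomic() per-page mapping are assumptions about code this listing does not show:

    #include <linux/dma-mapping.h>
    #include <linux/highmem.h>
    #include <linux/io.h>
    #include <linux/kernel.h>
    #include <linux/mm.h>

    /* Sketch only: built around the fragments at lines 59, 61, 62 and 69. */
    static void do_cache_op(dma_addr_t dma_handle, size_t size,
                            void (*fn)(unsigned long, unsigned long))
    {
            unsigned long off = dma_handle & (PAGE_SIZE - 1);
            struct page *page = pfn_to_page(PFN_DOWN(dma_handle));

            if (!PageHighMem(page)) {
                    /* Lowmem: the buffer is contiguous in the linear mapping. */
                    fn((unsigned long)bus_to_virt(dma_handle), size);
                    return;
            }

            /* Highmem (assumed): map and process one page-sized chunk at a time. */
            while (size > 0) {
                    size_t sz = min_t(size_t, size, PAGE_SIZE - off);
                    void *vaddr = kmap_atomic(page);

                    fn((unsigned long)vaddr + off, sz);
                    kunmap_atomic(vaddr);
                    off = 0;
                    ++page;
                    size -= sz;
            }
    }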
xtensa_sync_single_for_cpu()
   74      dma_addr_t dma_handle, size_t size,
   80      do_cache_op(dma_handle, size, __invalidate_dcache_range);

xtensa_sync_single_for_device()
   93      dma_addr_t dma_handle, size_t size,
  100      do_cache_op(dma_handle, size, __flush_dcache_range);
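Lines 80 and 100 show the division of labour: syncing for the CPU invalidates, syncing for the device flushes, both through do_cache_op(). A sketch of the two handlers, assuming the usual direction checks (only the do_cache_op() calls themselves appear in the listing):

    /* Sketch only: relies on do_cache_op() from the sketch above; headers as
     * in the earlier sketches. The direction checks are assumptions. */
    static void xtensa_sync_single_for_cpu(struct device *dev,
                                           dma_addr_t dma_handle, size_t size,
                                           enum dma_data_direction dir)
    {
            /* CPU is about to read data the device may have written:
             * discard whatever the data cache still holds for that range. */
            if (dir == DMA_FROM_DEVICE || dir == DMA_BIDIRECTIONAL)
                    do_cache_op(dma_handle, size, __invalidate_dcache_range);
    }

    static void xtensa_sync_single_for_device(struct device *dev,
                                              dma_addr_t dma_handle, size_t size,
                                              enum dma_data_direction dir)
    {
            /* Device is about to read data the CPU may have written:
             * write dirty cache lines back to memory first. */
            if (dir == DMA_TO_DEVICE || dir == DMA_BIDIRECTIONAL)
                    do_cache_op(dma_handle, size, __flush_dcache_range);
    }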
xtensa_dma_alloc()
  143  static void *xtensa_dma_alloc(struct device *dev, size_t size,
  156      ret = (unsigned long)__get_free_pages(flag, get_order(size));
  168      __invalidate_dcache_range(ret, size);

xtensa_dma_free()
  173  static void xtensa_dma_free(struct device *hwdev, size_t size, void *vaddr,
  182      free_pages(addr, get_order(size));
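The allocator fragments show plain page allocation (line 156), an invalidate over the fresh range (line 168), and a matching free_pages() of the same order (line 182). A deliberately simplified sketch of the pair; the *handle and attrs parameters, the virt_to_bus() handle computation, and the omission of any uncached-alias remapping (which a real coherent allocator on a non-coherent platform would normally perform) are all assumptions:

    #include <linux/dma-mapping.h>
    #include <linux/gfp.h>
    #include <linux/io.h>
    #include <asm/cacheflush.h>

    /* Sketch only: built from the fragments at lines 156, 168 and 182;
     * it hands back the cached kernel address instead of an uncached alias. */
    static void *xtensa_dma_alloc(struct device *dev, size_t size,
                                  dma_addr_t *handle, gfp_t flag,
                                  unsigned long attrs)
    {
            unsigned long ret;

            ret = (unsigned long)__get_free_pages(flag, get_order(size));
            if (!ret)
                    return NULL;

            *handle = virt_to_bus((void *)ret);     /* assumed handle computation */

            /* No stale cache lines may cover the buffer before the device
             * starts writing into it. */
            __invalidate_dcache_range(ret, size);

            return (void *)ret;
    }

    static void xtensa_dma_free(struct device *hwdev, size_t size, void *vaddr,
                                dma_addr_t dma_handle, unsigned long attrs)
    {
            unsigned long addr = (unsigned long)vaddr;

            free_pages(addr, get_order(size));
    }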
xtensa_map_page()
  186      unsigned long offset, size_t size,
  192      xtensa_sync_single_for_device(dev, dma_handle, size, dir);

xtensa_unmap_page()
  197      size_t size, enum dma_data_direction dir,
  200      xtensa_sync_single_for_cpu(dev, dma_handle, size, dir);
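Lines 192 and 200 make the streaming-mapping contract visible: mapping a page pushes the buffer toward the device, unmapping hands it back to the CPU. A sketch of the pair, assuming the conventional page_to_phys(page) + offset address computation, which is not part of the listing:

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>
    #include <asm/io.h>

    /* Sketch only: wraps the sync helpers from the sketches above. */
    static dma_addr_t xtensa_map_page(struct device *dev, struct page *page,
                                      unsigned long offset, size_t size,
                                      enum dma_data_direction dir,
                                      unsigned long attrs)
    {
            dma_addr_t dma_handle = page_to_phys(page) + offset;    /* assumed */

            /* Make CPU writes visible to the device before DMA starts. */
            xtensa_sync_single_for_device(dev, dma_handle, size, dir);
            return dma_handle;
    }

    static void xtensa_unmap_page(struct device *dev, dma_addr_t dma_handle,
                                  size_t size, enum dma_data_direction dir,
                                  unsigned long attrs)
    {
            /* The buffer returns to the CPU: invalidate stale cache lines. */
            xtensa_sync_single_for_cpu(dev, dma_handle, size, dir);
    }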