Lines Matching refs:size

All matches below come from a single DMA-coherent allocator (the *_dcache_region() helpers and the _PAGE_BUFFER flag suggest an AVR32-style port); a short hedged sketch after each group shows how size flows through it.
16 void dma_cache_sync(struct device *dev, void *vaddr, size_t size, int direction) in dma_cache_sync() argument
26 invalidate_dcache_region(vaddr, size); in dma_cache_sync()
29 clean_dcache_region(vaddr, size); in dma_cache_sync()
32 flush_dcache_region(vaddr, size); in dma_cache_sync()
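
These first matches are the body of dma_cache_sync(), which picks a cache maintenance primitive for the [vaddr, vaddr + size) region based on the DMA direction. A minimal sketch of how the three matched calls likely fit together, assuming the usual DMA_FROM_DEVICE / DMA_TO_DEVICE / DMA_BIDIRECTIONAL cases from <linux/dma-mapping.h>; the arch-internal *_dcache_region() helpers are taken from the listing, the dispatch around them is an assumption:

#include <linux/dma-mapping.h>  /* DMA_* direction constants */
#include <linux/bug.h>          /* BUG() */

void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
                    int direction)
{
        switch (direction) {
        case DMA_FROM_DEVICE:           /* device wrote: drop stale CPU lines */
                invalidate_dcache_region(vaddr, size);
                break;
        case DMA_TO_DEVICE:             /* CPU wrote: write dirty lines back */
                clean_dcache_region(vaddr, size);
                break;
        case DMA_BIDIRECTIONAL:         /* both: write back, then invalidate */
                flush_dcache_region(vaddr, size);
                break;
        default:
                BUG();
        }
}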
40 static struct page *__dma_alloc(struct device *dev, size_t size, in __dma_alloc() argument
53 size = PAGE_ALIGN(size); in __dma_alloc()
54 order = get_order(size); in __dma_alloc()
70 invalidate_dcache_region(phys_to_virt(page_to_phys(page)), size); in __dma_alloc()
73 free = page + (size >> PAGE_SHIFT); in __dma_alloc()
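
The __dma_alloc() matches show the size arithmetic: the request is rounded up to whole pages, the smallest power-of-two block covering it is allocated, stale cache lines for the new pages are invalidated, and line 73 computes where the used pages end so the surplus can be returned. A hedged sketch of that flow; split_page(), page_to_bus() and the control flow around the matched lines are assumptions, not in the listing:

static struct page *__dma_alloc(struct device *dev, size_t size,
                                dma_addr_t *handle, gfp_t gfp)
{
        struct page *page, *free, *end;
        int order;

        size = PAGE_ALIGN(size);        /* whole pages only */
        order = get_order(size);        /* smallest order covering size */

        page = alloc_pages(gfp, order);
        if (!page)
                return NULL;
        split_page(page, order);        /* make tail pages individually freeable */

        /* Fresh pages may still have stale cache lines for their
         * physical addresses; drop them before handing out the buffer. */
        invalidate_dcache_region(phys_to_virt(page_to_phys(page)), size);

        *handle = page_to_bus(page);    /* assumed bus-address helper */

        /* alloc_pages() gave 2^order pages; free everything past size. */
        free = page + (size >> PAGE_SHIFT);
        end = page + (1 << order);
        while (free < end)
                __free_page(free++);

        return page;
}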
87 static void __dma_free(struct device *dev, size_t size, in __dma_free() argument
90 struct page *end = page + (PAGE_ALIGN(size) >> PAGE_SHIFT); in __dma_free()
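
__dma_free() mirrors the allocator: line 90 recomputes the page count from the same size, giving one past the last page of the span to release. A sketch of the likely loop around it (the loop body is an assumption):

static void __dma_free(struct device *dev, size_t size,
                       struct page *page, dma_addr_t handle)
{
        struct page *end = page + (PAGE_ALIGN(size) >> PAGE_SHIFT);

        while (page < end)
                __free_page(page++);
}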
96 void *dma_alloc_coherent(struct device *dev, size_t size, in dma_alloc_coherent() argument
102 page = __dma_alloc(dev, size, handle, gfp); in dma_alloc_coherent()
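
dma_alloc_coherent() simply delegates to __dma_alloc() with the caller's size and returns a CPU pointer to the buffer. A hypothetical driver-side use; my_dev, my_setup, RING_BYTES, ring and ring_dma are illustrative names, not from the source:

#include <linux/dma-mapping.h>
#include <linux/errno.h>

#define RING_BYTES 4096                 /* hypothetical buffer size */

static void *ring;                      /* CPU view of the buffer */
static dma_addr_t ring_dma;             /* bus address for the device */

static int my_setup(struct device *my_dev)
{
        ring = dma_alloc_coherent(my_dev, RING_BYTES, &ring_dma, GFP_KERNEL);
        if (!ring)
                return -ENOMEM;
        /* CPU uses 'ring'; the device is programmed with 'ring_dma'. */
        return 0;
}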
110 void dma_free_coherent(struct device *dev, size_t size, in dma_free_coherent() argument
117 cpu_addr, (unsigned long)handle, (unsigned)size); in dma_free_coherent()
120 __dma_free(dev, size, page, handle); in dma_free_coherent()
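
The dma_free_coherent() matches include a debug print echoing the size before the pages are released, a reminder that the caller must hand back exactly the size and handle it allocated with. A hypothetical teardown matching the setup above:

static void my_teardown(struct device *my_dev)
{
        /* Same size and handle as the corresponding dma_alloc_coherent(). */
        dma_free_coherent(my_dev, RING_BYTES, ring, ring_dma);
}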
124 void *dma_alloc_writecombine(struct device *dev, size_t size, in dma_alloc_writecombine() argument
130 page = __dma_alloc(dev, size, handle, gfp); in dma_alloc_writecombine()
138 return __ioremap(phys, size, _PAGE_BUFFER); in dma_alloc_writecombine()
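
dma_alloc_writecombine() reuses __dma_alloc(), but per the match on line 138 it remaps the pages with __ioremap() and the arch's write-combine page flag rather than handing out an uncached alias. A hedged sketch; everything around the two matched lines is assumed:

void *dma_alloc_writecombine(struct device *dev, size_t size,
                             dma_addr_t *handle, gfp_t gfp)
{
        struct page *page;
        dma_addr_t phys;

        page = __dma_alloc(dev, size, handle, gfp);
        if (!page)
                return NULL;

        phys = page_to_phys(page);
        *handle = phys;

        /* Map the buffer write-combined; _PAGE_BUFFER is the arch's WC flag
         * per the listing. */
        return __ioremap(phys, size, _PAGE_BUFFER);
}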
142 void dma_free_writecombine(struct device *dev, size_t size, in dma_free_writecombine() argument
150 __dma_free(dev, size, page, handle); in dma_free_writecombine()
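
dma_free_writecombine() must undo both halves: the write-combined mapping and the page allocation. A sketch under the assumption that iounmap() tears down the __ioremap() mapping and phys_to_page() recovers the first page from the handle:

void dma_free_writecombine(struct device *dev, size_t size,
                           void *cpu_addr, dma_addr_t handle)
{
        struct page *page;

        iounmap(cpu_addr);              /* drop the write-combined mapping */
        page = phys_to_page(handle);    /* first page of the buffer */
        __dma_free(dev, size, page, handle);
}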