Lines matching refs:size (references to the identifier size in the kernel's per-device coherent DMA memory helpers, dma_declare_coherent_memory() and friends)

14 	int		size;  member
21 size_t size, int flags, in dma_init_coherent_memory() argument
26 int pages = size >> PAGE_SHIFT; in dma_init_coherent_memory()
31 if (!size) in dma_init_coherent_memory()
34 mem_base = ioremap(phys_addr, size); in dma_init_coherent_memory()
48 dma_mem->size = pages; in dma_init_coherent_memory()
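
For context: the init path above ioremaps the physical range and records its length in pages rather than bytes (pages = size >> PAGE_SHIFT, then stored as dma_mem->size). A rough sketch of the bookkeeping structure those fields belong to, assuming a kernel of this vintage (the field set is approximate; verify against the actual tree):

struct dma_coherent_mem {
	void		*virt_base;	/* ioremap()ed CPU mapping of the window */
	dma_addr_t	device_base;	/* bus address the device sees */
	unsigned long	pfn_base;	/* first PFN, used by the mmap helper */
	int		size;		/* length in PAGES, not bytes */
	int		flags;		/* DMA_MEMORY_* flags from declare time */
	unsigned long	*bitmap;	/* one bit per page, tracks allocations */
	/* newer trees also add a spinlock protecting the bitmap */
};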
88 dma_addr_t device_addr, size_t size, int flags) in dma_declare_coherent_memory() argument
93 ret = dma_init_coherent_memory(phys_addr, device_addr, size, flags, in dma_declare_coherent_memory()
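
A hedged sketch of how a platform driver typically calls dma_declare_coherent_memory() to expose a device-local memory window (the resource index and flag choice are illustrative, and the return convention changed across kernel versions: older trees return a DMA_MEMORY_* mask on success, newer ones 0 or a negative errno):

#include <linux/dma-mapping.h>
#include <linux/platform_device.h>

static int example_probe(struct platform_device *pdev)
{
	/* Hypothetical: resource 1 is an on-chip SRAM window the device DMAs into. */
	struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 1);
	int ret;

	if (!res)
		return -ENODEV;

	/* phys_addr and device_addr coincide here: CPU and device see the
	 * window at the same address. Adjust the failure check to the return
	 * convention of the kernel this is built against. */
	ret = dma_declare_coherent_memory(&pdev->dev, res->start, res->start,
					  resource_size(res),
					  DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE);
	if (ret < 0)
		return ret;

	/* dma_alloc_coherent() against &pdev->dev is now served from this window. */
	return 0;
}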
118 dma_addr_t device_addr, size_t size) in dma_mark_declared_memory_occupied() argument
124 size += device_addr & ~PAGE_MASK; in dma_mark_declared_memory_occupied()
131 err = bitmap_allocate_region(mem->bitmap, pos, get_order(size)); in dma_mark_declared_memory_occupied()
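
dma_mark_declared_memory_occupied() reserves a specific device-address range inside the declared window so normal allocations cannot hand it out; note above how it widens size by the sub-page offset and then claims whole pages with bitmap_allocate_region(). A small hedged usage sketch (the bus address and length are made up):

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/sizes.h>

static void *reserve_ctrl_block(struct device *dev)
{
	/* Pin the 4 KiB block the hardware expects at a fixed bus address. */
	void *vaddr = dma_mark_declared_memory_occupied(dev, 0x40000000, SZ_4K);

	if (IS_ERR(vaddr))
		return NULL;	/* already taken, or no declared memory for dev */

	/* vaddr is the CPU-side mapping of the reserved (page-rounded) range. */
	return vaddr;
}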
155 int dma_alloc_from_coherent(struct device *dev, ssize_t size, in dma_alloc_from_coherent() argument
159 int order = get_order(size); in dma_alloc_from_coherent()
172 if (unlikely(size > (mem->size << PAGE_SHIFT))) in dma_alloc_from_coherent()
175 pageno = bitmap_find_free_region(mem->bitmap, mem->size, order); in dma_alloc_from_coherent()
184 memset(*ret, 0, size); in dma_alloc_from_coherent()
218 (mem->virt_base + (mem->size << PAGE_SHIFT))) { in dma_release_from_coherent()
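
dma_alloc_from_coherent() and dma_release_from_coherent() are the hooks the generic and arch allocation paths consult before falling back to system memory. A hedged sketch of that pattern (the example_*_system helpers are placeholders, not kernel APIs):

#include <linux/dma-mapping.h>
#include <linux/mm.h>

void *example_alloc_from_system(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t gfp);
void example_free_to_system(struct device *dev, size_t size, void *vaddr,
			    dma_addr_t dma_handle);

static void *example_dma_alloc(struct device *dev, size_t size,
			       dma_addr_t *dma_handle, gfp_t gfp)
{
	void *vaddr;

	/* Nonzero return means the request was handled by the device's
	 * declared pool; vaddr may still be NULL if the pool is exhausted
	 * and was declared DMA_MEMORY_EXCLUSIVE. */
	if (dma_alloc_from_coherent(dev, size, dma_handle, &vaddr))
		return vaddr;

	return example_alloc_from_system(dev, size, dma_handle, gfp);
}

static void example_dma_free(struct device *dev, size_t size, void *vaddr,
			     dma_addr_t dma_handle)
{
	/* Nonzero return means vaddr belonged to the coherent pool and its
	 * pages were released back to the bitmap. */
	if (dma_release_from_coherent(dev, get_order(size), vaddr))
		return;

	example_free_to_system(dev, size, vaddr, dma_handle);
}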
247 void *vaddr, size_t size, int *ret) in dma_mmap_from_coherent() argument
251 if (mem && vaddr >= mem->virt_base && vaddr + size <= in dma_mmap_from_coherent()
252 (mem->virt_base + (mem->size << PAGE_SHIFT))) { in dma_mmap_from_coherent()
256 int count = size >> PAGE_SHIFT; in dma_mmap_from_coherent()
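
dma_mmap_from_coherent() is the userspace-mapping counterpart of the same hook: if vaddr lies inside the device's declared pool it remaps the pages into the vma and stores its result through ret (hence the count = size >> PAGE_SHIFT bound check above). A hedged sketch, with example_mmap_from_system() standing in for the generic path:

#include <linux/dma-mapping.h>
#include <linux/mm.h>

int example_mmap_from_system(struct device *dev, struct vm_area_struct *vma,
			     void *vaddr, dma_addr_t dma_addr, size_t size);

static int example_dma_mmap(struct device *dev, struct vm_area_struct *vma,
			    void *vaddr, dma_addr_t dma_addr, size_t size)
{
	int ret;

	/* Nonzero return: the coherent pool handled it, ret holds the result. */
	if (dma_mmap_from_coherent(dev, vma, vaddr, size, &ret))
		return ret;

	return example_mmap_from_system(dev, vma, vaddr, dma_addr, size);
}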
284 dma_init_coherent_memory(rmem->base, rmem->base, rmem->size, in rmem_dma_device_init()
288 &rmem->base, (unsigned long)rmem->size / SZ_1M); in rmem_dma_device_init()
323 &rmem->base, (unsigned long)rmem->size / SZ_1M); in rmem_dma_setup()
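
rmem_dma_setup() and rmem_dma_device_init() wire a devicetree reserved-memory node (compatible "shared-dma-pool", registered via RESERVEDMEM_OF_DECLARE) into the same dma_init_coherent_memory() machinery; the two messages above report the pool size in MiB. A hedged consumer sketch (addresses and node names are illustrative):

#include <linux/of_reserved_mem.h>
#include <linux/platform_device.h>

/*
 * Devicetree side, roughly:
 *
 *	reserved-memory {
 *		dma_pool: pool@78000000 {
 *			compatible = "shared-dma-pool";
 *			reg = <0x78000000 0x800000>;	// 8 MiB
 *			no-map;
 *		};
 *	};
 *
 *	some-device {
 *		memory-region = <&dma_pool>;
 *	};
 */
static int example_rmem_probe(struct platform_device *pdev)
{
	/* Resolves the memory-region phandle and, through the reserved-memory
	 * hooks above, declares the pool as this device's coherent memory. */
	int ret = of_reserved_mem_device_init(&pdev->dev);

	if (ret)
		dev_warn(&pdev->dev, "no dedicated DMA pool, using system memory\n");

	return 0;
}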