Lines matching refs: size
21 size_t size; member
30 dma_free_coherent(dev, this->size, this->vaddr, this->dma_handle); in dmam_coherent_release()
37 dma_free_noncoherent(dev, this->size, this->vaddr, this->dma_handle); in dmam_noncoherent_release()
45 WARN_ON(this->size != match->size || in dmam_match()
65 void *dmam_alloc_coherent(struct device *dev, size_t size, in dmam_alloc_coherent() argument
75 vaddr = dma_alloc_coherent(dev, size, dma_handle, gfp); in dmam_alloc_coherent()
83 dr->size = size; in dmam_alloc_coherent()
100 void dmam_free_coherent(struct device *dev, size_t size, void *vaddr, in dmam_free_coherent() argument
103 struct dma_devres match_data = { size, vaddr, dma_handle }; in dmam_free_coherent()
105 dma_free_coherent(dev, size, vaddr, dma_handle); in dmam_free_coherent()
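For orientation, the dmam_* entries above are the device-managed (devres) wrappers: dmam_alloc_coherent() records size/vaddr/dma_handle in a struct dma_devres so that dmam_coherent_release() can free the buffer when the driver unbinds, and dmam_free_coherent() uses dmam_match() to drop the matching devres entry on an explicit free. A minimal usage sketch follows, assuming a hypothetical foo_dev driver with a one-page ring buffer; none of these names appear in the file indexed above.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/mm.h>

/* Hypothetical per-device state; not part of the file indexed above. */
struct foo_dev {
	struct device	*dev;
	void		*ring;		/* CPU address of the DMA ring  */
	dma_addr_t	ring_dma;	/* bus address handed to the HW */
	size_t		ring_size;
};

static int foo_probe(struct device *dev)
{
	struct foo_dev *foo;

	foo = devm_kzalloc(dev, sizeof(*foo), GFP_KERNEL);
	if (!foo)
		return -ENOMEM;

	foo->dev = dev;
	foo->ring_size = PAGE_SIZE;

	/*
	 * Managed allocation: the devres release hook above calls
	 * dma_free_coherent() automatically on driver detach, so no
	 * explicit dmam_free_coherent() is needed in the remove path.
	 */
	foo->ring = dmam_alloc_coherent(dev, foo->ring_size, &foo->ring_dma,
					GFP_KERNEL);
	if (!foo->ring)
		return -ENOMEM;

	dev_set_drvdata(dev, foo);
	return 0;
}

The noncoherent variants at lines 124-164 follow the same pattern with dmam_alloc_noncoherent()/dmam_free_noncoherent() substituted.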
124 void *dmam_alloc_noncoherent(struct device *dev, size_t size, in dmam_alloc_noncoherent() argument
134 vaddr = dma_alloc_noncoherent(dev, size, dma_handle, gfp); in dmam_alloc_noncoherent()
142 dr->size = size; in dmam_alloc_noncoherent()
159 void dmam_free_noncoherent(struct device *dev, size_t size, void *vaddr, in dmam_free_noncoherent() argument
162 struct dma_devres match_data = { size, vaddr, dma_handle }; in dmam_free_noncoherent()
164 dma_free_noncoherent(dev, size, vaddr, dma_handle); in dmam_free_noncoherent()
191 dma_addr_t device_addr, size_t size, int flags) in dmam_declare_coherent_memory() argument
200 rc = dma_declare_coherent_memory(dev, phys_addr, device_addr, size, in dmam_declare_coherent_memory()
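dmam_declare_coherent_memory() is the managed form of dma_declare_coherent_memory(): it carves a device-local memory window (for example on-chip SRAM described by a platform resource) into a per-device coherent pool that later dma_alloc_coherent() calls draw from, and undeclares it automatically on detach. A rough sketch under the flag and return conventions of this file's vintage; the resource lookup and the flag choice are illustrative, and the flag set changed in later kernels.

#include <linux/dma-mapping.h>
#include <linux/platform_device.h>

static int foo_sram_probe(struct platform_device *pdev)
{
	struct resource *res;
	int ret;

	/* Hypothetical MMIO/SRAM window provided by platform or DT data. */
	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res)
		return -ENODEV;

	/*
	 * Flags are version-dependent: DMA_MEMORY_MAP | DMA_MEMORY_EXCLUSIVE
	 * matches kernels that still carried dmam_alloc_noncoherent();
	 * later kernels dropped DMA_MEMORY_MAP and, eventually, the flags
	 * argument altogether.
	 */
	ret = dmam_declare_coherent_memory(&pdev->dev, res->start, res->start,
					   resource_size(res),
					   DMA_MEMORY_MAP |
					   DMA_MEMORY_EXCLUSIVE);
	if (ret)
		return ret;

	/* dma_alloc_coherent()/dmam_alloc_coherent() now use this pool. */
	return 0;
}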
229 void *cpu_addr, dma_addr_t handle, size_t size) in dma_common_get_sgtable() argument
238 sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0); in dma_common_get_sgtable()
247 void *cpu_addr, dma_addr_t dma_addr, size_t size) in dma_common_mmap() argument
252 unsigned long count = PAGE_ALIGN(size) >> PAGE_SHIFT; in dma_common_mmap()
258 if (dma_mmap_from_coherent(dev, vma, cpu_addr, size, &ret)) in dma_common_mmap()
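dma_common_get_sgtable() and dma_common_mmap() are the generic fallbacks behind dma_get_sgtable() and dma_mmap_coherent()/dma_mmap_attrs() when the architecture's dma_map_ops do not supply their own ->get_sgtable/->mmap. A hedged sketch of the driver-facing side, exporting a coherent buffer to user space from a character-device mmap handler; foo_dev repeats the hypothetical state from the earlier probe sketch.

#include <linux/dma-mapping.h>
#include <linux/fs.h>
#include <linux/mm.h>

/* Same hypothetical per-device state as in the earlier probe sketch. */
struct foo_dev {
	struct device	*dev;
	void		*ring;
	dma_addr_t	ring_dma;
	size_t		ring_size;
};

static int foo_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct foo_dev *foo = file->private_data;

	if (vma->vm_end - vma->vm_start > PAGE_ALIGN(foo->ring_size))
		return -EINVAL;

	/*
	 * Unless the architecture overrides ->mmap, this lands in
	 * dma_common_mmap() above, which remaps the pages behind cpu_addr
	 * into the user VMA after the dma_mmap_from_coherent() check.
	 */
	return dma_mmap_coherent(foo->dev, vma, foo->ring, foo->ring_dma,
				 foo->ring_size);
}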
278 void *dma_common_pages_remap(struct page **pages, size_t size, in dma_common_pages_remap() argument
284 area = get_vm_area_caller(size, vm_flags, caller); in dma_common_pages_remap()
303 void *dma_common_contiguous_remap(struct page *page, size_t size, in dma_common_contiguous_remap() argument
312 pages = kmalloc(sizeof(struct page *) << get_order(size), GFP_KERNEL); in dma_common_contiguous_remap()
316 for (i = 0, pfn = page_to_pfn(page); i < (size >> PAGE_SHIFT); i++) in dma_common_contiguous_remap()
319 ptr = dma_common_pages_remap(pages, size, vm_flags, prot, caller); in dma_common_contiguous_remap()
329 void dma_common_free_remap(void *cpu_addr, size_t size, unsigned long vm_flags) in dma_common_free_remap() argument
338 unmap_kernel_range((unsigned long)cpu_addr, size); in dma_common_free_remap()
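The remap helpers at the end are not driver-facing: architecture DMA allocators of this vintage call dma_common_contiguous_remap()/dma_common_pages_remap() to give an allocation a kernel virtual mapping with a non-default pgprot (for example write-combined), and dma_common_free_remap() to tear that mapping down again. A rough sketch of the pattern, using the vm_flags-taking signatures shown above; the foo_* wrappers and the VM_USERMAP/write-combine choices are illustrative.

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/vmalloc.h>

/* Illustrative only: remap an already-allocated contiguous buffer. */
static void *foo_remap_wc(struct page *page, size_t size)
{
	return dma_common_contiguous_remap(page, PAGE_ALIGN(size), VM_USERMAP,
					   pgprot_writecombine(PAGE_KERNEL),
					   __builtin_return_address(0));
}

static void foo_unmap_wc(void *vaddr, size_t size)
{
	/* vm_flags must match those used when the area was created. */
	dma_common_free_remap(vaddr, PAGE_ALIGN(size), VM_USERMAP);
}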