Lines matching refs:size (cross-reference hits for the identifier size; the function names suggest the Linux tile architecture's DMA-mapping code, arch/tile/kernel/pci-dma.c)

35 static void *tile_dma_alloc_coherent(struct device *dev, size_t size,  in tile_dma_alloc_coherent()  argument
42 int order = get_order(size); in tile_dma_alloc_coherent()
66 if (addr + size > dma_mask) { in tile_dma_alloc_coherent()
79 static void tile_dma_free_coherent(struct device *dev, size_t size, in tile_dma_free_coherent() argument
83 homecache_free_pages((unsigned long)vaddr, get_order(size)); in tile_dma_free_coherent()
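
The two fragments above show the coherent path: allocation rounds size up to a power-of-two page order, and the resulting physical address plus size is checked against the device's coherent DMA mask (line 66) before the buffer is handed out. A minimal sketch of that flow, assuming the tile homecache allocator's signatures and the PAGE_HOME_DMA placement; only the get_order() and mask logic is taken directly from the lines above:

static void *sketch_alloc_coherent(struct device *dev, size_t size,
				   dma_addr_t *dma_handle, gfp_t gfp)
{
	u64 dma_mask = dev->coherent_dma_mask ?: DMA_BIT_MASK(32);
	int order = get_order(size);	/* round size up to 2^order pages */
	struct page *pg;
	dma_addr_t addr;

	pg = homecache_alloc_pages(gfp | __GFP_ZERO, order, PAGE_HOME_DMA);
	if (!pg)
		return NULL;
	addr = page_to_phys(pg);
	if (addr + size > dma_mask) {	/* line 66: buffer not reachable */
		__homecache_free_pages(pg, order);
		return NULL;
	}
	*dma_handle = addr;
	return page_address(pg);
}

The free side (line 83) undoes the same rounding with homecache_free_pages((unsigned long)vaddr, get_order(size)), so passing free a different size than alloc would release the wrong page order.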
102 size_t size, enum dma_data_direction direction) in __dma_prep_page() argument
142 if (offset & (L2_CACHE_BYTES - 1) || (size & (L2_CACHE_BYTES - 1))) in __dma_prep_page()
144 PFN_PHYS(page_to_pfn(page)) + offset, size); in __dma_prep_page()
150 size_t size, enum dma_data_direction direction) in __dma_complete_page() argument
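
Lines 142-144 guard against DMA that is not cache-line aligned: if either the offset into the page or the size is not a multiple of L2_CACHE_BYTES, unrelated data sharing a cache line with the buffer could be corrupted by the flush/invalidate. The continuation at line 144 reads like the argument list of a diagnostic printk; a sketch, with the format string assumed:

	if (offset & (L2_CACHE_BYTES - 1) || (size & (L2_CACHE_BYTES - 1)))
		pr_warn("unaligned DMA: PA %#llx, size %#zx\n",
			(unsigned long long)(PFN_PHYS(page_to_pfn(page)) + offset),
			size);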
171 static void __dma_prep_pa_range(dma_addr_t dma_addr, size_t size, in __dma_prep_pa_range() argument
176 size_t bytes = min(size, (size_t)(PAGE_SIZE - offset)); in __dma_prep_pa_range()
178 while (size != 0) { in __dma_prep_pa_range()
180 size -= bytes; in __dma_prep_pa_range()
183 bytes = min((size_t)PAGE_SIZE, size); in __dma_prep_pa_range()
187 static void __dma_complete_pa_range(dma_addr_t dma_addr, size_t size, in __dma_complete_pa_range() argument
192 size_t bytes = min(size, (size_t)(PAGE_SIZE - offset)); in __dma_complete_pa_range()
194 while (size != 0) { in __dma_complete_pa_range()
196 size -= bytes; in __dma_complete_pa_range()
199 bytes = min((size_t)PAGE_SIZE, size); in __dma_complete_pa_range()
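
__dma_prep_pa_range() and __dma_complete_pa_range() share one shape: walk the physical range page by page, clamping the first chunk to the end of its page (lines 176 and 192) and then advancing in whole pages. Reconstructed from the matches above, with __dma_prep_page() as the per-page helper; the complete variant is assumed to differ only in calling __dma_complete_page():

static void sketch_prep_pa_range(dma_addr_t dma_addr, size_t size,
				 enum dma_data_direction direction)
{
	struct page *page = pfn_to_page(PFN_DOWN(dma_addr));
	size_t offset = dma_addr & (PAGE_SIZE - 1);
	size_t bytes = min(size, (size_t)(PAGE_SIZE - offset));

	while (size != 0) {			/* lines 178/194 */
		__dma_prep_page(page, offset, bytes, direction);
		size -= bytes;			/* lines 180/196 */
		++page;
		offset = 0;	/* later chunks start at the page boundary */
		bytes = min((size_t)PAGE_SIZE, size);	/* lines 183/199 */
	}
}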
241 unsigned long offset, size_t size, in tile_dma_map_page() argument
247 BUG_ON(offset + size > PAGE_SIZE); in tile_dma_map_page()
248 __dma_prep_page(page, offset, size, direction); in tile_dma_map_page()
254 size_t size, enum dma_data_direction direction, in tile_dma_unmap_page() argument
260 dma_address & (PAGE_SIZE - 1), size, direction); in tile_dma_unmap_page()
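
tile_dma_map_page()/tile_dma_unmap_page() handle exactly one page: the BUG_ON at line 247 rejects any mapping that would cross a page boundary, so offset + size must fit within PAGE_SIZE. Unmap recovers the page and intra-page offset from the DMA address. A sketch; the return expression is an assumption, since only the prep/complete calls and the BUG_ON appear in the matches:

static dma_addr_t sketch_map_page(struct page *page, unsigned long offset,
				  size_t size, enum dma_data_direction dir)
{
	BUG_ON(offset + size > PAGE_SIZE);	/* single-page invariant */
	__dma_prep_page(page, offset, size, dir);
	return PFN_PHYS(page_to_pfn(page)) + offset;	/* assumed */
}

static void sketch_unmap_page(dma_addr_t dma_address, size_t size,
			      enum dma_data_direction dir)
{
	__dma_complete_page(pfn_to_page(PFN_DOWN(dma_address)),
			    dma_address & (PAGE_SIZE - 1), size, dir);
}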
265 size_t size, in tile_dma_sync_single_for_cpu() argument
270 __dma_complete_pa_range(dma_handle, size, direction); in tile_dma_sync_single_for_cpu()
274 dma_addr_t dma_handle, size_t size, in tile_dma_sync_single_for_device() argument
277 __dma_prep_pa_range(dma_handle, size, direction); in tile_dma_sync_single_for_device()
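
The sync hooks are one-liners: syncing a mapping back for CPU access completes the range, while syncing it for another device pass re-prepares it. The direction of delegation is taken directly from lines 270 and 277:

static void sketch_sync_for_cpu(dma_addr_t handle, size_t size,
				enum dma_data_direction dir)
{
	__dma_complete_pa_range(handle, size, dir);	/* CPU will access */
}

static void sketch_sync_for_device(dma_addr_t handle, size_t size,
				   enum dma_data_direction dir)
{
	__dma_prep_pa_range(handle, size, dir);		/* device will access */
}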
344 static void *tile_pci_dma_alloc_coherent(struct device *dev, size_t size, in tile_pci_dma_alloc_coherent() argument
349 int order = get_order(size); in tile_pci_dma_alloc_coherent()
369 static void tile_pci_dma_free_coherent(struct device *dev, size_t size, in tile_pci_dma_free_coherent() argument
373 homecache_free_pages((unsigned long)vaddr, get_order(size)); in tile_pci_dma_free_coherent()
417 unsigned long offset, size_t size, in tile_pci_dma_map_page() argument
423 BUG_ON(offset + size > PAGE_SIZE); in tile_pci_dma_map_page()
424 __dma_prep_page(page, offset, size, direction); in tile_pci_dma_map_page()
430 size_t size, in tile_pci_dma_unmap_page() argument
439 dma_address & (PAGE_SIZE - 1), size, direction); in tile_pci_dma_unmap_page()
444 size_t size, in tile_pci_dma_sync_single_for_cpu() argument
451 __dma_complete_pa_range(dma_handle, size, direction); in tile_pci_dma_sync_single_for_cpu()
456 size_t size, in tile_pci_dma_sync_single_for_device() argument
462 __dma_prep_pa_range(dma_handle, size, direction); in tile_pci_dma_sync_single_for_device()
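
The tile_pci_* entry points (lines 344-462) track the plain tile_* ops line for line as far as size is concerned: the same get_order() rounding, the same single-page BUG_ON (line 423), the same delegation to the pa_range helpers. The matches do not show what actually differs; a plausible guess is an address translation into PCI space on the returned handle. In the sketch below, dma_offset() is hypothetical:

static dma_addr_t sketch_pci_map_page(struct device *dev, struct page *page,
				      unsigned long offset, size_t size,
				      enum dma_data_direction dir)
{
	BUG_ON(offset + size > PAGE_SIZE);	/* line 423, same invariant */
	__dma_prep_page(page, offset, size, dir);
	/* dma_offset() is a hypothetical per-device CPU-to-PCI translation */
	return PFN_PHYS(page_to_pfn(page)) + offset + dma_offset(dev);
}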
532 static void *tile_swiotlb_alloc_coherent(struct device *dev, size_t size, in tile_swiotlb_alloc_coherent() argument
537 return swiotlb_alloc_coherent(dev, size, dma_handle, gfp); in tile_swiotlb_alloc_coherent()
540 static void tile_swiotlb_free_coherent(struct device *dev, size_t size, in tile_swiotlb_free_coherent() argument
544 swiotlb_free_coherent(dev, size, vaddr, dma_addr); in tile_swiotlb_free_coherent()
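
The swiotlb pair simply forwards size to the generic bounce-buffer allocator, as older kernels' swiotlb_alloc_coherent()/swiotlb_free_coherent() expected; any gfp adjustment in the elided body (such as forcing GFP_DMA) is an assumption:

static void *sketch_swiotlb_alloc(struct device *dev, size_t size,
				  dma_addr_t *dma_handle, gfp_t gfp)
{
	gfp |= GFP_DMA;		/* assumed: keep the buffer DMA-reachable */
	return swiotlb_alloc_coherent(dev, size, dma_handle, gfp);
}

static void sketch_swiotlb_free(struct device *dev, size_t size,
				void *vaddr, dma_addr_t dma_addr)
{
	swiotlb_free_coherent(dev, size, vaddr, dma_addr);	/* line 544 */
}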