Lines Matching refs:size (arch/arm/common/dmabounce.c)

52 	size_t		size;  member
62 unsigned long size; member
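
The two size members at lines 52 and 62 belong to the driver's two bookkeeping structures: one per outstanding bounce buffer, one per backing pool. A minimal sketch of how they likely fit together, reconstructed from the references below (field order and the surrounding members are assumptions, not verbatim source):

	struct safe_buffer {
		struct list_head	node;
		/* the original, "unsafe" request */
		void			*ptr;
		size_t			size;		/* line 52: bytes mapped */
		int			direction;
		/* the bounce ("safe") copy */
		struct dmabounce_pool	*pool;
		void			*safe;
		dma_addr_t		safe_dma_addr;
	};

	struct dmabounce_pool {
		unsigned long		size;		/* line 62: largest request served */
		struct dma_pool		*pool;
	};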
108 size_t size, enum dma_data_direction dir) in alloc_safe_buffer() argument
116 __func__, ptr, size, dir); in alloc_safe_buffer()
118 if (size <= device_info->small.size) { in alloc_safe_buffer()
120 } else if (size <= device_info->large.size) { in alloc_safe_buffer()
133 buf->size = size; in alloc_safe_buffer()
141 buf->safe = dma_alloc_coherent(dev, size, &buf->safe_dma_addr, in alloc_safe_buffer()
148 __func__, size); in alloc_safe_buffer()
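
Lines 108-148 show alloc_safe_buffer() choosing a backing store by request size: the small pool first, then the large pool, then dma_alloc_coherent() as a fallback for anything bigger. A condensed sketch of that path (locking, statistics and error handling omitted; the dma_pool_alloc() branch is an assumption implied by the pool structure above):

	if (size <= device_info->small.size)
		pool = &device_info->small;
	else if (size <= device_info->large.size)
		pool = &device_info->large;
	else
		pool = NULL;				/* too big for either pool */

	buf->size = size;				/* line 133: recorded for unmap */
	if (pool)
		buf->safe = dma_pool_alloc(pool->pool, GFP_ATOMIC,
					   &buf->safe_dma_addr);
	else
		buf->safe = dma_alloc_coherent(dev, size, &buf->safe_dma_addr,
					       GFP_ATOMIC);	/* line 141 */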
177 b->safe_dma_addr + b->size > safe_dma_addr) { in find_safe_buffer()
202 dma_free_coherent(device_info->dev, buf->size, buf->safe, in free_safe_buffer()
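
Line 177 is the upper-bound half of the containment test find_safe_buffer() uses to translate a DMA handle back into its safe_buffer; pairing it with the implied lower-bound check gives:

	/* match when safe_dma_addr falls inside
	 * [b->safe_dma_addr, b->safe_dma_addr + b->size) */
	if (b->safe_dma_addr <= safe_dma_addr &&
	    b->safe_dma_addr + b->size > safe_dma_addr)
		return b;

free_safe_buffer() (line 202) then releases the copy using the same buf->size recorded at allocation, presumably returning pool-backed buffers to their dma_pool and oversized ones to dma_free_coherent().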
222 static int needs_bounce(struct device *dev, dma_addr_t dma_addr, size_t size) in needs_bounce() argument
231 if (limit && size > limit) { in needs_bounce()
233 "mask %#Lx)\n", size, *dev->dma_mask); in needs_bounce()
238 if ((dma_addr | (dma_addr + size - 1)) & ~mask) in needs_bounce()
242 return !!dev->archdata.dmabounce->needs_bounce(dev, dma_addr, size); in needs_bounce()
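
needs_bounce() decides per mapping. Line 231 rejects outright any request larger than the whole window the DMA mask can address; line 238 forces a bounce if any bit outside the mask is set in either the first or the last byte of the buffer. The arithmetic is easy to check in user space (the mask and addresses below are made-up illustration values):

	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		uint64_t mask = 0x03ffffff;		/* 26-bit DMA mask */
		uint64_t dma_addr = 0x03fff800;		/* start is inside the mask */
		uint64_t size = 0x1000;			/* 4 KiB */
		uint64_t last = dma_addr + size - 1;	/* 0x040007ff: past the mask */

		/* the kernel's test from line 238 */
		uint64_t out = (dma_addr | last) & ~mask;
		printf("%#llx -> %s\n", (unsigned long long)out,
		       out ? "bounce" : "no bounce");	/* 0x4000000 -> bounce */
		return 0;
	}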
245 static inline dma_addr_t map_single(struct device *dev, void *ptr, size_t size, in map_single() argument
254 buf = alloc_safe_buffer(device_info, ptr, size, dir); in map_single()
267 __func__, ptr, buf->safe, size); in map_single()
268 memcpy(buf->safe, ptr, size); in map_single()
275 size_t size, enum dma_data_direction dir) in unmap_single() argument
277 BUG_ON(buf->size != size); in unmap_single()
290 __func__, buf->safe, ptr, size); in unmap_single()
291 memcpy(ptr, buf->safe, size); in unmap_single()
298 __cpuc_flush_dcache_area(ptr, size); in unmap_single()
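
Lines 268 and 291 are the two halves of the actual bouncing, and the direction argument decides which copies run: data is staged into the safe buffer before the device reads it, and copied back (then flushed from the data cache at line 298, so page cache pages stay coherent with user mappings) after the device has written. In outline (the direction tests are assumptions consistent with the DMA API):

	/* map_single(): stage CPU data before DMA to the device */
	if (dir == DMA_TO_DEVICE || dir == DMA_BIDIRECTIONAL)
		memcpy(buf->safe, ptr, size);		/* line 268 */

	/* unmap_single(): recover device data after DMA from the device */
	if (dir == DMA_FROM_DEVICE || dir == DMA_BIDIRECTIONAL) {
		memcpy(ptr, buf->safe, size);		/* line 291 */
		__cpuc_flush_dcache_area(ptr, size);	/* line 298 */
	}

The BUG_ON at line 277 insists callers unmap exactly the size they mapped, since buf->size is the length alloc_safe_buffer() recorded.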
312 unsigned long offset, size_t size, enum dma_data_direction dir, in dmabounce_map_page() argument
319 __func__, page, offset, size, dir); in dmabounce_map_page()
323 ret = needs_bounce(dev, dma_addr, size); in dmabounce_map_page()
328 arm_dma_ops.sync_single_for_device(dev, dma_addr, size, dir); in dmabounce_map_page()
337 return map_single(dev, page_address(page) + offset, size, dir); in dmabounce_map_page()
346 static void dmabounce_unmap_page(struct device *dev, dma_addr_t dma_addr, size_t size, in dmabounce_unmap_page() argument
352 __func__, dma_addr, size, dir); in dmabounce_unmap_page()
356 arm_dma_ops.sync_single_for_cpu(dev, dma_addr, size, dir); in dmabounce_unmap_page()
360 unmap_single(dev, buf, size, dir); in dmabounce_unmap_page()
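
dmabounce_map_page() (lines 312-337) makes the bounce decision per page: a negative needs_bounce() result fails the mapping, zero takes the fast path of plain cache maintenance on the real address, and only a positive result routes through map_single(). dmabounce_unmap_page() mirrors this, passing non-bounced handles straight to the ordinary sync at line 356. A condensed sketch of the map side (the error cookie returned on failure is an assumption):

	ret = needs_bounce(dev, dma_addr, size);
	if (ret < 0)
		return ~0;	/* assumption: DMA mapping-error cookie */

	if (ret == 0) {
		arm_dma_ops.sync_single_for_device(dev, dma_addr, size, dir);
		return dma_addr;			/* fast path, no bounce */
	}

	return map_single(dev, page_address(page) + offset, size, dir);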
395 dma_addr_t handle, size_t size, enum dma_data_direction dir) in dmabounce_sync_for_cpu() argument
397 if (!__dmabounce_sync_for_cpu(dev, handle, size, dir)) in dmabounce_sync_for_cpu()
400 arm_dma_ops.sync_single_for_cpu(dev, handle, size, dir); in dmabounce_sync_for_cpu()
435 dma_addr_t handle, size_t size, enum dma_data_direction dir) in dmabounce_sync_for_device() argument
437 if (!__dmabounce_sync_for_device(dev, handle, size, dir)) in dmabounce_sync_for_device()
440 arm_dma_ops.sync_single_for_device(dev, handle, size, dir); in dmabounce_sync_for_device()
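
Both sync hooks share one convention: an internal helper copies between the original buffer and the safe copy when the handle belongs to a bounce buffer and returns 0, so only non-bounced handles fall through to arm_dma_ops (lines 397 and 437). A sketch of the helper's likely shape for the CPU direction (the lookup name and the offset handling for partial syncs are assumptions):

	static int __dmabounce_sync_for_cpu(struct device *dev, dma_addr_t addr,
			size_t size, enum dma_data_direction dir)
	{
		struct safe_buffer *buf = find_safe_buffer_dev(dev, addr, __func__);
		unsigned long off;

		if (!buf)
			return 1;	/* not bounced: do the normal sync */

		off = addr - buf->safe_dma_addr;
		if (dir == DMA_FROM_DEVICE || dir == DMA_BIDIRECTIONAL)
			memcpy(buf->ptr + off, buf->safe + off, size);

		return 0;		/* handled: skip arm_dma_ops */
	}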
468 const char *name, unsigned long size) in dmabounce_init_pool() argument
470 pool->size = size; in dmabounce_init_pool()
472 pool->pool = dma_pool_create(name, dev, size, in dmabounce_init_pool()
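
dmabounce_init_pool() (lines 468-472) just records the pool's size cutoff and backs it with a standard dma_pool. Filling in the dma_pool_create() arguments the listing truncates (the alignment and boundary values, and the return convention, are assumptions):

	static int dmabounce_init_pool(struct dmabounce_pool *pool,
			struct device *dev, const char *name,
			unsigned long size)
	{
		pool->size = size;			/* line 470 */
		pool->pool = dma_pool_create(name, dev, size,
					     0 /* align */, 0 /* boundary */);
		return pool->pool ? 0 : -ENOMEM;
	}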