dmaengine_buffer: referenced in drivers/iio/buffer/industrialio-buffer-dmaengine.c (left column gives the line number within that file)

   40  static struct dmaengine_buffer *iio_buffer_to_dmaengine_buffer(
   43      return container_of(buffer, struct dmaengine_buffer, queue.buffer);
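The cross-reference shows only the lines that mention dmaengine_buffer, so each function appears as fragments. For context, a short reconstruction of the accessor at lines 40-43 is sketched below; the struct iio_buffer *buffer parameter is inferred from the container_of() call and is not shown in the listing itself.

/* Sketch of the accessor at lines 40-43; the parameter name is inferred. */
static struct dmaengine_buffer *iio_buffer_to_dmaengine_buffer(
	struct iio_buffer *buffer)
{
	/*
	 * struct dmaengine_buffer embeds an iio_dma_buffer_queue (see the
	 * iio_dma_buffer_init() call at line 179), which in turn embeds the
	 * struct iio_buffer that the IIO core passes around, so
	 * container_of() recovers the outer DMA-specific structure.
	 */
	return container_of(buffer, struct dmaengine_buffer, queue.buffer);
}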
   60      struct dmaengine_buffer *dmaengine_buffer =
   65      block->bytes_used = min(block->size, dmaengine_buffer->max_size);
   67              dmaengine_buffer->align);
   69      desc = dmaengine_prep_slave_single(dmaengine_buffer->chan,
   82      spin_lock_irq(&dmaengine_buffer->queue.list_lock);
   83      list_add_tail(&block->head, &dmaengine_buffer->active);
   84      spin_unlock_irq(&dmaengine_buffer->queue.list_lock);
   86      dma_async_issue_pending(dmaengine_buffer->chan);
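The fragments at lines 60-86 come from the block submit path: the block size is clamped to the channel's maximum segment size, rounded down to the transfer alignment, turned into a slave DMA descriptor, the block is queued on the active list, and the engine is kicked. The sketch below shows the generic dmaengine client sequence those fragments follow; foo_dma_submit() and block_done() are hypothetical names, and the callback wiring is illustrative rather than copied from the driver.

#include <linux/dmaengine.h>

/* Hypothetical completion handler; the real driver marks the IIO block done. */
static void block_done(void *param)
{
}

/* Generic dmaengine client sequence behind the fragments above. */
static int foo_dma_submit(struct dma_chan *chan, dma_addr_t addr, size_t len)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	/* One contiguous block becomes a single device-to-memory transfer. */
	desc = dmaengine_prep_slave_single(chan, addr, len, DMA_DEV_TO_MEM,
					   DMA_PREP_INTERRUPT);
	if (!desc)
		return -ENOMEM;

	desc->callback = block_done;
	desc->callback_param = NULL;

	/* Queue the descriptor on the channel... */
	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	/* ...and start it; line 86 above is this step in the driver. */
	dma_async_issue_pending(chan);
	return 0;
}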
   93      struct dmaengine_buffer *dmaengine_buffer =
   96      dmaengine_terminate_sync(dmaengine_buffer->chan);
   97      iio_dma_buffer_block_list_abort(queue, &dmaengine_buffer->active);
  102      struct dmaengine_buffer *dmaengine_buffer =
  105      iio_dma_buffer_release(&dmaengine_buffer->queue);
  106      kfree(dmaengine_buffer);
  143      struct dmaengine_buffer *dmaengine_buffer;
  149      dmaengine_buffer = kzalloc(sizeof(*dmaengine_buffer), GFP_KERNEL);
  150      if (!dmaengine_buffer)
  174      INIT_LIST_HEAD(&dmaengine_buffer->active);
  175      dmaengine_buffer->chan = chan;
  176      dmaengine_buffer->align = width;
  177      dmaengine_buffer->max_size = dma_get_max_seg_size(chan->device->dev);
  179      iio_dma_buffer_init(&dmaengine_buffer->queue, chan->device->dev,
  182      dmaengine_buffer->queue.buffer.access = &iio_dmaengine_buffer_ops;
  184      return &dmaengine_buffer->queue.buffer;
  187      kfree(dmaengine_buffer);
  200      struct dmaengine_buffer *dmaengine_buffer =
  203      iio_dma_buffer_exit(&dmaengine_buffer->queue);
  204      dma_release_channel(dmaengine_buffer->chan);
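Lines 143-187 belong to the allocation helper (presumably iio_dmaengine_buffer_alloc()): the wrapper structure is allocated, the channel, alignment and maximum segment size are recorded, the generic DMA buffer queue is initialised, and its access ops are pointed at iio_dmaengine_buffer_ops. Lines 200-204 are the matching teardown, which exits the queue and releases the channel. A minimal usage sketch from a converter driver's point of view follows, assuming the helpers exported through include/linux/iio/buffer-dmaengine.h; foo_setup_dma_buffer(), foo_teardown_dma_buffer() and the "rx" channel name are assumptions, and exact signatures (for example the devm_ variants) differ across kernel versions.

#include <linux/err.h>
#include <linux/iio/iio.h>
#include <linux/iio/buffer.h>
#include <linux/iio/buffer-dmaengine.h>

/* Hypothetical probe-time helper for a driver using this buffer. */
static int foo_setup_dma_buffer(struct device *dev, struct iio_dev *indio_dev)
{
	struct iio_buffer *buffer;

	/* Requests the "rx" dmaengine channel and wraps it in an IIO buffer. */
	buffer = iio_dmaengine_buffer_alloc(dev, "rx");
	if (IS_ERR(buffer))
		return PTR_ERR(buffer);

	indio_dev->modes |= INDIO_BUFFER_HARDWARE;
	iio_device_attach_buffer(indio_dev, buffer);

	return 0;
}

/* Remove-time counterpart; presumably the free path at lines 200-204 above. */
static void foo_teardown_dma_buffer(struct iio_buffer *buffer)
{
	iio_dmaengine_buffer_free(buffer);
}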