Searched refs:dma (Results 1 - 200 of 2783) sorted by relevance


/linux-4.1.27/drivers/tty/serial/8250/
8250_dma.c
14 #include <linux/dma-mapping.h>
21 struct uart_8250_dma *dma = p->dma; __dma_tx_complete() local
26 dma_sync_single_for_cpu(dma->txchan->device->dev, dma->tx_addr, __dma_tx_complete()
31 dma->tx_running = 0; __dma_tx_complete()
33 xmit->tail += dma->tx_size; __dma_tx_complete()
35 p->port.icount.tx += dma->tx_size; __dma_tx_complete()
52 struct uart_8250_dma *dma = p->dma; __dma_rx_complete() local
57 dma_sync_single_for_cpu(dma->rxchan->device->dev, dma->rx_addr, __dma_rx_complete()
58 dma->rx_size, DMA_FROM_DEVICE); __dma_rx_complete()
60 dma->rx_running = 0; __dma_rx_complete()
61 dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state); __dma_rx_complete()
63 count = dma->rx_size - state.residue; __dma_rx_complete()
65 tty_insert_flip_string(tty_port, dma->rx_buf, count); __dma_rx_complete()
73 struct uart_8250_dma *dma = p->dma; serial8250_tx_dma() local
78 if (uart_tx_stopped(&p->port) || dma->tx_running || serial8250_tx_dma()
82 dma->tx_size = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); serial8250_tx_dma()
84 desc = dmaengine_prep_slave_single(dma->txchan, serial8250_tx_dma()
85 dma->tx_addr + xmit->tail, serial8250_tx_dma()
86 dma->tx_size, DMA_MEM_TO_DEV, serial8250_tx_dma()
93 dma->tx_running = 1; serial8250_tx_dma()
97 dma->tx_cookie = dmaengine_submit(desc); serial8250_tx_dma()
99 dma_sync_single_for_device(dma->txchan->device->dev, dma->tx_addr, serial8250_tx_dma()
102 dma_async_issue_pending(dma->txchan); serial8250_tx_dma()
103 if (dma->tx_err) { serial8250_tx_dma()
104 dma->tx_err = 0; serial8250_tx_dma()
112 dma->tx_err = 1; serial8250_tx_dma()
118 struct uart_8250_dma *dma = p->dma; serial8250_rx_dma() local
130 if (dma->rx_running) { serial8250_rx_dma()
131 dmaengine_pause(dma->rxchan); serial8250_rx_dma()
133 dmaengine_terminate_all(dma->rxchan); serial8250_rx_dma()
140 if (dma->rx_running) serial8250_rx_dma()
143 desc = dmaengine_prep_slave_single(dma->rxchan, dma->rx_addr, serial8250_rx_dma()
144 dma->rx_size, DMA_DEV_TO_MEM, serial8250_rx_dma()
149 dma->rx_running = 1; serial8250_rx_dma()
153 dma->rx_cookie = dmaengine_submit(desc); serial8250_rx_dma()
155 dma_sync_single_for_device(dma->rxchan->device->dev, dma->rx_addr, serial8250_rx_dma()
156 dma->rx_size, DMA_FROM_DEVICE); serial8250_rx_dma()
158 dma_async_issue_pending(dma->rxchan); serial8250_rx_dma()
165 struct uart_8250_dma *dma = p->dma; serial8250_request_dma() local
169 dma->rxconf.direction = DMA_DEV_TO_MEM; serial8250_request_dma()
170 dma->rxconf.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE; serial8250_request_dma()
171 dma->rxconf.src_addr = p->port.mapbase + UART_RX; serial8250_request_dma()
173 dma->txconf.direction = DMA_MEM_TO_DEV; serial8250_request_dma()
174 dma->txconf.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE; serial8250_request_dma()
175 dma->txconf.dst_addr = p->port.mapbase + UART_TX; serial8250_request_dma()
181 dma->rxchan = dma_request_slave_channel_compat(mask, serial8250_request_dma()
182 dma->fn, dma->rx_param, serial8250_request_dma()
184 if (!dma->rxchan) serial8250_request_dma()
187 dmaengine_slave_config(dma->rxchan, &dma->rxconf); serial8250_request_dma()
190 dma->txchan = dma_request_slave_channel_compat(mask, serial8250_request_dma()
191 dma->fn, dma->tx_param, serial8250_request_dma()
193 if (!dma->txchan) { serial8250_request_dma()
194 dma_release_channel(dma->rxchan); serial8250_request_dma()
198 dmaengine_slave_config(dma->txchan, &dma->txconf); serial8250_request_dma()
201 if (!dma->rx_size) serial8250_request_dma()
202 dma->rx_size = PAGE_SIZE; serial8250_request_dma()
204 dma->rx_buf = dma_alloc_coherent(dma->rxchan->device->dev, dma->rx_size, serial8250_request_dma()
205 &dma->rx_addr, GFP_KERNEL); serial8250_request_dma()
206 if (!dma->rx_buf) serial8250_request_dma()
210 dma->tx_addr = dma_map_single(dma->txchan->device->dev, serial8250_request_dma()
214 if (dma_mapping_error(dma->txchan->device->dev, dma->tx_addr)) { serial8250_request_dma()
215 dma_free_coherent(dma->rxchan->device->dev, dma->rx_size, serial8250_request_dma()
216 dma->rx_buf, dma->rx_addr); serial8250_request_dma()
220 dev_dbg_ratelimited(p->port.dev, "got both dma channels\n"); serial8250_request_dma()
224 dma_release_channel(dma->rxchan); serial8250_request_dma()
225 dma_release_channel(dma->txchan); serial8250_request_dma()
233 struct uart_8250_dma *dma = p->dma; serial8250_release_dma() local
235 if (!dma) serial8250_release_dma()
239 dmaengine_terminate_all(dma->rxchan); serial8250_release_dma()
240 dma_free_coherent(dma->rxchan->device->dev, dma->rx_size, dma->rx_buf, serial8250_release_dma()
241 dma->rx_addr); serial8250_release_dma()
242 dma_release_channel(dma->rxchan); serial8250_release_dma()
243 dma->rxchan = NULL; serial8250_release_dma()
246 dmaengine_terminate_all(dma->txchan); serial8250_release_dma()
247 dma_unmap_single(dma->txchan->device->dev, dma->tx_addr, serial8250_release_dma()
249 dma_release_channel(dma->txchan); serial8250_release_dma()
250 dma->txchan = NULL; serial8250_release_dma()
251 dma->tx_running = 0; serial8250_release_dma()
253 dev_dbg_ratelimited(p->port.dev, "dma channels released\n"); serial8250_release_dma()
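The serial8250_tx_dma() hits above follow the canonical dmaengine slave TX sequence: prepare a single slave transfer, attach a completion callback, submit, sync the streaming buffer for the device, then kick the engine. A minimal sketch of that flow, assuming the caller already holds a configured channel and a dma_map_single()'d buffer (all names here are illustrative, not part of the 8250 driver):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int sketch_start_tx(struct dma_chan *chan, dma_addr_t buf, size_t len,
                           dma_async_tx_callback done, void *arg)
{
        struct dma_async_tx_descriptor *desc;

        /* Describe one contiguous memory-to-device transfer. */
        desc = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -EBUSY;

        desc->callback = done;          /* runs when the transfer completes */
        desc->callback_param = arg;
        dmaengine_submit(desc);         /* queue the descriptor on the channel */

        /* Hand the streaming-mapped buffer to the device side... */
        dma_sync_single_for_device(chan->device->dev, buf, len, DMA_TO_DEVICE);
        dma_async_issue_pending(chan);  /* ...and start the engine */
        return 0;
}

The completion callback then mirrors __dma_tx_complete() above: dma_sync_single_for_cpu(), clear tx_running, and advance the circular buffer tail by tx_size.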
8250_omap.c
25 #include <linux/dma-mapping.h>
236 struct uart_8250_dma *dma = up->dma; omap8250_restore_regs() local
238 if (dma && dma->tx_running) { omap8250_restore_regs()
412 if (up->dma) omap_8250_set_termios()
577 if (up->dma) { omap8250_irq()
615 if (up->dma) { omap_8250_startup()
620 up->dma = NULL; omap_8250_startup()
642 if (up->dma) omap_8250_startup()
643 up->dma->rx_dma(up, 0); omap_8250_startup()
662 if (up->dma) omap_8250_shutdown()
663 up->dma->rx_dma(up, UART_IIR_RX_TIMEOUT); omap_8250_shutdown()
672 if (up->dma) omap_8250_shutdown()
729 struct uart_8250_dma *dma = p->dma; __dma_rx_do_complete() local
734 dma_sync_single_for_cpu(dma->rxchan->device->dev, dma->rx_addr, __dma_rx_do_complete()
735 dma->rx_size, DMA_FROM_DEVICE); __dma_rx_do_complete()
737 dma->rx_running = 0; __dma_rx_do_complete()
738 dmaengine_tx_status(dma->rxchan, dma->rx_cookie, &state); __dma_rx_do_complete()
739 dmaengine_terminate_all(dma->rxchan); __dma_rx_do_complete()
741 count = dma->rx_size - state.residue; __dma_rx_do_complete()
743 tty_insert_flip_string(tty_port, dma->rx_buf, count); __dma_rx_do_complete()
758 struct uart_8250_dma *dma = p->dma; omap_8250_rx_dma() local
764 if (dma->rx_running) { omap_8250_rx_dma()
765 dmaengine_pause(dma->rxchan); omap_8250_rx_dma()
774 if (dma->rx_running) { omap_8250_rx_dma()
775 dmaengine_pause(dma->rxchan); omap_8250_rx_dma()
789 if (dma->rx_running) { omap_8250_rx_dma()
790 dmaengine_pause(dma->rxchan); omap_8250_rx_dma()
799 if (dma->rx_running) omap_8250_rx_dma()
802 desc = dmaengine_prep_slave_single(dma->rxchan, dma->rx_addr, omap_8250_rx_dma()
803 dma->rx_size, DMA_DEV_TO_MEM, omap_8250_rx_dma()
808 dma->rx_running = 1; omap_8250_rx_dma()
812 dma->rx_cookie = dmaengine_submit(desc); omap_8250_rx_dma()
814 dma_sync_single_for_device(dma->rxchan->device->dev, dma->rx_addr, omap_8250_rx_dma()
815 dma->rx_size, DMA_FROM_DEVICE); omap_8250_rx_dma()
817 dma_async_issue_pending(dma->rxchan); omap_8250_rx_dma()
826 struct uart_8250_dma *dma = p->dma; omap_8250_dma_tx_complete() local
832 dma_sync_single_for_cpu(dma->txchan->device->dev, dma->tx_addr, omap_8250_dma_tx_complete()
837 dma->tx_running = 0; omap_8250_dma_tx_complete()
839 xmit->tail += dma->tx_size; omap_8250_dma_tx_complete()
841 p->port.icount.tx += dma->tx_size; omap_8250_dma_tx_complete()
863 dma->tx_err = 1; omap_8250_dma_tx_complete()
873 struct uart_8250_dma *dma = p->dma; omap_8250_tx_dma() local
880 if (dma->tx_running) omap_8250_tx_dma()
889 if (dma->tx_err || p->capabilities & UART_CAP_RPM) { omap_8250_tx_dma()
900 dma->tx_size = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); omap_8250_tx_dma()
924 if (dma->tx_size < 4) { omap_8250_tx_dma()
931 desc = dmaengine_prep_slave_single(dma->txchan, omap_8250_tx_dma()
932 dma->tx_addr + xmit->tail + skip_byte, omap_8250_tx_dma()
933 dma->tx_size - skip_byte, DMA_MEM_TO_DEV, omap_8250_tx_dma()
940 dma->tx_running = 1; omap_8250_tx_dma()
945 dma->tx_cookie = dmaengine_submit(desc); omap_8250_tx_dma()
947 dma_sync_single_for_device(dma->txchan->device->dev, dma->tx_addr, omap_8250_tx_dma()
950 dma_async_issue_pending(dma->txchan); omap_8250_tx_dma()
951 if (dma->tx_err) omap_8250_tx_dma()
952 dma->tx_err = 0; omap_8250_tx_dma()
962 dma->tx_err = 1; omap_8250_tx_dma()
1000 if (status & UART_LSR_THRE && up->dma->tx_err) { omap_8250_dma_handle_irq()
1003 up->dma->tx_err = 0; omap_8250_dma_handle_irq()
1153 ret = of_property_count_strings(pdev->dev.of_node, "dma-names"); omap8250_probe()
1155 up.dma = &priv->omap8250_dma; omap8250_probe()
1304 if (up->dma) omap8250_runtime_suspend()
1330 if (up->dma) omap8250_runtime_resume()
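Both the generic and the OMAP RX completion paths above recover the received byte count from the engine's residue rather than trusting the requested length. A minimal sketch of that idiom, assuming a streaming-mapped RX buffer (names illustrative):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

static size_t sketch_rx_count(struct dma_chan *chan, dma_cookie_t cookie,
                              dma_addr_t buf, size_t buf_len)
{
        struct dma_tx_state state;

        /* Let the CPU see what the device wrote into the buffer. */
        dma_sync_single_for_cpu(chan->device->dev, buf, buf_len,
                                DMA_FROM_DEVICE);

        /* residue = bytes of the request the engine did NOT transfer. */
        dmaengine_tx_status(chan, cookie, &state);
        return buf_len - state.residue;
}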
/linux-4.1.27/drivers/dma-buf/
Makefile
1 obj-y := dma-buf.o fence.o reservation.o seqno-fence.o
/linux-4.1.27/drivers/crypto/qce/
Makefile
4 dma.o \
dma.c
17 #include "dma.h"
19 int qce_dma_request(struct device *dev, struct qce_dma_data *dma) qce_dma_request() argument
23 dma->txchan = dma_request_slave_channel_reason(dev, "tx"); qce_dma_request()
24 if (IS_ERR(dma->txchan)) qce_dma_request()
25 return PTR_ERR(dma->txchan); qce_dma_request()
27 dma->rxchan = dma_request_slave_channel_reason(dev, "rx"); qce_dma_request()
28 if (IS_ERR(dma->rxchan)) { qce_dma_request()
29 ret = PTR_ERR(dma->rxchan); qce_dma_request()
33 dma->result_buf = kmalloc(QCE_RESULT_BUF_SZ + QCE_IGNORE_BUF_SZ, qce_dma_request()
35 if (!dma->result_buf) { qce_dma_request()
40 dma->ignore_buf = dma->result_buf + QCE_RESULT_BUF_SZ; qce_dma_request()
44 dma_release_channel(dma->rxchan); qce_dma_request()
46 dma_release_channel(dma->txchan); qce_dma_request()
50 void qce_dma_release(struct qce_dma_data *dma) qce_dma_release() argument
52 dma_release_channel(dma->txchan); qce_dma_release()
53 dma_release_channel(dma->rxchan); qce_dma_release()
54 kfree(dma->result_buf); qce_dma_release()
156 int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *rx_sg, qce_dma_prep_sgs() argument
160 struct dma_chan *rxchan = dma->rxchan; qce_dma_prep_sgs()
161 struct dma_chan *txchan = dma->txchan; qce_dma_prep_sgs()
174 void qce_dma_issue_pending(struct qce_dma_data *dma) qce_dma_issue_pending() argument
176 dma_async_issue_pending(dma->rxchan); qce_dma_issue_pending()
177 dma_async_issue_pending(dma->txchan); qce_dma_issue_pending()
180 int qce_dma_terminate_all(struct qce_dma_data *dma) qce_dma_terminate_all() argument
184 ret = dmaengine_terminate_all(dma->rxchan); qce_dma_terminate_all()
185 return ret ?: dmaengine_terminate_all(dma->txchan); qce_dma_terminate_all()
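qce_dma_request() above shows the paired-channel request pattern with reverse-order unwinding on failure; unlike the plain request call, dma_request_slave_channel_reason() returns an ERR_PTR (possibly -EPROBE_DEFER) instead of NULL. A minimal sketch of the same pattern (names illustrative):

#include <linux/dmaengine.h>
#include <linux/err.h>

static int sketch_request_channels(struct device *dev,
                                   struct dma_chan **tx, struct dma_chan **rx)
{
        *tx = dma_request_slave_channel_reason(dev, "tx");
        if (IS_ERR(*tx))
                return PTR_ERR(*tx);    /* may be -EPROBE_DEFER; retry later */

        *rx = dma_request_slave_channel_reason(dev, "rx");
        if (IS_ERR(*rx)) {
                dma_release_channel(*tx);       /* unwind in reverse order */
                return PTR_ERR(*rx);
        }
        return 0;
}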
dma.h
45 int qce_dma_request(struct device *dev, struct qce_dma_data *dma);
46 void qce_dma_release(struct qce_dma_data *dma);
47 int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *sg_in,
50 void qce_dma_issue_pending(struct qce_dma_data *dma);
51 int qce_dma_terminate_all(struct qce_dma_data *dma);
core.h
17 #include "dma.h"
31 * @dma: pointer to dma data
46 struct qce_dma_data dma; member in struct:qce_device
/linux-4.1.27/drivers/dma/ioat/
Makefile
2 ioatdma-y := pci.o dma.o dma_v2.o dma_v3.o dca.o
dma.c
31 #include <linux/dma-mapping.h>
35 #include "dma.h"
102 struct dma_device *dma = &device->common; ioat_init_channel() local
109 chan->common.device = dma; ioat_init_channel()
111 list_add_tail(&chan->common.device_node, &dma->channels); ioat_init_channel()
130 struct dma_device *dma = &device->common; ioat1_enumerate_channels() local
132 INIT_LIST_HEAD(&dma->channels); ioat1_enumerate_channels()
133 dma->chancnt = readb(device->reg_base + IOAT_CHANCNT_OFFSET); ioat1_enumerate_channels()
134 dma->chancnt &= 0x1f; /* bits [4:0] valid */ ioat1_enumerate_channels()
135 if (dma->chancnt > ARRAY_SIZE(device->idx)) { ioat1_enumerate_channels()
137 dma->chancnt, ARRAY_SIZE(device->idx)); ioat1_enumerate_channels()
138 dma->chancnt = ARRAY_SIZE(device->idx); ioat1_enumerate_channels()
147 dma->chancnt--; ioat1_enumerate_channels()
149 for (i = 0; i < dma->chancnt; i++) { ioat1_enumerate_channels()
160 dma->chancnt = i; ioat1_enumerate_channels()
355 ioat1_dma_start_null_desc(ioat); /* give chain to dma device */ ioat1_dma_alloc_chan_resources()
820 struct dma_device *dma = &device->common; ioat_dma_self_test() local
845 dma_chan = container_of(dma->channels.next, struct dma_chan, ioat_dma_self_test()
847 if (dma->device_alloc_chan_resources(dma_chan) < 1) { ioat_dma_self_test()
882 dma->device_issue_pending(dma_chan); ioat_dma_self_test()
887 dma->device_tx_status(dma_chan, cookie, NULL) ioat_dma_self_test()
904 dma->device_free_chan_resources(dma_chan); ioat_dma_self_test()
1015 struct dma_device *dma = &device->common; ioat_probe() local
1039 dma_cap_set(DMA_MEMCPY, dma->cap_mask); ioat_probe()
1040 dma->dev = &pdev->dev; ioat_probe()
1042 if (!dma->chancnt) { ioat_probe()
1080 /* ioat1_intr_quirk - fix up dma ctrl register to enable / disable msi */ ioat1_intr_quirk()
1112 struct dma_device *dma = c->device; cap_show() local
1115 dma_has_cap(DMA_PQ, dma->cap_mask) ? " pq" : "", cap_show()
1116 dma_has_cap(DMA_PQ_VAL, dma->cap_mask) ? " pq_val" : "", cap_show()
1117 dma_has_cap(DMA_XOR, dma->cap_mask) ? " xor" : "", cap_show()
1118 dma_has_cap(DMA_XOR_VAL, dma->cap_mask) ? " xor_val" : "", cap_show()
1119 dma_has_cap(DMA_INTERRUPT, dma->cap_mask) ? " intr" : ""); cap_show()
1126 struct dma_device *dma = c->device; version_show() local
1127 struct ioatdma_device *device = to_ioatdma_device(dma); version_show()
1167 struct dma_device *dma = &device->common; ioat_kobject_add() local
1170 list_for_each_entry(c, &dma->channels, device_node) { ioat_kobject_add()
1187 struct dma_device *dma = &device->common; ioat_kobject_del() local
1190 list_for_each_entry(c, &dma->channels, device_node) { ioat_kobject_del()
1203 struct dma_device *dma; ioat1_dma_probe() local
1211 dma = &device->common; ioat1_dma_probe()
1212 dma->device_prep_dma_memcpy = ioat1_dma_prep_memcpy; ioat1_dma_probe()
1213 dma->device_issue_pending = ioat1_dma_memcpy_issue_pending; ioat1_dma_probe()
1214 dma->device_alloc_chan_resources = ioat1_dma_alloc_chan_resources; ioat1_dma_probe()
1215 dma->device_free_chan_resources = ioat1_dma_free_chan_resources; ioat1_dma_probe()
1216 dma->device_tx_status = ioat_dma_tx_status; ioat1_dma_probe()
1234 struct dma_device *dma = &device->common; ioat_dma_remove() local
1240 dma_async_device_unregister(dma); ioat_dma_remove()
1245 INIT_LIST_HEAD(&dma->channels); ioat_dma_remove()
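ioat_dma_self_test() above drives the raw struct dma_device operations directly rather than the dmaengine client wrappers. A minimal sketch of that memcpy test flow, assuming src/dst were already mapped to dma_src/dma_dst and the channel advertises DMA_MEMCPY (names illustrative):

#include <linux/dmaengine.h>

static bool sketch_memcpy_test(struct dma_chan *chan, dma_addr_t dma_dst,
                               dma_addr_t dma_src, size_t len)
{
        struct dma_device *dev = chan->device;
        struct dma_async_tx_descriptor *tx;
        dma_cookie_t cookie;

        tx = dev->device_prep_dma_memcpy(chan, dma_dst, dma_src, len,
                                         DMA_PREP_INTERRUPT);
        if (!tx)
                return false;

        cookie = tx->tx_submit(tx);     /* dmaengine_submit() wraps this */
        dev->device_issue_pending(chan);

        /* dma_sync_wait() polls device_tx_status() with a timeout. */
        return dma_sync_wait(chan, cookie) == DMA_COMPLETE;
}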
/linux-4.1.27/drivers/block/rsxx/
Makefile
2 rsxx-objs := config.o core.o cregs.o dev.o dma.o
dma.c
2 * Filename: dma.c
88 struct rsxx_dma *dma; member in struct:dma_tracker
128 static unsigned int get_dma_size(struct rsxx_dma *dma) get_dma_size() argument
130 if (dma->sub_page.cnt) get_dma_size()
131 return dma->sub_page.cnt << 9; get_dma_size()
140 struct rsxx_dma *dma) set_tracker_dma()
142 trackers->list[tag].dma = dma; set_tracker_dma()
148 return trackers->list[tag].dma; get_tracker_dma()
171 trackers->list[tag].dma = NULL; push_tracker()
224 static void rsxx_free_dma(struct rsxx_dma_ctrl *ctrl, struct rsxx_dma *dma) rsxx_free_dma() argument
226 if (dma->cmd != HW_CMD_BLK_DISCARD) { rsxx_free_dma()
227 if (!pci_dma_mapping_error(ctrl->card->dev, dma->dma_addr)) { rsxx_free_dma()
228 pci_unmap_page(ctrl->card->dev, dma->dma_addr, rsxx_free_dma()
229 get_dma_size(dma), rsxx_free_dma()
230 dma->cmd == HW_CMD_BLK_WRITE ? rsxx_free_dma()
236 kmem_cache_free(rsxx_dma_pool, dma); rsxx_free_dma()
240 struct rsxx_dma *dma, rsxx_complete_dma()
250 if (dma->cb) rsxx_complete_dma()
251 dma->cb(ctrl->card, dma->cb_data, status ? 1 : 0); rsxx_complete_dma()
253 rsxx_free_dma(ctrl, dma); rsxx_complete_dma()
259 struct rsxx_dma *dma; rsxx_cleanup_dma_queue() local
263 list_for_each_entry_safe(dma, tmp, q, list) { list_for_each_entry_safe()
264 list_del(&dma->list); list_for_each_entry_safe()
266 rsxx_complete_dma(ctrl, dma, DMA_CANCELLED); list_for_each_entry_safe()
268 rsxx_free_dma(ctrl, dma); list_for_each_entry_safe()
276 struct rsxx_dma *dma) rsxx_requeue_dma()
284 list_add(&dma->list, &ctrl->queue); rsxx_requeue_dma()
289 struct rsxx_dma *dma, rsxx_handle_dma_error()
297 dma->cmd, dma->laddr, hw_st); rsxx_handle_dma_error()
306 switch (dma->cmd) { rsxx_handle_dma_error()
310 dma->cmd = HW_CMD_BLK_RECON_READ; rsxx_handle_dma_error()
345 dma->cmd, dma->laddr, hw_st); rsxx_handle_dma_error()
352 rsxx_requeue_dma(ctrl, dma); rsxx_handle_dma_error()
354 rsxx_complete_dma(ctrl, dma, status); rsxx_handle_dma_error()
368 * The dma engine was stalled because the SW_CMD_IDX write dma_engine_stalled()
398 struct rsxx_dma *dma; rsxx_issue_dmas() local
423 dma = list_entry(ctrl->queue.next, struct rsxx_dma, list); rsxx_issue_dmas()
424 list_del(&dma->list); rsxx_issue_dmas()
435 rsxx_complete_dma(ctrl, dma, DMA_CANCELLED); rsxx_issue_dmas()
439 if (dma->cmd != HW_CMD_BLK_DISCARD) { rsxx_issue_dmas()
440 if (dma->cmd == HW_CMD_BLK_WRITE) rsxx_issue_dmas()
455 dma->dma_addr = pci_map_page(ctrl->card->dev, dma->page, rsxx_issue_dmas()
456 dma->pg_off, dma->sub_page.cnt << 9, dir); rsxx_issue_dmas()
457 if (pci_dma_mapping_error(ctrl->card->dev, dma->dma_addr)) { rsxx_issue_dmas()
459 rsxx_complete_dma(ctrl, dma, DMA_CANCELLED); rsxx_issue_dmas()
464 set_tracker_dma(ctrl->trackers, tag, dma); rsxx_issue_dmas()
465 hw_cmd_buf[ctrl->cmd.idx].command = dma->cmd; rsxx_issue_dmas()
469 ((dma->sub_page.cnt & 0x7) << 4) | rsxx_issue_dmas()
470 (dma->sub_page.off & 0x7); rsxx_issue_dmas()
473 cpu_to_le32(dma->laddr); rsxx_issue_dmas()
476 cpu_to_le64(dma->dma_addr); rsxx_issue_dmas()
480 ctrl->id, dma->laddr, tag, ctrl->cmd.idx); rsxx_issue_dmas()
485 if (dma->cmd == HW_CMD_BLK_WRITE) rsxx_issue_dmas()
487 else if (dma->cmd == HW_CMD_BLK_DISCARD) rsxx_issue_dmas()
510 struct rsxx_dma *dma; rsxx_dma_done() local
539 dma = get_tracker_dma(ctrl->trackers, tag); rsxx_dma_done()
540 if (dma == NULL) { rsxx_dma_done()
555 ctrl->id, dma->laddr, tag, status, count, rsxx_dma_done()
564 rsxx_handle_dma_error(ctrl, dma, status); rsxx_dma_done()
566 rsxx_complete_dma(ctrl, dma, 0); rsxx_dma_done()
620 struct rsxx_dma *dma; rsxx_queue_discard() local
622 dma = kmem_cache_alloc(rsxx_dma_pool, GFP_KERNEL); rsxx_queue_discard()
623 if (!dma) rsxx_queue_discard()
626 dma->cmd = HW_CMD_BLK_DISCARD; rsxx_queue_discard()
627 dma->laddr = laddr; rsxx_queue_discard()
628 dma->dma_addr = 0; rsxx_queue_discard()
629 dma->sub_page.off = 0; rsxx_queue_discard()
630 dma->sub_page.cnt = 0; rsxx_queue_discard()
631 dma->page = NULL; rsxx_queue_discard()
632 dma->pg_off = 0; rsxx_queue_discard()
633 dma->cb = cb; rsxx_queue_discard()
634 dma->cb_data = cb_data; rsxx_queue_discard()
636 dev_dbg(CARD_TO_DEV(card), "Queuing[D] laddr %x\n", dma->laddr); rsxx_queue_discard()
638 list_add_tail(&dma->list, q); rsxx_queue_discard()
654 struct rsxx_dma *dma; rsxx_queue_dma() local
656 dma = kmem_cache_alloc(rsxx_dma_pool, GFP_KERNEL); rsxx_queue_dma()
657 if (!dma) rsxx_queue_dma()
660 dma->cmd = dir ? HW_CMD_BLK_WRITE : HW_CMD_BLK_READ; rsxx_queue_dma()
661 dma->laddr = laddr; rsxx_queue_dma()
662 dma->sub_page.off = (dma_off >> 9); rsxx_queue_dma()
663 dma->sub_page.cnt = (dma_len >> 9); rsxx_queue_dma()
664 dma->page = page; rsxx_queue_dma()
665 dma->pg_off = pg_off; rsxx_queue_dma()
666 dma->cb = cb; rsxx_queue_dma()
667 dma->cb_data = cb_data; rsxx_queue_dma()
671 dir ? 'W' : 'R', dma->laddr, dma->sub_page.off, rsxx_queue_dma()
672 dma->sub_page.cnt, dma->page, dma->pg_off); rsxx_queue_dma()
675 list_add_tail(&dma->list, q); rsxx_queue_dma()
833 ctrl->trackers->list[i].dma = NULL; rsxx_dma_ctrl_init()
980 struct rsxx_dma *dma; rsxx_dma_cancel() local
986 dma = get_tracker_dma(ctrl->trackers, i); rsxx_dma_cancel()
987 if (dma) { rsxx_dma_cancel()
989 rsxx_complete_dma(ctrl, dma, DMA_CANCELLED); rsxx_dma_cancel()
1040 struct rsxx_dma *dma; rsxx_eeh_save_issued_dmas() local
1052 dma = get_tracker_dma(card->ctrl[i].trackers, j); rsxx_eeh_save_issued_dmas()
1053 if (dma == NULL) rsxx_eeh_save_issued_dmas()
1056 if (dma->cmd == HW_CMD_BLK_WRITE) rsxx_eeh_save_issued_dmas()
1058 else if (dma->cmd == HW_CMD_BLK_DISCARD) rsxx_eeh_save_issued_dmas()
1063 if (dma->cmd != HW_CMD_BLK_DISCARD) { rsxx_eeh_save_issued_dmas()
1064 pci_unmap_page(card->dev, dma->dma_addr, rsxx_eeh_save_issued_dmas()
1065 get_dma_size(dma), rsxx_eeh_save_issued_dmas()
1066 dma->cmd == HW_CMD_BLK_WRITE ? rsxx_eeh_save_issued_dmas()
1071 list_add_tail(&dma->list, &issued_dmas[i]); rsxx_eeh_save_issued_dmas()
138 set_tracker_dma(struct dma_tracker_list *trackers, int tag, struct rsxx_dma *dma) set_tracker_dma() argument
239 rsxx_complete_dma(struct rsxx_dma_ctrl *ctrl, struct rsxx_dma *dma, unsigned int status) rsxx_complete_dma() argument
275 rsxx_requeue_dma(struct rsxx_dma_ctrl *ctrl, struct rsxx_dma *dma) rsxx_requeue_dma() argument
288 rsxx_handle_dma_error(struct rsxx_dma_ctrl *ctrl, struct rsxx_dma *dma, u8 hw_st) rsxx_handle_dma_error() argument
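rsxx_issue_dmas() and rsxx_free_dma() above use the legacy PCI streaming-DMA API and check every mapping before programming the hardware command. A minimal sketch of that map/check/unmap discipline (names illustrative):

#include <linux/pci.h>
#include <linux/errno.h>

static int sketch_map_for_write(struct pci_dev *pdev, struct page *page,
                                unsigned long off, size_t len, dma_addr_t *out)
{
        dma_addr_t addr = pci_map_page(pdev, page, off, len, PCI_DMA_TODEVICE);

        if (pci_dma_mapping_error(pdev, addr))
                return -ENOMEM;         /* never program a failed mapping */

        *out = addr;
        return 0;
        /* when the command completes or is cancelled:
         * pci_unmap_page(pdev, addr, len, PCI_DMA_TODEVICE); */
}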
/linux-4.1.27/arch/blackfin/mach-bf518/
Makefile
5 obj-y := ints-priority.o dma.o
/linux-4.1.27/arch/blackfin/mach-bf527/
Makefile
5 obj-y := ints-priority.o dma.o
/linux-4.1.27/arch/blackfin/mach-bf533/
Makefile
5 obj-y := ints-priority.o dma.o
/linux-4.1.27/arch/blackfin/mach-bf537/
Makefile
5 obj-y := ints-priority.o dma.o
/linux-4.1.27/arch/blackfin/mach-bf548/
Makefile
5 obj-y := ints-priority.o dma.o
/linux-4.1.27/arch/arm/kernel/
dma.c
2 * linux/arch/arm/kernel/dma.c
22 #include <asm/dma.h>
24 #include <asm/mach/dma.h>
39 int __init isa_dma_add(unsigned int chan, dma_t *dma) isa_dma_add() argument
41 if (!dma->d_ops) isa_dma_add()
44 sg_init_table(&dma->buf, 1); isa_dma_add()
48 dma_chan[chan] = dma; isa_dma_add()
59 dma_t *dma = dma_channel(chan); request_dma() local
62 if (!dma) request_dma()
65 if (xchg(&dma->lock, 1) != 0) request_dma()
68 dma->device_id = device_id; request_dma()
69 dma->active = 0; request_dma()
70 dma->invalid = 1; request_dma()
73 if (dma->d_ops->request) request_dma()
74 ret = dma->d_ops->request(chan, dma); request_dma()
77 xchg(&dma->lock, 0); request_dma()
82 pr_err("dma: trying to allocate DMA%d\n", chan); request_dma()
97 dma_t *dma = dma_channel(chan); free_dma() local
99 if (!dma) free_dma()
102 if (dma->active) { free_dma()
103 pr_err("dma%d: freeing active DMA\n", chan); free_dma()
104 dma->d_ops->disable(chan, dma); free_dma()
105 dma->active = 0; free_dma()
108 if (xchg(&dma->lock, 0) != 0) { free_dma()
109 if (dma->d_ops->free) free_dma()
110 dma->d_ops->free(chan, dma); free_dma()
114 pr_err("dma%d: trying to free free DMA\n", chan); free_dma()
118 pr_err("dma: trying to free DMA%d\n", chan); free_dma()
126 dma_t *dma = dma_channel(chan); set_dma_sg() local
128 if (dma->active) set_dma_sg()
129 pr_err("dma%d: altering DMA SG while DMA active\n", chan); set_dma_sg()
131 dma->sg = sg; set_dma_sg()
132 dma->sgcount = nr_sg; set_dma_sg()
133 dma->invalid = 1; set_dma_sg()
143 dma_t *dma = dma_channel(chan); __set_dma_addr() local
145 if (dma->active) __set_dma_addr()
146 pr_err("dma%d: altering DMA address while DMA active\n", chan); __set_dma_addr()
148 dma->sg = NULL; __set_dma_addr()
149 dma->addr = addr; __set_dma_addr()
150 dma->invalid = 1; __set_dma_addr()
160 dma_t *dma = dma_channel(chan); set_dma_count() local
162 if (dma->active) set_dma_count()
163 pr_err("dma%d: altering DMA count while DMA active\n", chan); set_dma_count()
165 dma->sg = NULL; set_dma_count()
166 dma->count = count; set_dma_count()
167 dma->invalid = 1; set_dma_count()
175 dma_t *dma = dma_channel(chan); set_dma_mode() local
177 if (dma->active) set_dma_mode()
178 pr_err("dma%d: altering DMA mode while DMA active\n", chan); set_dma_mode()
180 dma->dma_mode = mode; set_dma_mode()
181 dma->invalid = 1; set_dma_mode()
189 dma_t *dma = dma_channel(chan); enable_dma() local
191 if (!dma->lock) enable_dma()
194 if (dma->active == 0) { enable_dma()
195 dma->active = 1; enable_dma()
196 dma->d_ops->enable(chan, dma); enable_dma()
201 pr_err("dma%d: trying to enable free DMA\n", chan); enable_dma()
210 dma_t *dma = dma_channel(chan); disable_dma() local
212 if (!dma->lock) disable_dma()
215 if (dma->active == 1) { disable_dma()
216 dma->active = 0; disable_dma()
217 dma->d_ops->disable(chan, dma); disable_dma()
222 pr_err("dma%d: trying to disable free DMA\n", chan); disable_dma()
232 dma_t *dma = dma_channel(chan); dma_channel_active() local
233 return dma->active; dma_channel_active()
239 pr_err("dma%d: trying to set_dma_page\n", chan); set_dma_page()
245 dma_t *dma = dma_channel(chan); set_dma_speed() local
248 if (dma->d_ops->setspeed) set_dma_speed()
249 ret = dma->d_ops->setspeed(chan, dma, cycle_ns); set_dma_speed()
250 dma->speed = ret; set_dma_speed()
256 dma_t *dma = dma_channel(chan); get_dma_residue() local
259 if (dma->d_ops->residue) get_dma_residue()
260 ret = dma->d_ops->residue(chan, dma); get_dma_residue()
272 dma_t *dma = dma_channel(i); proc_dma_show() local
273 if (dma && dma->lock) proc_dma_show()
274 seq_printf(m, "%2d: %s\n", i, dma->device_id); proc_dma_show()
293 proc_create("dma", 0, NULL, &proc_dma_operations); proc_dma_init()
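The helpers above implement the ARM side of the legacy request_dma()/enable_dma() client API; set_dma_sg(), set_dma_mode() and friends only record the settings and mark the channel invalid, so the next enable_dma() reprograms the hardware. A minimal client sketch under those semantics (channel number and names illustrative):

#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <asm/dma.h>

static int sketch_sg_read(unsigned int chan, struct scatterlist *sg, int nents)
{
        if (request_dma(chan, "sketch-dev"))    /* claim the channel */
                return -EBUSY;

        set_dma_sg(chan, sg, nents);            /* programmed on enable */
        set_dma_mode(chan, DMA_MODE_READ);      /* device -> memory */
        enable_dma(chan);

        /* ...the device's completion interrupt ends the transfer... */
        disable_dma(chan);
        free_dma(chan);
        return 0;
}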
dma-isa.c
2 * linux/arch/arm/kernel/dma-isa.c
12 * linux/include/asm/dma.h: Defines for using and allocating dma channels.
16 * arch/arm/kernel/dma-ebsa285.c
21 #include <linux/dma-mapping.h>
24 #include <asm/dma.h>
25 #include <asm/mach/dma.h>
47 static int isa_get_dma_residue(unsigned int chan, dma_t *dma) isa_get_dma_residue() argument
58 static void isa_enable_dma(unsigned int chan, dma_t *dma) isa_enable_dma() argument
60 if (dma->invalid) { isa_enable_dma()
65 mode = (chan & 3) | dma->dma_mode; isa_enable_dma()
66 switch (dma->dma_mode & DMA_MODE_MASK) { isa_enable_dma()
84 if (!dma->sg) { isa_enable_dma()
89 dma->sg = &dma->buf; isa_enable_dma()
90 dma->sgcount = 1; isa_enable_dma()
91 dma->buf.length = dma->count; isa_enable_dma()
92 dma->buf.dma_address = dma_map_single(NULL, isa_enable_dma()
93 dma->addr, dma->count, isa_enable_dma()
97 address = dma->buf.dma_address; isa_enable_dma()
98 length = dma->buf.length - 1; isa_enable_dma()
117 dma->invalid = 0; isa_enable_dma()
122 static void isa_disable_dma(unsigned int chan, dma_t *dma) isa_disable_dma() argument
139 .name = "dma low page",
147 .name = "dma high page",
/linux-4.1.27/drivers/media/v4l2-core/
videobuf-dma-sg.c
28 #include <linux/dma-mapping.h>
35 #include <media/videobuf-dma-sg.h>
50 MODULE_DESCRIPTION("helper module to manage video4linux dma sg buffers");
144 return &mem->dma; videobuf_to_dma()
148 static void videobuf_dma_init(struct videobuf_dmabuf *dma) videobuf_dma_init() argument
150 memset(dma, 0, sizeof(*dma)); videobuf_dma_init()
151 dma->magic = MAGIC_DMABUF; videobuf_dma_init()
154 static int videobuf_dma_init_user_locked(struct videobuf_dmabuf *dma, videobuf_dma_init_user_locked() argument
160 dma->direction = direction; videobuf_dma_init_user_locked()
161 switch (dma->direction) { videobuf_dma_init_user_locked()
174 dma->offset = data & ~PAGE_MASK; videobuf_dma_init_user_locked()
175 dma->size = size; videobuf_dma_init_user_locked()
176 dma->nr_pages = last-first+1; videobuf_dma_init_user_locked()
177 dma->pages = kmalloc(dma->nr_pages * sizeof(struct page *), GFP_KERNEL); videobuf_dma_init_user_locked()
178 if (NULL == dma->pages) videobuf_dma_init_user_locked()
182 data, size, dma->nr_pages); videobuf_dma_init_user_locked()
185 data & PAGE_MASK, dma->nr_pages, videobuf_dma_init_user_locked()
187 dma->pages, NULL); videobuf_dma_init_user_locked()
189 if (err != dma->nr_pages) { videobuf_dma_init_user_locked()
190 dma->nr_pages = (err >= 0) ? err : 0; videobuf_dma_init_user_locked()
191 dprintk(1, "get_user_pages: err=%d [%d]\n", err, dma->nr_pages); videobuf_dma_init_user_locked()
197 static int videobuf_dma_init_user(struct videobuf_dmabuf *dma, int direction, videobuf_dma_init_user() argument
203 ret = videobuf_dma_init_user_locked(dma, direction, data, size); videobuf_dma_init_user()
209 static int videobuf_dma_init_kernel(struct videobuf_dmabuf *dma, int direction, videobuf_dma_init_kernel() argument
216 dma->direction = direction; videobuf_dma_init_kernel()
217 dma->vaddr_pages = kcalloc(nr_pages, sizeof(*dma->vaddr_pages), videobuf_dma_init_kernel()
219 if (!dma->vaddr_pages) videobuf_dma_init_kernel()
222 dma->dma_addr = kcalloc(nr_pages, sizeof(*dma->dma_addr), GFP_KERNEL); videobuf_dma_init_kernel()
223 if (!dma->dma_addr) { videobuf_dma_init_kernel()
224 kfree(dma->vaddr_pages); videobuf_dma_init_kernel()
230 addr = dma_alloc_coherent(dma->dev, PAGE_SIZE, videobuf_dma_init_kernel()
231 &(dma->dma_addr[i]), GFP_KERNEL); videobuf_dma_init_kernel()
235 dma->vaddr_pages[i] = virt_to_page(addr); videobuf_dma_init_kernel()
237 dma->vaddr = vmap(dma->vaddr_pages, nr_pages, VM_MAP | VM_IOREMAP, videobuf_dma_init_kernel()
239 if (NULL == dma->vaddr) { videobuf_dma_init_kernel()
245 (unsigned long)dma->vaddr, videobuf_dma_init_kernel()
248 memset(dma->vaddr, 0, nr_pages << PAGE_SHIFT); videobuf_dma_init_kernel()
249 dma->nr_pages = nr_pages; videobuf_dma_init_kernel()
257 addr = page_address(dma->vaddr_pages[i]); videobuf_dma_init_kernel()
258 dma_free_coherent(dma->dev, PAGE_SIZE, addr, dma->dma_addr[i]); videobuf_dma_init_kernel()
260 kfree(dma->dma_addr); videobuf_dma_init_kernel()
261 dma->dma_addr = NULL; videobuf_dma_init_kernel()
262 kfree(dma->vaddr_pages); videobuf_dma_init_kernel()
263 dma->vaddr_pages = NULL; videobuf_dma_init_kernel()
269 static int videobuf_dma_init_overlay(struct videobuf_dmabuf *dma, int direction, videobuf_dma_init_overlay() argument
274 dma->direction = direction; videobuf_dma_init_overlay()
279 dma->bus_addr = addr; videobuf_dma_init_overlay()
280 dma->nr_pages = nr_pages; videobuf_dma_init_overlay()
285 static int videobuf_dma_map(struct device *dev, struct videobuf_dmabuf *dma) videobuf_dma_map() argument
287 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); videobuf_dma_map()
288 BUG_ON(0 == dma->nr_pages); videobuf_dma_map()
290 if (dma->pages) { videobuf_dma_map()
291 dma->sglist = videobuf_pages_to_sg(dma->pages, dma->nr_pages, videobuf_dma_map()
292 dma->offset, dma->size); videobuf_dma_map()
294 if (dma->vaddr) { videobuf_dma_map()
295 dma->sglist = videobuf_vmalloc_to_sg(dma->vaddr, videobuf_dma_map()
296 dma->nr_pages); videobuf_dma_map()
298 if (dma->bus_addr) { videobuf_dma_map()
299 dma->sglist = vmalloc(sizeof(*dma->sglist)); videobuf_dma_map()
300 if (NULL != dma->sglist) { videobuf_dma_map()
301 dma->sglen = 1; videobuf_dma_map()
302 sg_dma_address(&dma->sglist[0]) = dma->bus_addr videobuf_dma_map()
304 dma->sglist[0].offset = dma->bus_addr & ~PAGE_MASK; videobuf_dma_map()
305 sg_dma_len(&dma->sglist[0]) = dma->nr_pages * PAGE_SIZE; videobuf_dma_map()
308 if (NULL == dma->sglist) { videobuf_dma_map()
312 if (!dma->bus_addr) { videobuf_dma_map()
313 dma->sglen = dma_map_sg(dev, dma->sglist, videobuf_dma_map()
314 dma->nr_pages, dma->direction); videobuf_dma_map()
315 if (0 == dma->sglen) { videobuf_dma_map()
318 vfree(dma->sglist); videobuf_dma_map()
319 dma->sglist = NULL; videobuf_dma_map()
320 dma->sglen = 0; videobuf_dma_map()
328 int videobuf_dma_unmap(struct device *dev, struct videobuf_dmabuf *dma) videobuf_dma_unmap() argument
330 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); videobuf_dma_unmap()
332 if (!dma->sglen) videobuf_dma_unmap()
335 dma_unmap_sg(dev, dma->sglist, dma->sglen, dma->direction); videobuf_dma_unmap()
337 vfree(dma->sglist); videobuf_dma_unmap()
338 dma->sglist = NULL; videobuf_dma_unmap()
339 dma->sglen = 0; videobuf_dma_unmap()
345 int videobuf_dma_free(struct videobuf_dmabuf *dma) videobuf_dma_free() argument
348 MAGIC_CHECK(dma->magic, MAGIC_DMABUF); videobuf_dma_free()
349 BUG_ON(dma->sglen); videobuf_dma_free()
351 if (dma->pages) { videobuf_dma_free()
352 for (i = 0; i < dma->nr_pages; i++) videobuf_dma_free()
353 page_cache_release(dma->pages[i]); videobuf_dma_free()
354 kfree(dma->pages); videobuf_dma_free()
355 dma->pages = NULL; videobuf_dma_free()
358 if (dma->dma_addr) { videobuf_dma_free()
359 for (i = 0; i < dma->nr_pages; i++) { videobuf_dma_free()
362 addr = page_address(dma->vaddr_pages[i]); videobuf_dma_free()
363 dma_free_coherent(dma->dev, PAGE_SIZE, addr, videobuf_dma_free()
364 dma->dma_addr[i]); videobuf_dma_free()
366 kfree(dma->dma_addr); videobuf_dma_free()
367 dma->dma_addr = NULL; videobuf_dma_free()
368 kfree(dma->vaddr_pages); videobuf_dma_free()
369 dma->vaddr_pages = NULL; videobuf_dma_free()
370 vunmap(dma->vaddr); videobuf_dma_free()
371 dma->vaddr = NULL; videobuf_dma_free()
374 if (dma->bus_addr) videobuf_dma_free()
375 dma->bus_addr = 0; videobuf_dma_free()
376 dma->direction = DMA_NONE; videobuf_dma_free()
480 videobuf_dma_init(&mem->dma); __videobuf_alloc_vb()
496 return mem->dma.vaddr; __videobuf_to_vaddr()
510 if (!mem->dma.dev) __videobuf_iolock()
511 mem->dma.dev = q->dev; __videobuf_iolock()
513 WARN_ON(mem->dma.dev != q->dev); __videobuf_iolock()
521 err = videobuf_dma_init_kernel(&mem->dma, __videobuf_iolock()
527 /* dma directly to userspace */ __videobuf_iolock()
528 err = videobuf_dma_init_user(&mem->dma, __videobuf_iolock()
539 err = videobuf_dma_init_user_locked(&mem->dma, __videobuf_iolock()
557 err = videobuf_dma_init_overlay(&mem->dma, DMA_FROM_DEVICE, __videobuf_iolock()
565 err = videobuf_dma_map(q->dev, &mem->dma); __videobuf_iolock()
576 BUG_ON(!mem || !mem->dma.sglen); __videobuf_sync()
579 MAGIC_CHECK(mem->dma.magic, MAGIC_DMABUF); __videobuf_sync()
581 dma_sync_sg_for_cpu(q->dev, mem->dma.sglist, __videobuf_sync()
582 mem->dma.sglen, mem->dma.direction); __videobuf_sync()
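videobuf_dma_map() and videobuf_dma_unmap() above are thin wrappers around the streaming scatter-gather DMA API. A minimal sketch of the underlying map/sync/unmap cycle, assuming a populated scatterlist (names illustrative; per the DMA-API rules, sync and unmap take the nents originally passed to dma_map_sg(), not the mapped count it returns):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int sketch_sg_cycle(struct device *dev, struct scatterlist *sgl,
                           int nents, enum dma_data_direction dir)
{
        int mapped = dma_map_sg(dev, sgl, nents, dir);  /* 0 means failure */

        if (!mapped)
                return -EIO;

        /* ...hardware fills the buffer; before the CPU reads it:... */
        dma_sync_sg_for_cpu(dev, sgl, nents, dir);

        dma_unmap_sg(dev, sgl, nents, dir);
        return 0;
}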
Makefile
26 obj-$(CONFIG_VIDEOBUF_DMA_SG) += videobuf-dma-sg.o
27 obj-$(CONFIG_VIDEOBUF_DMA_CONTIG) += videobuf-dma-contig.o
34 obj-$(CONFIG_VIDEOBUF2_DMA_CONTIG) += videobuf2-dma-contig.o
35 obj-$(CONFIG_VIDEOBUF2_DMA_SG) += videobuf2-dma-sg.o
/linux-4.1.27/arch/sparc/kernel/
dma.c
2 #include <linux/dma-mapping.h>
3 #include <linux/dma-debug.h>
sparc_ksyms_32.c
14 #include <asm/dma.h>
/linux-4.1.27/arch/arm/mach-mmp/include/mach/
dma.h
2 * linux/arch/arm/mach-mmp/include/mach/dma.h
12 #include <plat/dma.h>
devices.h
16 int dma[MAX_RESOURCE_DMA]; member in struct:pxa_device_desc
27 .dma = { _dma }, \
38 .dma = { _dma }, \
49 .dma = { _dma }, \
/linux-4.1.27/drivers/media/platform/xilinx/
xilinx-dma.c
15 #include <linux/dma/xilinx_dma.h>
26 #include <media/videobuf2-dma-contig.h>
28 #include "xilinx-dma.h"
62 static int xvip_dma_verify_format(struct xvip_dma *dma) xvip_dma_verify_format() argument
68 subdev = xvip_dma_remote_subdev(&dma->pad, &fmt.pad); xvip_dma_verify_format()
77 if (dma->fmtinfo->code != fmt.format.code || xvip_dma_verify_format()
78 dma->format.height != fmt.format.height || xvip_dma_verify_format()
79 dma->format.width != fmt.format.width || xvip_dma_verify_format()
80 dma->format.colorspace != fmt.format.colorspace) xvip_dma_verify_format()
103 struct xvip_dma *dma = pipe->output; xvip_pipeline_start_stop() local
109 entity = &dma->video.entity; xvip_pipeline_start_stop()
194 struct xvip_dma *dma; xvip_pipeline_validate() local
199 dma = to_xvip_dma(media_entity_to_video_device(entity)); xvip_pipeline_validate()
201 if (dma->pad.flags & MEDIA_PAD_FL_SINK) { xvip_pipeline_validate()
202 pipe->output = dma; xvip_pipeline_validate()
246 * @dma: DMA engine at one end of the pipeline
254 struct xvip_dma *dma) xvip_pipeline_prepare()
262 ret = xvip_pipeline_validate(pipe, dma); xvip_pipeline_prepare()
285 * @dma: DMA channel that uses the buffer
290 struct xvip_dma *dma; member in struct:xvip_dma_buffer
298 struct xvip_dma *dma = buf->dma; xvip_dma_complete() local
300 spin_lock(&dma->queued_lock); xvip_dma_complete()
302 spin_unlock(&dma->queued_lock); xvip_dma_complete()
305 buf->buf.v4l2_buf.sequence = dma->sequence++; xvip_dma_complete()
307 vb2_set_plane_payload(&buf->buf, 0, dma->format.sizeimage); xvip_dma_complete()
316 struct xvip_dma *dma = vb2_get_drv_priv(vq); xvip_dma_queue_setup() local
319 if (fmt && fmt->fmt.pix.sizeimage < dma->format.sizeimage) xvip_dma_queue_setup()
324 sizes[0] = fmt ? fmt->fmt.pix.sizeimage : dma->format.sizeimage; xvip_dma_queue_setup()
325 alloc_ctxs[0] = dma->alloc_ctx; xvip_dma_queue_setup()
332 struct xvip_dma *dma = vb2_get_drv_priv(vb->vb2_queue); xvip_dma_buffer_prepare() local
335 buf->dma = dma; xvip_dma_buffer_prepare()
342 struct xvip_dma *dma = vb2_get_drv_priv(vb->vb2_queue); xvip_dma_buffer_queue() local
348 if (dma->queue.type == V4L2_BUF_TYPE_VIDEO_CAPTURE) { xvip_dma_buffer_queue()
350 dma->xt.dir = DMA_DEV_TO_MEM; xvip_dma_buffer_queue()
351 dma->xt.src_sgl = false; xvip_dma_buffer_queue()
352 dma->xt.dst_sgl = true; xvip_dma_buffer_queue()
353 dma->xt.dst_start = addr; xvip_dma_buffer_queue()
356 dma->xt.dir = DMA_MEM_TO_DEV; xvip_dma_buffer_queue()
357 dma->xt.src_sgl = true; xvip_dma_buffer_queue()
358 dma->xt.dst_sgl = false; xvip_dma_buffer_queue()
359 dma->xt.src_start = addr; xvip_dma_buffer_queue()
362 dma->xt.frame_size = 1; xvip_dma_buffer_queue()
363 dma->sgl[0].size = dma->format.width * dma->fmtinfo->bpp; xvip_dma_buffer_queue()
364 dma->sgl[0].icg = dma->format.bytesperline - dma->sgl[0].size; xvip_dma_buffer_queue()
365 dma->xt.numf = dma->format.height; xvip_dma_buffer_queue()
367 desc = dmaengine_prep_interleaved_dma(dma->dma, &dma->xt, flags); xvip_dma_buffer_queue()
369 dev_err(dma->xdev->dev, "Failed to prepare DMA transfer\n"); xvip_dma_buffer_queue()
376 spin_lock_irq(&dma->queued_lock); xvip_dma_buffer_queue()
377 list_add_tail(&buf->queue, &dma->queued_bufs); xvip_dma_buffer_queue()
378 spin_unlock_irq(&dma->queued_lock); xvip_dma_buffer_queue()
382 if (vb2_is_streaming(&dma->queue)) xvip_dma_buffer_queue()
383 dma_async_issue_pending(dma->dma); xvip_dma_buffer_queue()
388 struct xvip_dma *dma = vb2_get_drv_priv(vq); xvip_dma_start_streaming() local
393 dma->sequence = 0; xvip_dma_start_streaming()
402 pipe = dma->video.entity.pipe xvip_dma_start_streaming()
403 ? to_xvip_pipeline(&dma->video.entity) : &dma->pipe; xvip_dma_start_streaming()
405 ret = media_entity_pipeline_start(&dma->video.entity, &pipe->pipe); xvip_dma_start_streaming()
412 ret = xvip_dma_verify_format(dma); xvip_dma_start_streaming()
416 ret = xvip_pipeline_prepare(pipe, dma); xvip_dma_start_streaming()
423 dma_async_issue_pending(dma->dma); xvip_dma_start_streaming()
431 media_entity_pipeline_stop(&dma->video.entity); xvip_dma_start_streaming()
435 spin_lock_irq(&dma->queued_lock); xvip_dma_start_streaming()
436 list_for_each_entry_safe(buf, nbuf, &dma->queued_bufs, queue) { xvip_dma_start_streaming()
440 spin_unlock_irq(&dma->queued_lock); xvip_dma_start_streaming()
447 struct xvip_dma *dma = vb2_get_drv_priv(vq); xvip_dma_stop_streaming() local
448 struct xvip_pipeline *pipe = to_xvip_pipeline(&dma->video.entity); xvip_dma_stop_streaming()
455 dmaengine_terminate_all(dma->dma); xvip_dma_stop_streaming()
459 media_entity_pipeline_stop(&dma->video.entity); xvip_dma_stop_streaming()
462 spin_lock_irq(&dma->queued_lock); xvip_dma_stop_streaming()
463 list_for_each_entry_safe(buf, nbuf, &dma->queued_bufs, queue) { xvip_dma_stop_streaming()
467 spin_unlock_irq(&dma->queued_lock); xvip_dma_stop_streaming()
488 struct xvip_dma *dma = to_xvip_dma(vfh->vdev); xvip_dma_querycap() local
491 | dma->xdev->v4l2_caps; xvip_dma_querycap()
493 if (dma->queue.type == V4L2_BUF_TYPE_VIDEO_CAPTURE) xvip_dma_querycap()
499 strlcpy(cap->card, dma->video.name, sizeof(cap->card)); xvip_dma_querycap()
501 dma->xdev->dev->of_node->name, dma->port); xvip_dma_querycap()
515 struct xvip_dma *dma = to_xvip_dma(vfh->vdev); xvip_dma_enum_format() local
520 f->pixelformat = dma->format.pixelformat; xvip_dma_enum_format()
521 strlcpy(f->description, dma->fmtinfo->description, xvip_dma_enum_format()
531 struct xvip_dma *dma = to_xvip_dma(vfh->vdev); xvip_dma_get_format() local
533 format->fmt.pix = dma->format; xvip_dma_get_format()
539 __xvip_dma_try_format(struct xvip_dma *dma, struct v4l2_pix_format *pix, __xvip_dma_try_format() argument
565 align = lcm(dma->align, info->bpp); __xvip_dma_try_format()
579 max_bpl = rounddown(XVIP_DMA_MAX_WIDTH, dma->align); __xvip_dma_try_format()
580 bpl = rounddown(pix->bytesperline, dma->align); __xvip_dma_try_format()
593 struct xvip_dma *dma = to_xvip_dma(vfh->vdev); xvip_dma_try_format() local
595 __xvip_dma_try_format(dma, &format->fmt.pix, NULL); xvip_dma_try_format()
603 struct xvip_dma *dma = to_xvip_dma(vfh->vdev); xvip_dma_set_format() local
606 __xvip_dma_try_format(dma, &format->fmt.pix, &info); xvip_dma_set_format()
608 if (vb2_is_busy(&dma->queue)) xvip_dma_set_format()
611 dma->format = format->fmt.pix; xvip_dma_set_format()
612 dma->fmtinfo = info; xvip_dma_set_format()
653 int xvip_dma_init(struct xvip_composite_device *xdev, struct xvip_dma *dma, xvip_dma_init() argument
659 dma->xdev = xdev; xvip_dma_init()
660 dma->port = port; xvip_dma_init()
661 mutex_init(&dma->lock); xvip_dma_init()
662 mutex_init(&dma->pipe.lock); xvip_dma_init()
663 INIT_LIST_HEAD(&dma->queued_bufs); xvip_dma_init()
664 spin_lock_init(&dma->queued_lock); xvip_dma_init()
666 dma->fmtinfo = xvip_get_format_by_fourcc(XVIP_DMA_DEF_FORMAT); xvip_dma_init()
667 dma->format.pixelformat = dma->fmtinfo->fourcc; xvip_dma_init()
668 dma->format.colorspace = V4L2_COLORSPACE_SRGB; xvip_dma_init()
669 dma->format.field = V4L2_FIELD_NONE; xvip_dma_init()
670 dma->format.width = XVIP_DMA_DEF_WIDTH; xvip_dma_init()
671 dma->format.height = XVIP_DMA_DEF_HEIGHT; xvip_dma_init()
672 dma->format.bytesperline = dma->format.width * dma->fmtinfo->bpp; xvip_dma_init()
673 dma->format.sizeimage = dma->format.bytesperline * dma->format.height; xvip_dma_init()
676 dma->pad.flags = type == V4L2_BUF_TYPE_VIDEO_CAPTURE xvip_dma_init()
679 ret = media_entity_init(&dma->video.entity, 1, &dma->pad, 0); xvip_dma_init()
684 dma->video.fops = &xvip_dma_fops; xvip_dma_init()
685 dma->video.v4l2_dev = &xdev->v4l2_dev; xvip_dma_init()
686 dma->video.queue = &dma->queue; xvip_dma_init()
687 snprintf(dma->video.name, sizeof(dma->video.name), "%s %s %u", xvip_dma_init()
691 dma->video.vfl_type = VFL_TYPE_GRABBER; xvip_dma_init()
692 dma->video.vfl_dir = type == V4L2_BUF_TYPE_VIDEO_CAPTURE xvip_dma_init()
694 dma->video.release = video_device_release_empty; xvip_dma_init()
695 dma->video.ioctl_ops = &xvip_dma_ioctl_ops; xvip_dma_init()
696 dma->video.lock = &dma->lock; xvip_dma_init()
698 video_set_drvdata(&dma->video, dma); xvip_dma_init()
701 dma->alloc_ctx = vb2_dma_contig_init_ctx(dma->xdev->dev); xvip_dma_init()
702 if (IS_ERR(dma->alloc_ctx)) xvip_dma_init()
712 dma->queue.type = type; xvip_dma_init()
713 dma->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF; xvip_dma_init()
714 dma->queue.lock = &dma->lock; xvip_dma_init()
715 dma->queue.drv_priv = dma; xvip_dma_init()
716 dma->queue.buf_struct_size = sizeof(struct xvip_dma_buffer); xvip_dma_init()
717 dma->queue.ops = &xvip_dma_queue_qops; xvip_dma_init()
718 dma->queue.mem_ops = &vb2_dma_contig_memops; xvip_dma_init()
719 dma->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC xvip_dma_init()
721 ret = vb2_queue_init(&dma->queue); xvip_dma_init()
723 dev_err(dma->xdev->dev, "failed to initialize VB2 queue\n"); xvip_dma_init()
729 dma->dma = dma_request_slave_channel(dma->xdev->dev, name); xvip_dma_init()
730 if (dma->dma == NULL) { xvip_dma_init()
731 dev_err(dma->xdev->dev, "no VDMA channel found\n"); xvip_dma_init()
736 dma->align = 1 << dma->dma->device->copy_align; xvip_dma_init()
738 ret = video_register_device(&dma->video, VFL_TYPE_GRABBER, -1); xvip_dma_init()
740 dev_err(dma->xdev->dev, "failed to register video device\n"); xvip_dma_init()
747 xvip_dma_cleanup(dma); xvip_dma_init()
751 void xvip_dma_cleanup(struct xvip_dma *dma) xvip_dma_cleanup() argument
753 if (video_is_registered(&dma->video)) xvip_dma_cleanup()
754 video_unregister_device(&dma->video); xvip_dma_cleanup()
756 if (dma->dma) xvip_dma_cleanup()
757 dma_release_channel(dma->dma); xvip_dma_cleanup()
759 if (!IS_ERR_OR_NULL(dma->alloc_ctx)) xvip_dma_cleanup()
760 vb2_dma_contig_cleanup_ctx(dma->alloc_ctx); xvip_dma_cleanup()
762 media_entity_cleanup(&dma->video.entity); xvip_dma_cleanup()
764 mutex_destroy(&dma->lock); xvip_dma_cleanup()
765 mutex_destroy(&dma->pipe.lock); xvip_dma_cleanup()
253 xvip_pipeline_prepare(struct xvip_pipeline *pipe, struct xvip_dma *dma) xvip_pipeline_prepare() argument
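xvip_dma_buffer_queue() above describes one video frame to the engine as an interleaved template: numf lines of sgl[0].size bytes, each followed by an icg gap that realizes the line stride. A minimal sketch of filling such a template for capture, assuming the caller provides storage with room for one struct data_chunk after the template, as struct xvip_dma does by placing sgl[1] directly after xt (names illustrative):

#include <linux/dmaengine.h>

static struct dma_async_tx_descriptor *
sketch_prep_frame(struct dma_chan *chan, struct dma_interleaved_template *xt,
                  dma_addr_t addr, size_t line_bytes, size_t stride,
                  size_t height)
{
        xt->dir = DMA_DEV_TO_MEM;       /* capture: device writes memory */
        xt->dst_start = addr;
        xt->dst_sgl = true;             /* chunk/gap layout on the memory side */
        xt->src_sgl = false;
        xt->frame_size = 1;             /* one chunk description per line */
        xt->numf = height;              /* ...repeated for every line */
        xt->sgl[0].size = line_bytes;   /* payload bytes per line */
        xt->sgl[0].icg = stride - line_bytes;   /* inter-line gap */

        return dmaengine_prep_interleaved_dma(chan, xt,
                                              DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}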
xilinx-dma.h
72 * @dma: DMA engine channel
74 * @xt: dma interleaved template for dma configuration
97 struct dma_chan *dma; member in struct:xvip_dma
105 int xvip_dma_init(struct xvip_composite_device *xdev, struct xvip_dma *dma,
107 void xvip_dma_cleanup(struct xvip_dma *dma);
xilinx-vipp.c
27 #include "xilinx-dma.h"
178 struct xvip_dma *dma; xvip_graph_find_dma() local
180 list_for_each_entry(dma, &xdev->dmas, list) { xvip_graph_find_dma()
181 if (dma->port == port) xvip_graph_find_dma()
182 return dma; xvip_graph_find_dma()
200 struct xvip_dma *dma; xvip_graph_build_dma() local
224 dma = xvip_graph_find_dma(xdev, link.local_port); xvip_graph_build_dma()
225 if (dma == NULL) { xvip_graph_build_dma()
234 dma->video.name); xvip_graph_build_dma()
254 if (dma->pad.flags & MEDIA_PAD_FL_SOURCE) { xvip_graph_build_dma()
255 source = &dma->video.entity; xvip_graph_build_dma()
256 source_pad = &dma->pad; xvip_graph_build_dma()
262 sink = &dma->video.entity; xvip_graph_build_dma()
263 sink_pad = &dma->pad; xvip_graph_build_dma()
427 struct xvip_dma *dma; xvip_graph_dma_init_one() local
446 dma = devm_kzalloc(xdev->dev, sizeof(*dma), GFP_KERNEL); xvip_graph_dma_init_one()
447 if (dma == NULL) xvip_graph_dma_init_one()
450 ret = xvip_dma_init(xdev, dma, type, index); xvip_graph_dma_init_one()
457 list_add_tail(&dma->list, &xdev->dmas); xvip_graph_dma_init_one()
491 struct xvip_dma *dma; xvip_graph_cleanup() local
500 list_for_each_entry_safe(dma, dmap, &xdev->dmas, list) { xvip_graph_cleanup()
501 xvip_dma_cleanup(dma); xvip_graph_cleanup()
502 list_del(&dma->list); xvip_graph_cleanup()
/linux-4.1.27/arch/mips/lantiq/xway/
Makefile
1 obj-y := prom.o sysctrl.o clk.o reset.o dma.o gptu.o dcdc.o
vmmc.c
12 #include <linux/dma-mapping.h>
31 dma_addr_t dma; vmmc_probe() local
35 &dma, GFP_ATOMIC)); vmmc_probe()
/linux-4.1.27/arch/blackfin/mach-bf538/
Makefile
5 obj-y := ints-priority.o dma.o
/linux-4.1.27/arch/c6x/mm/
Makefile
5 obj-y := init.o dma-coherent.o
/linux-4.1.27/arch/cris/arch-v32/mach-a3/
Makefile
5 obj-y := dma.o pinmux.o io.o arbiter.o
/linux-4.1.27/arch/cris/arch-v32/mach-fs/
Makefile
5 obj-y := dma.o pinmux.o io.o arbiter.o
/linux-4.1.27/arch/arm/mach-rpc/
Makefile
7 obj-y := dma.o ecard.o fiq.o irq.o riscpc.o time.o
dma.c
2 * linux/arch/arm/mach-rpc/dma.c
15 #include <linux/dma-mapping.h>
19 #include <asm/dma.h>
25 #include <asm/mach/dma.h>
29 struct dma_struct dma; member in struct:iomd_dma
60 if (idma->dma.sg) { iomd_get_next_sg()
78 if (idma->dma.sgcount > 1) { iomd_get_next_sg()
79 idma->dma.sg = sg_next(idma->dma.sg); iomd_get_next_sg()
80 idma->dma_addr = idma->dma.sg->dma_address; iomd_get_next_sg()
81 idma->dma_len = idma->dma.sg->length; iomd_get_next_sg()
82 idma->dma.sgcount--; iomd_get_next_sg()
84 idma->dma.sg = NULL; iomd_get_next_sg()
139 static int iomd_request_dma(unsigned int chan, dma_t *dma) iomd_request_dma() argument
141 struct iomd_dma *idma = container_of(dma, struct iomd_dma, dma); iomd_request_dma()
144 0, idma->dma.device_id, idma); iomd_request_dma()
147 static void iomd_free_dma(unsigned int chan, dma_t *dma) iomd_free_dma() argument
149 struct iomd_dma *idma = container_of(dma, struct iomd_dma, dma); iomd_free_dma()
154 static void iomd_enable_dma(unsigned int chan, dma_t *dma) iomd_enable_dma() argument
156 struct iomd_dma *idma = container_of(dma, struct iomd_dma, dma); iomd_enable_dma()
160 if (idma->dma.invalid) { iomd_enable_dma()
161 idma->dma.invalid = 0; iomd_enable_dma()
167 if (!idma->dma.sg) { iomd_enable_dma()
168 idma->dma.sg = &idma->dma.buf; iomd_enable_dma()
169 idma->dma.sgcount = 1; iomd_enable_dma()
170 idma->dma.buf.length = idma->dma.count; iomd_enable_dma()
171 idma->dma.buf.dma_address = dma_map_single(NULL, iomd_enable_dma()
172 idma->dma.addr, idma->dma.count, iomd_enable_dma()
173 idma->dma.dma_mode == DMA_MODE_READ ? iomd_enable_dma()
181 if (idma->dma.dma_mode == DMA_MODE_READ) iomd_enable_dma()
188 static void iomd_disable_dma(unsigned int chan, dma_t *dma) iomd_disable_dma() argument
190 struct iomd_dma *idma = container_of(dma, struct iomd_dma, dma); iomd_disable_dma()
201 static int iomd_set_dma_speed(unsigned int chan, dma_t *dma, int cycle) iomd_set_dma_speed() argument
257 struct dma_struct dma; member in struct:floppy_dma
261 static void floppy_enable_dma(unsigned int chan, dma_t *dma) floppy_enable_dma() argument
263 struct floppy_dma *fdma = container_of(dma, struct floppy_dma, dma); floppy_enable_dma()
268 if (fdma->dma.sg) floppy_enable_dma()
271 if (fdma->dma.dma_mode == DMA_MODE_READ) { floppy_enable_dma()
281 regs.ARM_r9 = fdma->dma.count; floppy_enable_dma()
282 regs.ARM_r10 = (unsigned long)fdma->dma.addr; floppy_enable_dma()
295 static void floppy_disable_dma(unsigned int chan, dma_t *dma) floppy_disable_dma() argument
297 struct floppy_dma *fdma = container_of(dma, struct floppy_dma, dma); floppy_disable_dma()
302 static int floppy_get_residue(unsigned int chan, dma_t *dma) floppy_get_residue() argument
319 static void sound_enable_disable_dma(unsigned int chan, dma_t *dma) sound_enable_disable_dma() argument
332 .dma = {
374 iomd_dma[i].dma.d_ops = &iomd_dma_ops; rpc_dma_init()
376 ret = isa_dma_add(i, &iomd_dma[i].dma); rpc_dma_init()
381 ret = isa_dma_add(DMA_VIRTUAL_FLOPPY, &floppy_dma.dma); rpc_dma_init()
/linux-4.1.27/arch/arm/mach-footbridge/
dma.c
2 * linux/arch/arm/kernel/dma-ebsa285.c
9 * 09-Nov-1998 RMK Split out ISA DMA functions to dma-isa.c
17 #include <asm/dma.h>
20 #include <asm/mach/dma.h>
24 static int fb_dma_request(unsigned int chan, dma_t *dma)
29 static void fb_dma_enable(unsigned int chan, dma_t *dma)
33 static void fb_dma_disable(unsigned int chan, dma_t *dma)
48 dma[_DC21285_DMA(0)].d_ops = &fb_dma_ops; fb_dma_init()
49 dma[_DC21285_DMA(1)].d_ops = &fb_dma_ops; fb_dma_init()
Makefile
7 obj-y := common.o dma.o isa-irq.o
/linux-4.1.27/sound/core/
isadma.c
31 #include <asm/dma.h>
35 * @dma: the dma number
42 void snd_dma_program(unsigned long dma, snd_dma_program() argument
49 disable_dma(dma); snd_dma_program()
50 clear_dma_ff(dma); snd_dma_program()
51 set_dma_mode(dma, mode); snd_dma_program()
52 set_dma_addr(dma, addr); snd_dma_program()
53 set_dma_count(dma, size); snd_dma_program()
55 enable_dma(dma); snd_dma_program()
63 * @dma: the dma number
67 void snd_dma_disable(unsigned long dma) snd_dma_disable() argument
72 clear_dma_ff(dma); snd_dma_disable()
73 disable_dma(dma); snd_dma_disable()
81 * @dma: the dma number
82 * @size: the dma transfer size
86 unsigned int snd_dma_pointer(unsigned long dma, unsigned int size) snd_dma_pointer() argument
92 clear_dma_ff(dma); snd_dma_pointer()
94 disable_dma(dma); snd_dma_pointer()
95 result = get_dma_residue(dma); snd_dma_pointer()
101 result1 = get_dma_residue(dma); snd_dma_pointer()
103 enable_dma(dma); snd_dma_pointer()
109 pr_err("ALSA: pointer (0x%x) for DMA #%ld is greater than transfer size (0x%x)\n", result, dma, size); snd_dma_pointer()
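snd_dma_program() above is the irq-safe wrapper ISA sound drivers use around the raw set_dma_*() calls. A minimal usage sketch for a cyclic playback ring, assuming an ISA-reachable buffer and that the platform's asm/dma.h provides DMA_MODE_WRITE and DMA_AUTOINIT (an assumption worth checking per architecture):

#include <sound/core.h>
#include <asm/dma.h>

/* Start cyclic (auto-init) memory -> device playback on an ISA channel. */
static void sketch_start_playback(unsigned long chan, unsigned long phys,
                                  unsigned int bytes)
{
        snd_dma_program(chan, phys, bytes, DMA_MODE_WRITE | DMA_AUTOINIT);
}

/* Hardware position inside the ring, e.g. for a PCM .pointer callback. */
static unsigned int sketch_pointer(unsigned long chan, unsigned int bytes)
{
        return snd_dma_pointer(chan, bytes);
}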
/linux-4.1.27/drivers/gpu/drm/
drm_dma.c
60 dev->dma = kzalloc(sizeof(*dev->dma), GFP_KERNEL); drm_legacy_dma_setup()
61 if (!dev->dma) drm_legacy_dma_setup()
65 memset(&dev->dma->bufs[i], 0, sizeof(dev->dma->bufs[0])); drm_legacy_dma_setup()
76 * finally the drm_device::dma structure itself.
80 struct drm_device_dma *dma = dev->dma; drm_legacy_dma_takedown() local
88 if (!dma) drm_legacy_dma_takedown()
91 /* Clear dma buffers */ drm_legacy_dma_takedown()
93 if (dma->bufs[i].seg_count) { drm_legacy_dma_takedown()
97 dma->bufs[i].buf_count, drm_legacy_dma_takedown()
98 dma->bufs[i].seg_count); drm_legacy_dma_takedown()
99 for (j = 0; j < dma->bufs[i].seg_count; j++) { drm_legacy_dma_takedown()
100 if (dma->bufs[i].seglist[j]) { drm_legacy_dma_takedown()
101 drm_pci_free(dev, dma->bufs[i].seglist[j]); drm_legacy_dma_takedown()
104 kfree(dma->bufs[i].seglist); drm_legacy_dma_takedown()
106 if (dma->bufs[i].buf_count) { drm_legacy_dma_takedown()
107 for (j = 0; j < dma->bufs[i].buf_count; j++) { drm_legacy_dma_takedown()
108 kfree(dma->bufs[i].buflist[j].dev_private); drm_legacy_dma_takedown()
110 kfree(dma->bufs[i].buflist); drm_legacy_dma_takedown()
114 kfree(dma->buflist); drm_legacy_dma_takedown()
115 kfree(dma->pagelist); drm_legacy_dma_takedown()
116 kfree(dev->dma); drm_legacy_dma_takedown()
117 dev->dma = NULL; drm_legacy_dma_takedown()
149 struct drm_device_dma *dma = dev->dma; drm_legacy_reclaim_buffers() local
152 if (!dma) drm_legacy_reclaim_buffers()
154 for (i = 0; i < dma->buf_count; i++) { drm_legacy_reclaim_buffers()
155 if (dma->buflist[i]->file_priv == file_priv) { drm_legacy_reclaim_buffers()
156 switch (dma->buflist[i]->list) { drm_legacy_reclaim_buffers()
158 drm_legacy_free_buffer(dev, dma->buflist[i]); drm_legacy_reclaim_buffers()
161 dma->buflist[i]->list = DRM_LIST_RECLAIM; drm_legacy_reclaim_buffers()
drm_bufs.c
600 struct drm_device_dma *dma = dev->dma; drm_legacy_addbufs_agp() local
616 if (!dma) drm_legacy_addbufs_agp()
664 entry = &dma->bufs[order]; drm_legacy_addbufs_agp()
691 buf->idx = dma->buf_count + entry->buf_count; drm_legacy_addbufs_agp()
696 buf->offset = (dma->byte_count + offset); drm_legacy_addbufs_agp()
724 temp_buflist = krealloc(dma->buflist, drm_legacy_addbufs_agp()
725 (dma->buf_count + entry->buf_count) * drm_legacy_addbufs_agp()
726 sizeof(*dma->buflist), GFP_KERNEL); drm_legacy_addbufs_agp()
734 dma->buflist = temp_buflist; drm_legacy_addbufs_agp()
737 dma->buflist[i + dma->buf_count] = &entry->buflist[i]; drm_legacy_addbufs_agp()
740 dma->buf_count += entry->buf_count; drm_legacy_addbufs_agp()
741 dma->seg_count += entry->seg_count; drm_legacy_addbufs_agp()
742 dma->page_count += byte_count >> PAGE_SHIFT; drm_legacy_addbufs_agp()
743 dma->byte_count += byte_count; drm_legacy_addbufs_agp()
745 DRM_DEBUG("dma->buf_count : %d\n", dma->buf_count); drm_legacy_addbufs_agp()
753 dma->flags = _DRM_DMA_USE_AGP; drm_legacy_addbufs_agp()
764 struct drm_device_dma *dma = dev->dma; drm_legacy_addbufs_pci() local
784 if (!dma) drm_legacy_addbufs_pci()
814 entry = &dma->bufs[order]; drm_legacy_addbufs_pci()
845 temp_pagelist = kmalloc((dma->page_count + (count << page_order)) * drm_legacy_addbufs_pci()
846 sizeof(*dma->pagelist), GFP_KERNEL); drm_legacy_addbufs_pci()
855 dma->pagelist, dma->page_count * sizeof(*dma->pagelist)); drm_legacy_addbufs_pci()
857 dma->page_count + (count << page_order)); drm_legacy_addbufs_pci()
881 dma->page_count + page_count, drm_legacy_addbufs_pci()
883 temp_pagelist[dma->page_count + page_count++] drm_legacy_addbufs_pci()
890 buf->idx = dma->buf_count + entry->buf_count; drm_legacy_addbufs_pci()
894 buf->offset = (dma->byte_count + byte_count + offset); drm_legacy_addbufs_pci()
922 temp_buflist = krealloc(dma->buflist, drm_legacy_addbufs_pci()
923 (dma->buf_count + entry->buf_count) * drm_legacy_addbufs_pci()
924 sizeof(*dma->buflist), GFP_KERNEL); drm_legacy_addbufs_pci()
933 dma->buflist = temp_buflist; drm_legacy_addbufs_pci()
936 dma->buflist[i + dma->buf_count] = &entry->buflist[i]; drm_legacy_addbufs_pci()
942 if (dma->page_count) { drm_legacy_addbufs_pci()
943 kfree(dma->pagelist); drm_legacy_addbufs_pci()
945 dma->pagelist = temp_pagelist; drm_legacy_addbufs_pci()
947 dma->buf_count += entry->buf_count; drm_legacy_addbufs_pci()
948 dma->seg_count += entry->seg_count; drm_legacy_addbufs_pci()
949 dma->page_count += entry->seg_count << page_order; drm_legacy_addbufs_pci()
950 dma->byte_count += PAGE_SIZE * (entry->seg_count << page_order); drm_legacy_addbufs_pci()
958 dma->flags = _DRM_DMA_USE_PCI_RO; drm_legacy_addbufs_pci()
969 struct drm_device_dma *dma = dev->dma; drm_legacy_addbufs_sg() local
987 if (!dma) drm_legacy_addbufs_sg()
1025 entry = &dma->bufs[order]; drm_legacy_addbufs_sg()
1053 buf->idx = dma->buf_count + entry->buf_count; drm_legacy_addbufs_sg()
1058 buf->offset = (dma->byte_count + offset); drm_legacy_addbufs_sg()
1087 temp_buflist = krealloc(dma->buflist, drm_legacy_addbufs_sg()
1088 (dma->buf_count + entry->buf_count) * drm_legacy_addbufs_sg()
1089 sizeof(*dma->buflist), GFP_KERNEL); drm_legacy_addbufs_sg()
1097 dma->buflist = temp_buflist; drm_legacy_addbufs_sg()
1100 dma->buflist[i + dma->buf_count] = &entry->buflist[i]; drm_legacy_addbufs_sg()
1103 dma->buf_count += entry->buf_count; drm_legacy_addbufs_sg()
1104 dma->seg_count += entry->seg_count; drm_legacy_addbufs_sg()
1105 dma->page_count += byte_count >> PAGE_SHIFT; drm_legacy_addbufs_sg()
1106 dma->byte_count += byte_count; drm_legacy_addbufs_sg()
1108 DRM_DEBUG("dma->buf_count : %d\n", dma->buf_count); drm_legacy_addbufs_sg()
1116 dma->flags = _DRM_DMA_USE_SG; drm_legacy_addbufs_sg()
1183 struct drm_device_dma *dma = dev->dma; drm_legacy_infobufs() local
1194 if (!dma) drm_legacy_infobufs()
1206 if (dma->bufs[i].buf_count) drm_legacy_infobufs()
1214 if (dma->bufs[i].buf_count) { drm_legacy_infobufs()
1217 struct drm_buf_entry *from = &dma->bufs[i]; drm_legacy_infobufs()
1234 dma->bufs[i].buf_count, drm_legacy_infobufs()
1235 dma->bufs[i].buf_size, drm_legacy_infobufs()
1236 dma->bufs[i].low_mark, drm_legacy_infobufs()
1237 dma->bufs[i].high_mark); drm_legacy_infobufs()
1264 struct drm_device_dma *dma = dev->dma; drm_legacy_markbufs() local
1275 if (!dma) drm_legacy_markbufs()
1283 entry = &dma->bufs[order]; drm_legacy_markbufs()
1311 struct drm_device_dma *dma = dev->dma; drm_legacy_freebufs() local
1323 if (!dma) drm_legacy_freebufs()
1330 if (idx < 0 || idx >= dma->buf_count) { drm_legacy_freebufs()
1332 idx, dma->buf_count - 1); drm_legacy_freebufs()
1335 buf = dma->buflist[idx]; drm_legacy_freebufs()
1364 struct drm_device_dma *dma = dev->dma; drm_legacy_mapbufs() local
1378 if (!dma) drm_legacy_mapbufs()
1389 if (request->count >= dma->buf_count) { drm_legacy_mapbufs()
1390 if ((dev->agp && (dma->flags & _DRM_DMA_USE_AGP)) drm_legacy_mapbufs()
1392 && (dma->flags & _DRM_DMA_USE_SG))) { drm_legacy_mapbufs()
1405 virtual = vm_mmap(file_priv->filp, 0, dma->byte_count, drm_legacy_mapbufs()
1416 for (i = 0; i < dma->buf_count; i++) { drm_legacy_mapbufs()
1418 &dma->buflist[i]->idx, drm_legacy_mapbufs()
1424 &dma->buflist[i]->total, drm_legacy_mapbufs()
1434 address = virtual + dma->buflist[i]->offset; /* *** */ drm_legacy_mapbufs()
1443 request->count = dma->buf_count; drm_legacy_mapbufs()
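Editor's note: the drm_bufs.c hits above repeat one idiom three times (AGP, PCI, SG): grow dma->buflist with krealloc() first, append the new entry pointers, and only then bump the counts, so a failed allocation leaves the old list intact. A minimal userspace analogue of that grow-then-commit pattern, with hypothetical names (this is a sketch, not kernel code):

    #include <stdlib.h>

    struct buf { int idx; };

    struct dev_dma {
        struct buf **buflist;
        int buf_count;
    };

    static int append_bufs(struct dev_dma *d, struct buf *entry, int n)
    {
        /* Grow first; on failure the old list is still valid. */
        struct buf **tmp = realloc(d->buflist,
                                   (d->buf_count + n) * sizeof(*tmp));
        int i;

        if (!tmp)
            return -1;
        d->buflist = tmp;
        for (i = 0; i < n; i++)
            d->buflist[d->buf_count + i] = &entry[i];
        d->buf_count += n;          /* commit the counts last */
        return 0;
    }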
/linux-4.1.27/drivers/soc/ti/
H A Dknav_dma.c20 #include <linux/dma-direction.h>
117 struct knav_dma_device *dma; member in struct:knav_dma_chan
213 /* wait for the dma to shut itself down */ chan_teardown()
241 /* teardown the dma channel */ chan_stop()
257 static void dma_hw_enable_all(struct knav_dma_device *dma) dma_hw_enable_all() argument
261 for (i = 0; i < dma->max_tx_chan; i++) { dma_hw_enable_all()
262 writel_relaxed(0, &dma->reg_tx_chan[i].mode); dma_hw_enable_all()
263 writel_relaxed(DMA_ENABLE, &dma->reg_tx_chan[i].control); dma_hw_enable_all()
268 static void knav_dma_hw_init(struct knav_dma_device *dma) knav_dma_hw_init() argument
273 spin_lock(&dma->lock); knav_dma_hw_init()
274 v = dma->loopback ? DMA_LOOPBACK : 0; knav_dma_hw_init()
275 writel_relaxed(v, &dma->reg_global->emulation_control); knav_dma_hw_init()
277 v = readl_relaxed(&dma->reg_global->perf_control); knav_dma_hw_init()
278 v |= ((dma->rx_timeout & DMA_RX_TIMEOUT_MASK) << DMA_RX_TIMEOUT_SHIFT); knav_dma_hw_init()
279 writel_relaxed(v, &dma->reg_global->perf_control); knav_dma_hw_init()
281 v = ((dma->tx_priority << DMA_TX_PRIO_SHIFT) | knav_dma_hw_init()
282 (dma->rx_priority << DMA_RX_PRIO_SHIFT)); knav_dma_hw_init()
284 writel_relaxed(v, &dma->reg_global->priority_control); knav_dma_hw_init()
287 for (i = 0; i < dma->max_rx_chan; i++) knav_dma_hw_init()
288 writel_relaxed(DMA_ENABLE, &dma->reg_rx_chan[i].control); knav_dma_hw_init()
290 for (i = 0; i < dma->logical_queue_managers; i++) knav_dma_hw_init()
291 writel_relaxed(dma->qm_base_address[i], knav_dma_hw_init()
292 &dma->reg_global->qm_base_address[i]); knav_dma_hw_init()
293 spin_unlock(&dma->lock); knav_dma_hw_init()
296 static void knav_dma_hw_destroy(struct knav_dma_device *dma) knav_dma_hw_destroy() argument
301 spin_lock(&dma->lock); knav_dma_hw_destroy()
304 for (i = 0; i < dma->max_rx_chan; i++) knav_dma_hw_destroy()
305 writel_relaxed(v, &dma->reg_rx_chan[i].control); knav_dma_hw_destroy()
307 for (i = 0; i < dma->max_tx_chan; i++) knav_dma_hw_destroy()
308 writel_relaxed(v, &dma->reg_tx_chan[i].control); knav_dma_hw_destroy()
309 spin_unlock(&dma->lock); knav_dma_hw_destroy()
341 struct knav_dma_device *dma) dma_debug_show_devices()
345 list_for_each_entry(chan, &dma->chan_list, list) { dma_debug_show_devices()
353 struct knav_dma_device *dma; dma_debug_show() local
355 list_for_each_entry(dma, &kdev->list, list) { dma_debug_show()
356 if (atomic_read(&dma->ref_count)) { dma_debug_show()
358 dma->name, dma->max_tx_chan, dma->max_rx_flow); dma_debug_show()
359 dma_debug_show_devices(s, dma); dma_debug_show()
390 index = of_property_match_string(np, "ti,navigator-dma-names", name); of_channel_match_helper()
392 dev_err(kdev->dev, "No 'ti,navigator-dma-names' property\n"); of_channel_match_helper()
414 * @config: dma configuration parameters
422 struct knav_dma_device *dma; knav_dma_open_channel() local
428 pr_err("keystone-navigator-dma driver not registered\n"); knav_dma_open_channel()
449 /* Look for correct dma instance */ knav_dma_open_channel()
450 list_for_each_entry(dma, &kdev->list, list) { knav_dma_open_channel()
451 if (!strcmp(dma->name, instance)) { knav_dma_open_channel()
461 /* Look for correct dma channel from dma instance */ knav_dma_open_channel()
463 list_for_each_entry(chan, &dma->chan_list, list) { knav_dma_open_channel()
490 if (atomic_inc_return(&chan->dma->ref_count) <= 1) knav_dma_open_channel()
491 knav_dma_hw_init(chan->dma); knav_dma_open_channel()
504 * knav_dma_close_channel() - Destroy a dma channel
506 * channel: dma channel handle
514 pr_err("keystone-navigator-dma driver not registered\n"); knav_dma_close_channel()
521 if (atomic_dec_return(&chan->dma->ref_count) <= 0) knav_dma_close_channel()
522 knav_dma_hw_destroy(chan->dma); knav_dma_close_channel()
525 chan->channel, chan->flow, chan->dma->name); knav_dma_close_channel()
529 static void __iomem *pktdma_get_regs(struct knav_dma_device *dma, pktdma_get_regs() argument
557 struct knav_dma_device *dma = chan->dma; pktdma_init_rx_chan() local
560 chan->reg_rx_flow = dma->reg_rx_flow + flow; pktdma_init_rx_chan()
569 struct knav_dma_device *dma = chan->dma; pktdma_init_tx_chan() local
572 chan->reg_chan = dma->reg_tx_chan + channel; pktdma_init_tx_chan()
573 chan->reg_tx_sched = dma->reg_tx_sched + channel; pktdma_init_tx_chan()
580 static int pktdma_init_chan(struct knav_dma_device *dma, pktdma_init_chan() argument
593 chan->dma = dma; pktdma_init_chan()
608 list_add_tail(&chan->list, &dma->chan_list); pktdma_init_chan()
617 struct knav_dma_device *dma; dma_init() local
623 dma = devm_kzalloc(kdev->dev, sizeof(*dma), GFP_KERNEL); dma_init()
624 if (!dma) { dma_init()
628 INIT_LIST_HEAD(&dma->list); dma_init()
629 INIT_LIST_HEAD(&dma->chan_list); dma_init()
636 dma->logical_queue_managers = len / sizeof(u32); dma_init()
637 if (dma->logical_queue_managers > DMA_MAX_QMS) { dma_init()
639 dma->logical_queue_managers); dma_init()
640 dma->logical_queue_managers = DMA_MAX_QMS; dma_init()
644 dma->qm_base_address, dma_init()
645 dma->logical_queue_managers); dma_init()
651 dma->reg_global = pktdma_get_regs(dma, node, 0, &size); dma_init()
652 if (!dma->reg_global) dma_init()
659 dma->reg_tx_chan = pktdma_get_regs(dma, node, 1, &size); dma_init()
660 if (!dma->reg_tx_chan) dma_init()
664 dma->reg_rx_chan = pktdma_get_regs(dma, node, 2, &size); dma_init()
665 if (!dma->reg_rx_chan) dma_init()
669 dma->reg_tx_sched = pktdma_get_regs(dma, node, 3, &size); dma_init()
670 if (!dma->reg_tx_sched) dma_init()
674 dma->reg_rx_flow = pktdma_get_regs(dma, node, 4, &size); dma_init()
675 if (!dma->reg_rx_flow) dma_init()
679 dma->rx_priority = DMA_PRIO_DEFAULT; dma_init()
680 dma->tx_priority = DMA_PRIO_DEFAULT; dma_init()
682 dma->enable_all = (of_get_property(node, "ti,enable-all", NULL) != NULL); dma_init()
683 dma->loopback = (of_get_property(node, "ti,loop-back", NULL) != NULL); dma_init()
692 dma->rx_timeout = timeout; dma_init()
693 dma->max_rx_chan = max_rx_chan; dma_init()
694 dma->max_rx_flow = max_rx_flow; dma_init()
695 dma->max_tx_chan = min(max_tx_chan, max_tx_sched); dma_init()
696 atomic_set(&dma->ref_count, 0); dma_init()
697 strcpy(dma->name, node->name); dma_init()
698 spin_lock_init(&dma->lock); dma_init()
700 for (i = 0; i < dma->max_tx_chan; i++) { dma_init()
701 if (pktdma_init_chan(dma, DMA_MEM_TO_DEV, i) >= 0) dma_init()
705 for (i = 0; i < dma->max_rx_flow; i++) { dma_init()
706 if (pktdma_init_chan(dma, DMA_DEV_TO_MEM, i) >= 0) dma_init()
710 list_add_tail(&dma->list, &kdev->list); dma_init()
716 if (dma->enable_all) { dma_init()
717 atomic_inc(&dma->ref_count); dma_init()
718 knav_dma_hw_init(dma); dma_init()
719 dma_hw_enable_all(dma); dma_init()
723 dma->name, num_chan, dma->max_rx_flow, dma_init()
724 dma->max_tx_chan, dma->max_rx_chan, dma_init()
725 dma->loopback ? ", loopback" : ""); dma_init()
769 dev_err(dev, "no valid dma instance\n");
781 struct knav_dma_device *dma; knav_dma_remove() local
783 list_for_each_entry(dma, &kdev->list, list) { knav_dma_remove()
784 if (atomic_dec_return(&dma->ref_count) == 0) knav_dma_remove()
785 knav_dma_hw_destroy(dma); knav_dma_remove()
795 { .compatible = "ti,keystone-navigator-dma", },
805 .name = "keystone-navigator-dma",
340 dma_debug_show_devices(struct seq_file *s, struct knav_dma_device *dma) dma_debug_show_devices() argument
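Editor's note: knav_dma_open_channel()/knav_dma_close_channel() above gate hardware setup on a reference count — the first opener runs knav_dma_hw_init(), the last closer runs knav_dma_hw_destroy(). A sketch of the same first-user/last-user idiom using C11 atomics in place of the kernel's atomic_t; hw_init()/hw_destroy() are hypothetical stand-ins:

    #include <stdatomic.h>

    static atomic_int ref_count;

    static void hw_init(void)    { /* program the hardware (stub) */ }
    static void hw_destroy(void) { /* quiesce the hardware (stub) */ }

    static void open_channel(void)
    {
        /* atomic_inc_return(&ref) <= 1 means the old count was 0 */
        if (atomic_fetch_add(&ref_count, 1) == 0)
            hw_init();
    }

    static void close_channel(void)
    {
        /* atomic_dec_return(&ref) <= 0 means the old count was 1 */
        if (atomic_fetch_sub(&ref_count, 1) == 1)
            hw_destroy();
    }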
/linux-4.1.27/arch/frv/mb93090-mb00/
H A DMakefile9 obj-y += pci-dma.o
11 obj-y += pci-dma-nommu.o
/linux-4.1.27/arch/blackfin/mach-bf561/
H A DMakefile5 obj-y := ints-priority.o dma.o
/linux-4.1.27/arch/blackfin/mach-bf609/
H A DMakefile5 obj-y := dma.o clock.o ints-priority.o
/linux-4.1.27/arch/arm/plat-pxa/
H A DMakefile5 obj-y := dma.o
/linux-4.1.27/drivers/media/pci/ivtv/
H A Divtv-udma.c37 int ivtv_udma_fill_sg_list (struct ivtv_user_dma *dma, struct ivtv_dma_page_info *dma_page, int map_offset) ivtv_udma_fill_sg_list() argument
52 if (PageHighMem(dma->map[map_offset])) { ivtv_udma_fill_sg_list()
55 if (dma->bouncemap[map_offset] == NULL) ivtv_udma_fill_sg_list()
56 dma->bouncemap[map_offset] = alloc_page(GFP_KERNEL); ivtv_udma_fill_sg_list()
57 if (dma->bouncemap[map_offset] == NULL) ivtv_udma_fill_sg_list()
60 src = kmap_atomic(dma->map[map_offset]) + offset; ivtv_udma_fill_sg_list()
61 memcpy(page_address(dma->bouncemap[map_offset]) + offset, src, len); ivtv_udma_fill_sg_list()
64 sg_set_page(&dma->SGlist[map_offset], dma->bouncemap[map_offset], len, offset); ivtv_udma_fill_sg_list()
67 sg_set_page(&dma->SGlist[map_offset], dma->map[map_offset], len, offset); ivtv_udma_fill_sg_list()
75 void ivtv_udma_fill_sg_array (struct ivtv_user_dma *dma, u32 buffer_offset, u32 buffer_offset_2, u32 split) { ivtv_udma_fill_sg_array() argument
79 for (i = 0, sg = dma->SGlist; i < dma->SG_length; i++, sg = sg_next(sg)) { ivtv_udma_fill_sg_array()
80 dma->SGarray[i].size = cpu_to_le32(sg_dma_len(sg)); ivtv_udma_fill_sg_array()
81 dma->SGarray[i].src = cpu_to_le32(sg_dma_address(sg)); ivtv_udma_fill_sg_array()
82 dma->SGarray[i].dst = cpu_to_le32(buffer_offset); ivtv_udma_fill_sg_array()
106 struct ivtv_user_dma *dma = &itv->udma; ivtv_udma_setup() local
112 if (dma->SG_length || dma->page_count) { ivtv_udma_setup()
114 dma->SG_length, dma->page_count); ivtv_udma_setup()
128 user_dma.uaddr, user_dma.page_count, 0, 1, dma->map); ivtv_udma_setup()
135 put_page(dma->map[i]); ivtv_udma_setup()
141 dma->page_count = user_dma.page_count; ivtv_udma_setup()
144 if (ivtv_udma_fill_sg_list(dma, &user_dma, 0) < 0) { ivtv_udma_setup()
145 for (i = 0; i < dma->page_count; i++) { ivtv_udma_setup()
146 put_page(dma->map[i]); ivtv_udma_setup()
148 dma->page_count = 0; ivtv_udma_setup()
153 dma->SG_length = pci_map_sg(itv->pdev, dma->SGlist, dma->page_count, PCI_DMA_TODEVICE); ivtv_udma_setup()
156 ivtv_udma_fill_sg_array (dma, ivtv_dest_addr, 0, -1); ivtv_udma_setup()
159 dma->SGarray[dma->SG_length - 1].size |= cpu_to_le32(0x80000000); ivtv_udma_setup()
162 return dma->page_count; ivtv_udma_setup()
167 struct ivtv_user_dma *dma = &itv->udma; ivtv_udma_unmap() local
173 if (dma->page_count == 0) ivtv_udma_unmap()
177 if (dma->SG_length) { ivtv_udma_unmap()
178 pci_unmap_sg(itv->pdev, dma->SGlist, dma->page_count, PCI_DMA_TODEVICE); ivtv_udma_unmap()
179 dma->SG_length = 0; ivtv_udma_unmap()
185 for (i = 0; i < dma->page_count; i++) { ivtv_udma_unmap()
186 put_page(dma->map[i]); ivtv_udma_unmap()
188 dma->page_count = 0; ivtv_udma_unmap()
H A Divtv-queue.h32 return s->dma == PCI_DMA_NONE || (SLICED_VBI_PIO && s->type == IVTV_ENC_STREAM_TYPE_VBI); ivtv_might_use_pio()
39 return s->dma == PCI_DMA_NONE || ivtv_use_pio()
45 return s->dma != PCI_DMA_NONE; ivtv_might_use_dma()
57 s->buf_size + 256, s->dma); ivtv_buf_sync_for_cpu()
64 s->buf_size + 256, s->dma); ivtv_buf_sync_for_device()
/linux-4.1.27/drivers/scsi/arm/
H A Dcumana_1.c34 void __iomem *dma
51 void __iomem *dma = priv(host)->dma + 0x2000; NCR5380_pwrite() local
66 v=*laddr++; writew(L(v), dma); writew(H(v), dma); NCR5380_pwrite() local
67 v=*laddr++; writew(L(v), dma); writew(H(v), dma); NCR5380_pwrite() local
68 v=*laddr++; writew(L(v), dma); writew(H(v), dma); NCR5380_pwrite() local
69 v=*laddr++; writew(L(v), dma); writew(H(v), dma); NCR5380_pwrite() local
70 v=*laddr++; writew(L(v), dma); writew(H(v), dma); NCR5380_pwrite() local
71 v=*laddr++; writew(L(v), dma); writew(H(v), dma); NCR5380_pwrite() local
72 v=*laddr++; writew(L(v), dma); writew(H(v), dma); NCR5380_pwrite() local
73 v=*laddr++; writew(L(v), dma); writew(H(v), dma); NCR5380_pwrite() local
90 writeb(*addr++, dma); NCR5380_pwrite()
100 writeb(*addr++, dma); NCR5380_pwrite()
114 void __iomem *dma = priv(host)->dma + 0x2000; NCR5380_pread() local
128 *laddr++ = readw(dma) | (readw(dma) << 16); NCR5380_pread()
129 *laddr++ = readw(dma) | (readw(dma) << 16); NCR5380_pread()
130 *laddr++ = readw(dma) | (readw(dma) << 16); NCR5380_pread()
131 *laddr++ = readw(dma) | (readw(dma) << 16); NCR5380_pread()
132 *laddr++ = readw(dma) | (readw(dma) << 16); NCR5380_pread()
133 *laddr++ = readw(dma) | (readw(dma) << 16); NCR5380_pread()
134 *laddr++ = readw(dma) | (readw(dma) << 16); NCR5380_pread()
135 *laddr++ = readw(dma) | (readw(dma) << 16); NCR5380_pread()
152 *addr++ = readb(dma); NCR5380_pread()
162 *addr++ = readb(dma); NCR5380_pread()
234 priv(host)->dma = ioremap(ecard_resource_start(ec, ECARD_RES_MEMC), cumanascsi1_probe()
236 if (!priv(host)->base || !priv(host)->dma) { cumanascsi1_probe()
273 iounmap(priv(host)->dma); cumanascsi1_probe()
291 iounmap(priv(host)->dma); cumanascsi1_remove()
H A Dpowertec.c19 #include <linux/dma-mapping.h>
21 #include <asm/dma.h>
136 int dmach = info->info.scsi.dma; powertecscsi_dma_setup()
176 if (info->info.scsi.dma != NO_DMA) powertecscsi_dma_stop()
177 disable_dma(info->info.scsi.dma); powertecscsi_dma_stop()
336 info->info.scsi.dma = ec->dma; powertecscsi_probe()
345 info->info.dma.setup = powertecscsi_dma_setup; powertecscsi_probe()
346 info->info.dma.pseudo = NULL; powertecscsi_probe()
347 info->info.dma.stop = powertecscsi_dma_stop; powertecscsi_probe()
368 if (info->info.scsi.dma != NO_DMA) { powertecscsi_probe()
369 if (request_dma(info->info.scsi.dma, "powertec")) { powertecscsi_probe()
371 host->host_no, info->info.scsi.dma); powertecscsi_probe()
372 info->info.scsi.dma = NO_DMA; powertecscsi_probe()
374 set_dma_speed(info->info.scsi.dma, 180); powertecscsi_probe()
383 if (info->info.scsi.dma != NO_DMA) powertecscsi_probe()
384 free_dma(info->info.scsi.dma); powertecscsi_probe()
411 if (info->info.scsi.dma != NO_DMA) powertecscsi_remove()
412 free_dma(info->info.scsi.dma); powertecscsi_remove()
/linux-4.1.27/drivers/media/pci/b2c2/
H A Dflexcop-dma.c3 * flexcop-dma.c - configuring and controlling the DMA of the FlexCop
9 struct flexcop_dma *dma, u32 size) flexcop_dma_allocate()
15 err("dma buffersize has to be even."); flexcop_dma_allocate()
20 dma->pdev = pdev; flexcop_dma_allocate()
21 dma->cpu_addr0 = tcpu; flexcop_dma_allocate()
22 dma->dma_addr0 = tdma; flexcop_dma_allocate()
23 dma->cpu_addr1 = tcpu + size/2; flexcop_dma_allocate()
24 dma->dma_addr1 = tdma + size/2; flexcop_dma_allocate()
25 dma->size = size/2; flexcop_dma_allocate()
32 void flexcop_dma_free(struct flexcop_dma *dma) flexcop_dma_free() argument
34 pci_free_consistent(dma->pdev, dma->size*2, flexcop_dma_free()
35 dma->cpu_addr0, dma->dma_addr0); flexcop_dma_free()
36 memset(dma,0,sizeof(struct flexcop_dma)); flexcop_dma_free()
41 struct flexcop_dma *dma, flexcop_dma_config()
47 v0x0.dma_0x0.dma_address0 = dma->dma_addr0 >> 2; flexcop_dma_config()
48 v0xc.dma_0xc.dma_address1 = dma->dma_addr1 >> 2; flexcop_dma_config()
49 v0x4.dma_0x4_write.dma_addr_size = dma->size / 4; flexcop_dma_config()
8 flexcop_dma_allocate(struct pci_dev *pdev, struct flexcop_dma *dma, u32 size) flexcop_dma_allocate() argument
40 flexcop_dma_config(struct flexcop_device *fc, struct flexcop_dma *dma, flexcop_dma_index_t dma_idx) flexcop_dma_config() argument
H A Dflexcop-pci.c57 struct flexcop_dma dma[2]; member in struct:flexcop_pci
172 fc_pci->dma[0].cpu_addr0, flexcop_pci_isr()
173 fc_pci->dma[0].size / 188); flexcop_pci_isr()
176 fc_pci->dma[0].cpu_addr1, flexcop_pci_isr()
177 fc_pci->dma[0].size / 188); flexcop_pci_isr()
186 u32 cur_pos = cur_addr - fc_pci->dma[0].dma_addr0; flexcop_pci_isr()
200 (fc_pci->dma[0].size*2 - 1) - flexcop_pci_isr()
203 fc_pci->dma[0].cpu_addr0 + flexcop_pci_isr()
205 (fc_pci->dma[0].size*2) - flexcop_pci_isr()
214 fc_pci->dma[0].cpu_addr0 + flexcop_pci_isr()
236 flexcop_dma_config(fc, &fc_pci->dma[0], FC_DMA_1); flexcop_pci_stream_control()
237 flexcop_dma_config(fc, &fc_pci->dma[1], FC_DMA_2); flexcop_pci_stream_control()
261 ret = flexcop_dma_allocate(fc_pci->pdev, &fc_pci->dma[0], flexcop_pci_dma_init()
266 ret = flexcop_dma_allocate(fc_pci->pdev, &fc_pci->dma[1], flexcop_pci_dma_init()
269 flexcop_dma_free(&fc_pci->dma[0]); flexcop_pci_dma_init()
284 flexcop_dma_free(&fc_pci->dma[0]); flexcop_pci_dma_exit()
285 flexcop_dma_free(&fc_pci->dma[1]); flexcop_pci_dma_exit()
381 /* init dma */ flexcop_pci_probe()
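Editor's note: flexcop_dma_allocate() above carves one coherent allocation into two equal halves (cpu_addr0/cpu_addr1, dma_addr0/dma_addr1) so the device can ping-pong between them — which is why an odd size is rejected. A userspace analogue with malloc(), hypothetical names:

    #include <stdlib.h>
    #include <stdint.h>

    struct twobuf {
        uint8_t *cpu0, *cpu1;   /* CPU views of the two halves */
        size_t half;            /* size of each half */
    };

    static int twobuf_alloc(struct twobuf *b, size_t size)
    {
        if (size & 1)           /* buffer size has to be even */
            return -1;
        b->cpu0 = malloc(size);
        if (!b->cpu0)
            return -1;
        b->cpu1 = b->cpu0 + size / 2;
        b->half = size / 2;
        return 0;
    }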
/linux-4.1.27/arch/cris/include/arch-v32/arch/hwregs/asm/
H A Ddma_defs_asm.h6 * file: ../../inst/dma/inst/dma_common/rtl/dma_regdes.r
10 * by /n/asic/design/tools/rdesc/src/rdes2c -asm --outfile asm/dma_defs_asm.h ../../inst/dma/inst/dma_common/rtl/dma_regdes.r
56 /* Register rw_data, scope dma, type rw */
59 /* Register rw_data_next, scope dma, type rw */
62 /* Register rw_data_buf, scope dma, type rw */
65 /* Register rw_data_ctrl, scope dma, type rw */
80 /* Register rw_data_stat, scope dma, type rw */
86 /* Register rw_data_md, scope dma, type rw */
91 /* Register rw_data_md_s, scope dma, type rw */
96 /* Register rw_data_after, scope dma, type rw */
99 /* Register rw_ctxt, scope dma, type rw */
102 /* Register rw_ctxt_next, scope dma, type rw */
105 /* Register rw_ctxt_ctrl, scope dma, type rw */
120 /* Register rw_ctxt_stat, scope dma, type rw */
126 /* Register rw_ctxt_md0, scope dma, type rw */
131 /* Register rw_ctxt_md0_s, scope dma, type rw */
136 /* Register rw_ctxt_md1, scope dma, type rw */
139 /* Register rw_ctxt_md1_s, scope dma, type rw */
142 /* Register rw_ctxt_md2, scope dma, type rw */
145 /* Register rw_ctxt_md2_s, scope dma, type rw */
148 /* Register rw_ctxt_md3, scope dma, type rw */
151 /* Register rw_ctxt_md3_s, scope dma, type rw */
154 /* Register rw_ctxt_md4, scope dma, type rw */
157 /* Register rw_ctxt_md4_s, scope dma, type rw */
160 /* Register rw_saved_data, scope dma, type rw */
163 /* Register rw_saved_data_buf, scope dma, type rw */
166 /* Register rw_group, scope dma, type rw */
169 /* Register rw_group_next, scope dma, type rw */
172 /* Register rw_group_ctrl, scope dma, type rw */
190 /* Register rw_group_stat, scope dma, type rw */
196 /* Register rw_group_md, scope dma, type rw */
201 /* Register rw_group_md_s, scope dma, type rw */
206 /* Register rw_group_up, scope dma, type rw */
209 /* Register rw_group_down, scope dma, type rw */
212 /* Register rw_cmd, scope dma, type rw */
218 /* Register rw_cfg, scope dma, type rw */
227 /* Register rw_stat, scope dma, type rw */
238 /* Register rw_intr_mask, scope dma, type rw */
256 /* Register rw_ack_intr, scope dma, type rw */
274 /* Register r_intr, scope dma, type r */
292 /* Register r_masked_intr, scope dma, type r */
310 /* Register rw_stream_cmd, scope dma, type rw */
/linux-4.1.27/arch/cris/include/arch-v32/arch/hwregs/
H A Ddma.h76 do { reg_dma_rw_cfg e = REG_RD( dma, inst, rw_cfg );\
78 REG_WR( dma, inst, rw_cfg, e); } while( 0 )
82 do { reg_dma_rw_cfg r = REG_RD( dma, inst, rw_cfg );\
84 REG_WR( dma, inst, rw_cfg, r); } while( 0 )
88 do { reg_dma_rw_cfg s = REG_RD( dma, inst, rw_cfg );\
90 REG_WR( dma, inst, rw_cfg, s); } while( 0 )
94 do { reg_dma_rw_cfg c = REG_RD( dma, inst, rw_cfg );\
96 REG_WR( dma, inst, rw_cfg, c); } while( 0 )
101 do { __x = REG_RD(dma, inst, rw_stream_cmd); } while (__x.busy); \
103 REG_WR(dma, inst, rw_stream_cmd, __x); \
108 do { REG_WR_INT( dma, inst, rw_group, (int) group_descr ); \
116 do { REG_WR_INT( dma, inst, rw_group_down, (int) ctx_descr ); \
125 REG_WR( dma, inst, rw_cmd, c ); } while( 0 )
H A Ddma_defs.h6 * file: ../../inst/dma/inst/dma_common/rtl/dma_regdes.r
10 * by /n/asic/design/tools/rdesc/src/rdes2c --outfile dma_defs.h ../../inst/dma/inst/dma_common/rtl/dma_regdes.r
85 /* C-code for register scope dma */
87 /* Register rw_data, scope dma, type rw */
92 /* Register rw_data_next, scope dma, type rw */
97 /* Register rw_data_buf, scope dma, type rw */
102 /* Register rw_data_ctrl, scope dma, type rw */
114 /* Register rw_data_stat, scope dma, type rw */
123 /* Register rw_data_md, scope dma, type rw */
131 /* Register rw_data_md_s, scope dma, type rw */
139 /* Register rw_data_after, scope dma, type rw */
144 /* Register rw_ctxt, scope dma, type rw */
149 /* Register rw_ctxt_next, scope dma, type rw */
154 /* Register rw_ctxt_ctrl, scope dma, type rw */
167 /* Register rw_ctxt_stat, scope dma, type rw */
176 /* Register rw_ctxt_md0, scope dma, type rw */
184 /* Register rw_ctxt_md0_s, scope dma, type rw */
192 /* Register rw_ctxt_md1, scope dma, type rw */
197 /* Register rw_ctxt_md1_s, scope dma, type rw */
202 /* Register rw_ctxt_md2, scope dma, type rw */
207 /* Register rw_ctxt_md2_s, scope dma, type rw */
212 /* Register rw_ctxt_md3, scope dma, type rw */
217 /* Register rw_ctxt_md3_s, scope dma, type rw */
222 /* Register rw_ctxt_md4, scope dma, type rw */
227 /* Register rw_ctxt_md4_s, scope dma, type rw */
232 /* Register rw_saved_data, scope dma, type rw */
237 /* Register rw_saved_data_buf, scope dma, type rw */
242 /* Register rw_group, scope dma, type rw */
247 /* Register rw_group_next, scope dma, type rw */
252 /* Register rw_group_ctrl, scope dma, type rw */
266 /* Register rw_group_stat, scope dma, type rw */
275 /* Register rw_group_md, scope dma, type rw */
283 /* Register rw_group_md_s, scope dma, type rw */
291 /* Register rw_group_up, scope dma, type rw */
296 /* Register rw_group_down, scope dma, type rw */
301 /* Register rw_cmd, scope dma, type rw */
309 /* Register rw_cfg, scope dma, type rw */
318 /* Register rw_stat, scope dma, type rw */
329 /* Register rw_intr_mask, scope dma, type rw */
341 /* Register rw_ack_intr, scope dma, type rw */
353 /* Register r_intr, scope dma, type r */
364 /* Register r_masked_intr, scope dma, type r */
375 /* Register rw_stream_cmd, scope dma, type rw */
/linux-4.1.27/sound/soc/sh/rcar/
H A Ddma.c30 #define rsnd_priv_to_dmac(p) ((struct rsnd_dma_ctrl *)(p)->dma)
37 struct rsnd_dma *dma = (struct rsnd_dma *)data; rsnd_dmaen_complete() local
38 struct rsnd_mod *mod = rsnd_dma_to_mod(dma); rsnd_dmaen_complete()
56 static void rsnd_dmaen_stop(struct rsnd_dma *dma) rsnd_dmaen_stop() argument
58 struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma); rsnd_dmaen_stop()
63 static void rsnd_dmaen_start(struct rsnd_dma *dma) rsnd_dmaen_start() argument
65 struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma); rsnd_dmaen_start()
66 struct rsnd_mod *mod = rsnd_dma_to_mod(dma); rsnd_dmaen_start()
87 desc->callback_param = dma; rsnd_dmaen_start()
131 static int rsnd_dmaen_init(struct rsnd_priv *priv, struct rsnd_dma *dma, int id, rsnd_dmaen_init() argument
134 struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma); rsnd_dmaen_init()
137 struct rsnd_mod *mod = rsnd_dma_to_mod(dma); rsnd_dmaen_init()
143 dev_err(dev, "it already has dma channel\n"); rsnd_dmaen_init()
160 dev_err(dev, "can't get dma channel\n"); rsnd_dmaen_init()
165 cfg.src_addr = dma->src_addr; rsnd_dmaen_init()
166 cfg.dst_addr = dma->dst_addr; rsnd_dmaen_init()
170 dev_dbg(dev, "dma : %pad -> %pad\n", rsnd_dmaen_init()
180 rsnd_dma_quit(dma); rsnd_dmaen_init()
192 static void rsnd_dmaen_quit(struct rsnd_dma *dma) rsnd_dmaen_quit() argument
194 struct rsnd_dmaen *dmaen = rsnd_dma_to_dmaen(dma); rsnd_dmaen_quit()
278 #define rsnd_dmapp_addr(dmac, dma, reg) \
280 (0x10 * rsnd_dma_to_dmapp(dma)->dmapp_id)) rsnd_dmapp_write()
281 static void rsnd_dmapp_write(struct rsnd_dma *dma, u32 data, u32 reg) rsnd_dmapp_write() argument
283 struct rsnd_mod *mod = rsnd_dma_to_mod(dma); rsnd_dmapp_write()
288 dev_dbg(dev, "w %p : %08x\n", rsnd_dmapp_addr(dmac, dma, reg), data); rsnd_dmapp_write()
290 iowrite32(data, rsnd_dmapp_addr(dmac, dma, reg)); rsnd_dmapp_write()
293 static u32 rsnd_dmapp_read(struct rsnd_dma *dma, u32 reg) rsnd_dmapp_read() argument
295 struct rsnd_mod *mod = rsnd_dma_to_mod(dma); rsnd_dmapp_read()
299 return ioread32(rsnd_dmapp_addr(dmac, dma, reg)); rsnd_dmapp_read()
302 static void rsnd_dmapp_stop(struct rsnd_dma *dma) rsnd_dmapp_stop() argument
306 rsnd_dmapp_write(dma, 0, PDMACHCR); rsnd_dmapp_stop()
309 if (0 == rsnd_dmapp_read(dma, PDMACHCR)) rsnd_dmapp_stop()
315 static void rsnd_dmapp_start(struct rsnd_dma *dma) rsnd_dmapp_start() argument
317 struct rsnd_dmapp *dmapp = rsnd_dma_to_dmapp(dma); rsnd_dmapp_start()
319 rsnd_dmapp_write(dma, dma->src_addr, PDMASAR); rsnd_dmapp_start()
320 rsnd_dmapp_write(dma, dma->dst_addr, PDMADAR); rsnd_dmapp_start()
321 rsnd_dmapp_write(dma, dmapp->chcr, PDMACHCR); rsnd_dmapp_start()
324 static int rsnd_dmapp_init(struct rsnd_priv *priv, struct rsnd_dma *dma, int id, rsnd_dmapp_init() argument
327 struct rsnd_dmapp *dmapp = rsnd_dma_to_dmapp(dma); rsnd_dmapp_init()
336 rsnd_dmapp_stop(dma); rsnd_dmapp_init()
339 dmapp->dmapp_id, &dma->src_addr, &dma->dst_addr, dmapp->chcr); rsnd_dmapp_init()
466 static void rsnd_dma_of_path(struct rsnd_dma *dma, rsnd_dma_of_path() argument
471 struct rsnd_mod *this = rsnd_dma_to_mod(dma); rsnd_dma_of_path()
527 void rsnd_dma_stop(struct rsnd_dma *dma) rsnd_dma_stop() argument
529 dma->ops->stop(dma); rsnd_dma_stop()
532 void rsnd_dma_start(struct rsnd_dma *dma) rsnd_dma_start() argument
534 dma->ops->start(dma); rsnd_dma_start()
537 void rsnd_dma_quit(struct rsnd_dma *dma) rsnd_dma_quit() argument
539 struct rsnd_mod *mod = rsnd_dma_to_mod(dma); rsnd_dma_quit()
546 dma->ops->quit(dma); rsnd_dma_quit()
549 int rsnd_dma_init(struct rsnd_priv *priv, struct rsnd_dma *dma, int id) rsnd_dma_init() argument
551 struct rsnd_mod *mod = rsnd_dma_to_mod(dma); rsnd_dma_init()
567 rsnd_dma_of_path(dma, is_play, &mod_from, &mod_to); rsnd_dma_init()
569 dma->src_addr = rsnd_dma_addr(priv, mod_from, is_play, 1); rsnd_dma_init()
570 dma->dst_addr = rsnd_dma_addr(priv, mod_to, is_play, 0); rsnd_dma_init()
574 dma->ops = &rsnd_dmapp_ops; rsnd_dma_init()
576 dma->ops = &rsnd_dmaen_ops; rsnd_dma_init()
580 dma->ops = &rsnd_dmaen_ops; rsnd_dma_init()
582 return dma->ops->init(priv, dma, id, mod_from, mod_to); rsnd_dma_init()
605 dev_err(dev, "dma allocate failed\n"); rsnd_dma_probe()
614 priv->dma = dmac; rsnd_dma_probe()
H A DMakefile1 snd-soc-rcar-objs := core.o gen.o dma.o src.o adg.o ssi.o dvc.o
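Editor's note: rsnd_dma_init() above selects an ops table once (rsnd_dmapp_ops for the peri-peri path, rsnd_dmaen_ops for dmaengine) and rsnd_dma_start()/stop()/quit() simply dispatch through it. A sketch of that function-table idiom, hypothetical names:

    struct dma;

    struct dma_ops {
        void (*start)(struct dma *d);
        void (*stop)(struct dma *d);
    };

    struct dma {
        const struct dma_ops *ops;  /* chosen once at init time */
    };

    static void dma_start(struct dma *d) { d->ops->start(d); }
    static void dma_stop(struct dma *d)  { d->ops->stop(d); }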
/linux-4.1.27/drivers/gpu/drm/nouveau/
H A Dnouveau_dma.c35 mem = &mem[chan->dma.cur]; OUT_RINGp()
40 chan->dma.cur += nr_dwords; OUT_RINGp()
75 val > chan->push.vma.offset + (chan->dma.max << 2)) READ_GET()
88 int ip = (chan->dma.ib_put * 2) + chan->dma.ib_base; nv50_dma_push()
95 BUG_ON(chan->dma.ib_free < 1); nv50_dma_push()
100 chan->dma.ib_put = (chan->dma.ib_put + 1) & chan->dma.ib_max; nv50_dma_push()
106 nvif_wr32(chan, 0x8c, chan->dma.ib_put); nv50_dma_push()
107 chan->dma.ib_free--; nv50_dma_push()
115 while (chan->dma.ib_free < count) { nv50_dma_push_wait()
128 chan->dma.ib_free = get - chan->dma.ib_put; nv50_dma_push_wait()
129 if (chan->dma.ib_free <= 0) nv50_dma_push_wait()
130 chan->dma.ib_free += chan->dma.ib_max; nv50_dma_push_wait()
146 while (chan->dma.free < count) { nv50_dma_wait()
155 if (get <= chan->dma.cur) { nv50_dma_wait()
156 chan->dma.free = chan->dma.max - chan->dma.cur; nv50_dma_wait()
157 if (chan->dma.free >= count) nv50_dma_wait()
169 chan->dma.cur = 0; nv50_dma_wait()
170 chan->dma.put = 0; nv50_dma_wait()
173 chan->dma.free = get - chan->dma.cur - 1; nv50_dma_wait()
185 if (chan->dma.ib_max) nouveau_dma_wait()
188 while (chan->dma.free < size) { nouveau_dma_wait()
205 if (get <= chan->dma.cur) { nouveau_dma_wait()
219 chan->dma.free = chan->dma.max - chan->dma.cur; nouveau_dma_wait()
220 if (chan->dma.free >= size) nouveau_dma_wait()
246 chan->dma.cur = nouveau_dma_wait()
247 chan->dma.put = NOUVEAU_DMA_SKIPS; nouveau_dma_wait()
256 chan->dma.free = get - chan->dma.cur - 1; nouveau_dma_wait()
H A Dnouveau_dma.h97 chan->dma.free -= size; RING_SPACE()
104 nouveau_bo_wr32(chan->push.buffer, chan->dma.cur++, data); OUT_RING()
149 if (chan->dma.cur == chan->dma.put) FIRE_RING()
153 if (chan->dma.ib_max) { FIRE_RING()
154 nv50_dma_push(chan, chan->push.buffer, chan->dma.put << 2, FIRE_RING()
155 (chan->dma.cur - chan->dma.put) << 2); FIRE_RING()
157 WRITE_PUT(chan->dma.cur); FIRE_RING()
160 chan->dma.put = chan->dma.cur; FIRE_RING()
166 chan->dma.cur = chan->dma.put; WIND_RING()
H A Dnouveau_chan.c104 /* allocate memory for dma push buffer */ nouveau_channel_prep()
122 /* create dma object covering the *entire* memory space that the nouveau_channel_prep()
202 /* allocate dma push buffer */ nouveau_channel_ind()
255 /* allocate dma push buffer */ nouveau_channel_dma()
292 /* allocate dma objects to cover all allowed vram, and gart */ nouveau_channel_init()
338 /* initialise dma tracking parameters */ nouveau_channel_init()
344 chan->dma.max = (0x10000 / 4) - 2; nouveau_channel_init()
350 chan->dma.ib_base = 0x10000 / 4; nouveau_channel_init()
351 chan->dma.ib_max = (0x02000 / 8) - 1; nouveau_channel_init()
352 chan->dma.ib_put = 0; nouveau_channel_init()
353 chan->dma.ib_free = chan->dma.ib_max - chan->dma.ib_put; nouveau_channel_init()
354 chan->dma.max = chan->dma.ib_base; nouveau_channel_init()
358 chan->dma.put = 0; nouveau_channel_init()
359 chan->dma.cur = chan->dma.put; nouveau_channel_init()
360 chan->dma.free = chan->dma.max - chan->dma.cur; nouveau_channel_init()
411 NV_PRINTK(debug, cli, "dma channel create, %d\n", ret); nouveau_channel_new()
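Editor's note: the nouveau ring bookkeeping above wraps negative producer/consumer differences, as in the ib_free update in nv50_dma_push_wait() (the dma.free path additionally reserves one guard slot: free = get - cur - 1). A standalone sketch of the ib_free arithmetic, hypothetical names:

    /* Slots the writer at 'put' may fill before catching the reader
     * at 'get' on a ring of 'max' entries. */
    static int ring_free(int get, int put, int max)
    {
        int free = get - put;
        if (free <= 0)
            free += max;    /* reader is past the wrap point */
        return free;
    }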
/linux-4.1.27/drivers/base/
H A DMakefile9 obj-$(CONFIG_DMA_CMA) += dma-contiguous.o
11 obj-$(CONFIG_HAS_DMA) += dma-mapping.o
12 obj-$(CONFIG_HAVE_GENERIC_DMA_COHERENT) += dma-coherent.o
/linux-4.1.27/arch/sparc/include/asm/
H A Dpci.h9 #include <asm-generic/pci-dma-compat.h>
/linux-4.1.27/arch/openrisc/kernel/
H A DMakefile7 obj-y := setup.o or32_ksyms.o process.o dma.o \
/linux-4.1.27/arch/arm64/mm/
H A DMakefile1 obj-y := dma-mapping.o extable.o fault.o init.o \
/linux-4.1.27/arch/avr32/include/asm/
H A Dpci.h8 #include <asm-generic/pci-dma-compat.h>
/linux-4.1.27/sound/soc/kirkwood/
H A DMakefile1 snd-soc-kirkwood-objs := kirkwood-dma.o kirkwood-i2s.o
/linux-4.1.27/arch/frv/kernel/
H A Ddma.c0 /* dma.c: DMA controller management on FR401 and the like
17 #include <asm/dma.h>
177 int dma, ret; frv_dma_open() local
184 for (dma = FRV_DMA_NCHANS - 1; dma >= 0; dma--) { frv_dma_open()
185 channel = &frv_dma_channels[dma]; frv_dma_open()
187 if (!test_bit(dma, &dmamask)) frv_dma_open()
193 if (!frv_dma_channels[dma].flags) frv_dma_open()
205 channel = &frv_dma_channels[dma]; frv_dma_open()
227 ret = dma; frv_dma_open()
239 void frv_dma_close(int dma) frv_dma_close() argument
241 struct frv_dma_channel *channel = &frv_dma_channels[dma]; frv_dma_close()
247 frv_dma_stop(dma); frv_dma_close()
260 void frv_dma_config(int dma, unsigned long ccfr, unsigned long cctr, unsigned long apr) frv_dma_config() argument
262 unsigned long ioaddr = frv_dma_channels[dma].ioaddr; frv_dma_config()
277 void frv_dma_start(int dma, frv_dma_start() argument
281 unsigned long ioaddr = frv_dma_channels[dma].ioaddr; frv_dma_start()
292 frv_set_dma_inprogress(dma); frv_dma_start()
302 void frv_dma_restart_circular(int dma, unsigned long six) frv_dma_restart_circular() argument
304 unsigned long ioaddr = frv_dma_channels[dma].ioaddr; frv_dma_restart_circular()
311 frv_set_dma_inprogress(dma); frv_dma_restart_circular()
321 void frv_dma_stop(int dma) frv_dma_stop() argument
323 unsigned long ioaddr = frv_dma_channels[dma].ioaddr; frv_dma_stop()
332 frv_clear_dma_inprogress(dma); frv_dma_stop()
341 int is_frv_dma_interrupting(int dma) is_frv_dma_interrupting() argument
343 unsigned long ioaddr = frv_dma_channels[dma].ioaddr; is_frv_dma_interrupting()
355 void frv_dma_dump(int dma) frv_dma_dump() argument
357 unsigned long ioaddr = frv_dma_channels[dma].ioaddr; frv_dma_dump()
365 printk("DMA[%d] cstr=%lx pix=%lx six=%lx bcl=%lx\n", dma, cstr, pix, six, bcl); frv_dma_dump()
382 int dma; frv_dma_pause_all() local
386 for (dma = FRV_DMA_NCHANS - 1; dma >= 0; dma--) { frv_dma_pause_all()
387 channel = &frv_dma_channels[dma]; frv_dma_pause_all()
404 frv_clear_dma_inprogress(dma); frv_dma_pause_all()
423 int dma; frv_dma_resume_all() local
425 for (dma = FRV_DMA_NCHANS - 1; dma >= 0; dma--) { frv_dma_resume_all()
426 channel = &frv_dma_channels[dma]; frv_dma_resume_all()
441 frv_set_dma_inprogress(dma); frv_dma_resume_all()
452 * dma status clear
454 void frv_dma_status_clear(int dma) frv_dma_status_clear() argument
456 unsigned long ioaddr = frv_dma_channels[dma].ioaddr; frv_dma_status_clear()
H A DMakefile11 process.o traps.o ptrace.o signal.o dma.o \
/linux-4.1.27/sound/soc/au1x/
H A Ddma.c14 #include <linux/dma-mapping.h>
32 int dma; member in struct:audio_stream
105 disable_dma(stream->dma); au1000_dma_stop()
113 init_dma(stream->dma); au1000_dma_start()
114 if (get_dma_active_buffer(stream->dma) == 0) { au1000_dma_start()
115 clear_dma_done0(stream->dma); au1000_dma_start()
116 set_dma_addr0(stream->dma, stream->buffer->start); au1000_dma_start()
117 set_dma_count0(stream->dma, stream->period_size >> 1); au1000_dma_start()
118 set_dma_addr1(stream->dma, stream->buffer->next->start); au1000_dma_start()
119 set_dma_count1(stream->dma, stream->period_size >> 1); au1000_dma_start()
121 clear_dma_done1(stream->dma); au1000_dma_start()
122 set_dma_addr1(stream->dma, stream->buffer->start); au1000_dma_start()
123 set_dma_count1(stream->dma, stream->period_size >> 1); au1000_dma_start()
124 set_dma_addr0(stream->dma, stream->buffer->next->start); au1000_dma_start()
125 set_dma_count0(stream->dma, stream->period_size >> 1); au1000_dma_start()
127 enable_dma_buffers(stream->dma); au1000_dma_start()
128 start_dma(stream->dma); au1000_dma_start()
136 switch (get_dma_buffer_done(stream->dma)) { au1000_dma_interrupt()
139 clear_dma_done0(stream->dma); au1000_dma_interrupt()
140 set_dma_addr0(stream->dma, stream->buffer->next->start); au1000_dma_interrupt()
141 set_dma_count0(stream->dma, stream->period_size >> 1); au1000_dma_interrupt()
142 enable_dma_buffer0(stream->dma); au1000_dma_interrupt()
146 clear_dma_done1(stream->dma); au1000_dma_interrupt()
147 set_dma_addr1(stream->dma, stream->buffer->next->start); au1000_dma_interrupt()
148 set_dma_count1(stream->dma, stream->period_size >> 1); au1000_dma_interrupt()
149 enable_dma_buffer1(stream->dma); au1000_dma_interrupt()
152 pr_debug("DMA %d missed interrupt.\n", stream->dma); au1000_dma_interrupt()
157 pr_debug("DMA %d empty irq.\n", stream->dma); au1000_dma_interrupt()
199 ctx->stream[s].dma = request_au1000_dma(dmaids[s], name, alchemy_pcm_open()
202 set_dma_mode(ctx->stream[s].dma, alchemy_pcm_open()
203 get_dma_mode(ctx->stream[s].dma) & ~DMA_NC); alchemy_pcm_open()
218 free_au1000_dma(ctx->stream[stype].dma); alchemy_pcm_close()
273 location = get_dma_residue(stream->dma); alchemy_pcm_pointer()
327 .name = "alchemy-pcm-dma",
H A DMakefile7 snd-soc-au1x-dma-objs := dma.o
14 obj-$(CONFIG_SND_SOC_AU1XAUDIO) += snd-soc-au1x-dma.o
/linux-4.1.27/sound/pci/cs5535audio/
H A Dcs5535audio_pcm.c119 struct cs5535audio_dma *dma, cs5535audio_build_dma_packets()
131 if (dma->desc_buf.area == NULL) { cs5535audio_build_dma_packets()
135 &dma->desc_buf) < 0) cs5535audio_build_dma_packets()
137 dma->period_bytes = dma->periods = 0; cs5535audio_build_dma_packets()
140 if (dma->periods == periods && dma->period_bytes == period_bytes) cs5535audio_build_dma_packets()
146 desc_addr = (u32) dma->desc_buf.addr; cs5535audio_build_dma_packets()
149 &((struct cs5535audio_dma_desc *) dma->desc_buf.area)[i]; cs5535audio_build_dma_packets()
157 lastdesc = &((struct cs5535audio_dma_desc *) dma->desc_buf.area)[periods]; cs5535audio_build_dma_packets()
158 lastdesc->addr = cpu_to_le32((u32) dma->desc_buf.addr); cs5535audio_build_dma_packets()
164 dma->substream = substream; cs5535audio_build_dma_packets()
165 dma->period_bytes = period_bytes; cs5535audio_build_dma_packets()
166 dma->periods = periods; cs5535audio_build_dma_packets()
168 dma->ops->disable_dma(cs5535au); cs5535audio_build_dma_packets()
169 dma->ops->setup_prd(cs5535au, jmpprd_addr); cs5535audio_build_dma_packets()
237 struct cs5535audio_dma *dma, cs5535audio_clear_dma_packets()
240 snd_dma_free_pages(&dma->desc_buf); cs5535audio_clear_dma_packets()
241 dma->desc_buf.area = NULL; cs5535audio_clear_dma_packets()
242 dma->substream = NULL; cs5535audio_clear_dma_packets()
249 struct cs5535audio_dma *dma = substream->runtime->private_data; snd_cs5535audio_hw_params() local
256 dma->buf_addr = substream->runtime->dma_addr; snd_cs5535audio_hw_params()
257 dma->buf_bytes = params_buffer_bytes(hw_params); snd_cs5535audio_hw_params()
259 err = cs5535audio_build_dma_packets(cs5535au, dma, substream, snd_cs5535audio_hw_params()
263 dma->pcm_open_flag = 1; snd_cs5535audio_hw_params()
271 struct cs5535audio_dma *dma = substream->runtime->private_data; snd_cs5535audio_hw_free() local
273 if (dma->pcm_open_flag) { snd_cs5535audio_hw_free()
280 dma->pcm_open_flag = 0; snd_cs5535audio_hw_free()
282 cs5535audio_clear_dma_packets(cs5535au, dma, substream); snd_cs5535audio_hw_free()
296 struct cs5535audio_dma *dma = substream->runtime->private_data; snd_cs5535audio_trigger() local
302 dma->ops->pause_dma(cs5535au); snd_cs5535audio_trigger()
305 dma->ops->enable_dma(cs5535au); snd_cs5535audio_trigger()
308 dma->ops->enable_dma(cs5535au); snd_cs5535audio_trigger()
311 dma->ops->enable_dma(cs5535au); snd_cs5535audio_trigger()
314 dma->ops->disable_dma(cs5535au); snd_cs5535audio_trigger()
317 dma->ops->disable_dma(cs5535au); snd_cs5535audio_trigger()
333 struct cs5535audio_dma *dma; snd_cs5535audio_pcm_pointer() local
335 dma = substream->runtime->private_data; snd_cs5535audio_pcm_pointer()
336 curdma = dma->ops->read_dma_pntr(cs5535au); snd_cs5535audio_pcm_pointer()
337 if (curdma < dma->buf_addr) { snd_cs5535audio_pcm_pointer()
339 curdma, dma->buf_addr); snd_cs5535audio_pcm_pointer()
342 curdma -= dma->buf_addr; snd_cs5535audio_pcm_pointer()
343 if (curdma >= dma->buf_bytes) { snd_cs5535audio_pcm_pointer()
345 curdma, dma->buf_bytes); snd_cs5535audio_pcm_pointer()
118 cs5535audio_build_dma_packets(struct cs5535audio *cs5535au, struct cs5535audio_dma *dma, struct snd_pcm_substream *substream, unsigned int periods, unsigned int period_bytes) cs5535audio_build_dma_packets() argument
236 cs5535audio_clear_dma_packets(struct cs5535audio *cs5535au, struct cs5535audio_dma *dma, struct snd_pcm_substream *substream) cs5535audio_clear_dma_packets() argument
H A Dcs5535audio_pm.c68 struct cs5535audio_dma *dma = &cs5535au->dmas[i]; snd_cs5535audio_suspend() local
69 if (dma && dma->substream) snd_cs5535audio_suspend()
70 dma->saved_prd = dma->ops->read_prd(cs5535au); snd_cs5535audio_suspend()
99 /* set up rate regs, dma. actual initiation is done in trig */ snd_cs5535audio_resume()
101 struct cs5535audio_dma *dma = &cs5535au->dmas[i]; snd_cs5535audio_resume() local
102 if (dma && dma->substream) { snd_cs5535audio_resume()
103 dma->substream->ops->prepare(dma->substream); snd_cs5535audio_resume()
104 dma->ops->setup_prd(cs5535au, dma->saved_prd); snd_cs5535audio_resume()
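Editor's note: cs5535audio_build_dma_packets() above closes its descriptor table into a ring — after the per-period descriptors, a final jump descriptor points back at the table's own bus address, so the hardware loops without CPU help. A sketch of building such a table; names and field layout are hypothetical:

    #include <stdint.h>

    struct desc { uint32_t addr; uint32_t size; };

    /* 'tab' has room for periods + 1 entries; 'tab_addr' is the
     * device-visible address of the table itself. */
    static void build_prd_ring(struct desc *tab, uint32_t tab_addr,
                               uint32_t buf_addr, uint32_t period_bytes,
                               unsigned int periods)
    {
        unsigned int i;

        for (i = 0; i < periods; i++) {
            tab[i].addr = buf_addr + i * period_bytes;
            tab[i].size = period_bytes;
        }
        tab[periods].addr = tab_addr;   /* jump back to entry 0 */
        tab[periods].size = 0;
    }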
/linux-4.1.27/include/linux/platform_data/
H A Dmmc-atmel-mci.h4 #include <linux/platform_data/dma-atmel.h>
5 #include <linux/platform_data/dma-dw.h>
H A Dasoc-ux500-msp.h11 #include <linux/platform_data/dma-ste-dma40.h>
H A Dcrypto-atmel.h4 #include <linux/platform_data/dma-atmel.h>
H A Dcrypto-ux500.h10 #include <linux/platform_data/dma-ste-dma40.h>
H A Ds3c-hsotg.h28 * @dma: Whether to use DMA or not.
32 enum s3c_hsotg_dmamode dma; member in struct:s3c_hsotg_plat
H A Dvideo-mx3fb.h42 * @dma_dev: pointer to the dma-device, used for dma-slave connection
/linux-4.1.27/arch/cris/include/asm/
H A Ddma.h1 /* $Id: dma.h,v 1.2 2001/05/09 12:17:42 johana Exp $ */
6 #include <arch/dma.h>
/linux-4.1.27/arch/m68k/coldfire/
H A Ddma.c4 * dma.c -- Freescale ColdFire DMA support
13 #include <asm/dma.h>
/linux-4.1.27/arch/arm/mach-footbridge/include/mach/
H A Disa-dma.h2 * arch/arm/mach-footbridge/include/mach/isa-dma.h
14 * On CATS hardware we have an additional eight ISA dma channels
/linux-4.1.27/sound/soc/ux500/
H A DMakefile6 snd-soc-ux500-plat-dma-objs := ux500_pcm.o
7 obj-$(CONFIG_SND_SOC_UX500_PLAT_DMA) += snd-soc-ux500-plat-dma.o
/linux-4.1.27/drivers/staging/comedi/drivers/
H A Dcomedi_isadma.c19 #include <linux/dma-mapping.h>
20 #include <asm/dma.h>
105 * @dma: the ISA DMA to poll
109 unsigned int comedi_isadma_poll(struct comedi_isadma *dma) comedi_isadma_poll() argument
111 struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma]; comedi_isadma_poll()
168 struct comedi_isadma *dma = NULL; comedi_isadma_alloc() local
176 dma = kzalloc(sizeof(*dma), GFP_KERNEL); comedi_isadma_alloc()
177 if (!dma) comedi_isadma_alloc()
183 dma->desc = desc; comedi_isadma_alloc()
184 dma->n_desc = n_desc; comedi_isadma_alloc()
194 dma->chan = dma_chans[0]; comedi_isadma_alloc()
199 dma->chan2 = dma_chans[1]; comedi_isadma_alloc()
202 desc = &dma->desc[i]; comedi_isadma_alloc()
213 return dma; comedi_isadma_alloc()
216 comedi_isadma_free(dma); comedi_isadma_alloc()
223 * @dma: the ISA DMA to free
225 void comedi_isadma_free(struct comedi_isadma *dma) comedi_isadma_free() argument
230 if (!dma) comedi_isadma_free()
233 if (dma->desc) { comedi_isadma_free()
234 for (i = 0; i < dma->n_desc; i++) { comedi_isadma_free()
235 desc = &dma->desc[i]; comedi_isadma_free()
241 kfree(dma->desc); comedi_isadma_free()
243 if (dma->chan2 && dma->chan2 != dma->chan) comedi_isadma_free()
244 free_dma(dma->chan2); comedi_isadma_free()
245 if (dma->chan) comedi_isadma_free()
246 free_dma(dma->chan); comedi_isadma_free()
247 kfree(dma); comedi_isadma_free()
H A Dni_labpc_isadma.c30 /* size in bytes of dma buffer */
33 /* utility function that suggests a dma transfer size in bytes */ labpc_suggest_transfer_size()
64 struct comedi_isadma_desc *desc = &devpriv->dma->desc[0]; labpc_setup_dma()
84 struct comedi_isadma_desc *desc = &devpriv->dma->desc[0]; labpc_drain_dma()
94 * residue is the number of bytes left to be done on the dma labpc_drain_dma()
127 struct comedi_isadma_desc *desc = &devpriv->dma->desc[0]; handle_isa_dma()
134 /* clear dma tc interrupt */ handle_isa_dma()
144 * if a dma terminal count of external stop trigger labpc_handle_dma_status()
162 devpriv->dma = comedi_isadma_alloc(dev, 1, dma_chan, dma_chan, labpc_init_dma_chan()
173 comedi_isadma_free(devpriv->dma); labpc_free_dma_chan()
H A Dpcl816.c116 struct comedi_isadma *dma; member in struct:pcl816_private
127 struct comedi_isadma *dma = devpriv->dma; pcl816_ai_setup_dma() local
128 struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma]; pcl816_ai_setup_dma()
132 comedi_isadma_disable(dma->chan); pcl816_ai_setup_dma()
135 * Determine dma size based on the buffer maxsize plus the number of pcl816_ai_setup_dma()
256 struct comedi_isadma *dma = devpriv->dma; pcl816_interrupt() local
257 struct comedi_isadma_desc *desc = &dma->desc[dma->cur_dma]; pcl816_interrupt()
277 /* restart dma with the next buffer */ pcl816_interrupt()
278 dma->cur_dma = 1 - dma->cur_dma; pcl816_interrupt()
418 struct comedi_isadma *dma = devpriv->dma; pcl816_ai_cmd() local
436 /* setup and enable dma for the first buffer */ pcl816_ai_cmd()
437 dma->cur_dma = 0; pcl816_ai_cmd()
453 outb((dma->chan << 4) | dev->irq, pcl816_ai_cmd()
462 struct comedi_isadma *dma = devpriv->dma; pcl816_ai_poll() local
470 poll = comedi_isadma_poll(dma); pcl816_ai_poll()
473 desc = &dma->desc[dma->cur_dma]; pcl816_ai_poll()
595 devpriv->dma = comedi_isadma_alloc(dev, 2, dma_chan, dma_chan, pcl816_alloc_irq_and_dma()
597 if (!devpriv->dma) pcl816_alloc_irq_and_dma()
608 comedi_isadma_free(devpriv->dma); pcl816_free_dma()
/linux-4.1.27/drivers/gpu/drm/atmel-hlcdc/
H A Datmel_hlcdc_layer.c20 #include <linux/dma-mapping.h>
87 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; atmel_hlcdc_layer_update_apply() local
116 if (dma->status == ATMEL_HLCDC_LAYER_DISABLED) { atmel_hlcdc_layer_update_apply()
139 dma->status = ATMEL_HLCDC_LAYER_ENABLED; atmel_hlcdc_layer_update_apply()
163 dma->queue = fb_flip; atmel_hlcdc_layer_update_apply()
179 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; atmel_hlcdc_layer_irq() local
199 flip = dma->queue ? dma->queue : dma->cur; atmel_hlcdc_layer_irq()
265 atmel_hlcdc_layer_fb_flip_release_queue(layer, dma->cur); atmel_hlcdc_layer_irq()
266 dma->cur = dma->queue; atmel_hlcdc_layer_irq()
267 dma->queue = NULL; atmel_hlcdc_layer_irq()
271 atmel_hlcdc_layer_fb_flip_release_queue(layer, dma->cur); atmel_hlcdc_layer_irq()
272 dma->cur = NULL; atmel_hlcdc_layer_irq()
279 if (dma->queue) atmel_hlcdc_layer_irq()
281 dma->queue); atmel_hlcdc_layer_irq()
283 if (dma->cur) atmel_hlcdc_layer_irq()
285 dma->cur); atmel_hlcdc_layer_irq()
287 dma->cur = NULL; atmel_hlcdc_layer_irq()
288 dma->queue = NULL; atmel_hlcdc_layer_irq()
291 if (!dma->queue) { atmel_hlcdc_layer_irq()
294 if (!dma->cur) atmel_hlcdc_layer_irq()
295 dma->status = ATMEL_HLCDC_LAYER_DISABLED; atmel_hlcdc_layer_irq()
303 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; atmel_hlcdc_layer_disable() local
321 if (dma->cur) { atmel_hlcdc_layer_disable()
322 atmel_hlcdc_layer_fb_flip_release_queue(layer, dma->cur); atmel_hlcdc_layer_disable()
323 dma->cur = NULL; atmel_hlcdc_layer_disable()
326 if (dma->queue) { atmel_hlcdc_layer_disable()
327 atmel_hlcdc_layer_fb_flip_release_queue(layer, dma->queue); atmel_hlcdc_layer_disable()
328 dma->queue = NULL; atmel_hlcdc_layer_disable()
341 dma->status = ATMEL_HLCDC_LAYER_DISABLED; atmel_hlcdc_layer_disable()
348 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; atmel_hlcdc_layer_update_start() local
373 if (!dma->dscrs[i].status) { atmel_hlcdc_layer_update_start()
374 fb_flip->dscrs[j++] = &dma->dscrs[i]; atmel_hlcdc_layer_update_start()
375 dma->dscrs[i].status = atmel_hlcdc_layer_update_start()
497 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; atmel_hlcdc_layer_update_commit() local
518 if (!dma->queue) atmel_hlcdc_layer_update_commit()
530 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; atmel_hlcdc_layer_dma_init() local
534 dma->dscrs = dma_alloc_coherent(dev->dev, atmel_hlcdc_layer_dma_init()
536 sizeof(*dma->dscrs), atmel_hlcdc_layer_dma_init()
538 if (!dma->dscrs) atmel_hlcdc_layer_dma_init()
542 struct atmel_hlcdc_dma_channel_dscr *dscr = &dma->dscrs[i]; atmel_hlcdc_layer_dma_init()
553 struct atmel_hlcdc_layer_dma_channel *dma = &layer->dma; atmel_hlcdc_layer_dma_cleanup() local
557 struct atmel_hlcdc_dma_channel_dscr *dscr = &dma->dscrs[i]; atmel_hlcdc_layer_dma_cleanup()
563 sizeof(*dma->dscrs), dma->dscrs, atmel_hlcdc_layer_dma_cleanup()
564 dma->dscrs[0].next); atmel_hlcdc_layer_dma_cleanup()
/linux-4.1.27/drivers/net/ethernet/i825xx/
H A Dlib82596.c83 #include <linux/dma-mapping.h>
315 struct i596_dma *dma; member in struct:i596_private
369 static inline int wait_istat(struct net_device *dev, struct i596_dma *dma, int delcnt, char *str) wait_istat() argument
371 DMA_INV(dev, &(dma->iscp), sizeof(struct i596_iscp)); wait_istat()
372 while (--delcnt && dma->iscp.stat) { wait_istat()
374 DMA_INV(dev, &(dma->iscp), sizeof(struct i596_iscp)); wait_istat()
378 dev->name, str, SWAP16(dma->iscp.stat)); wait_istat()
385 static inline int wait_cmd(struct net_device *dev, struct i596_dma *dma, int delcnt, char *str) wait_cmd() argument
387 DMA_INV(dev, &(dma->scb), sizeof(struct i596_scb)); wait_cmd()
388 while (--delcnt && dma->scb.command) { wait_cmd()
390 DMA_INV(dev, &(dma->scb), sizeof(struct i596_scb)); wait_cmd()
395 SWAP16(dma->scb.status), wait_cmd()
396 SWAP16(dma->scb.command)); wait_cmd()
406 struct i596_dma *dma = lp->dma; i596_display_data() local
412 &dma->scp, dma->scp.sysbus, SWAP32(dma->scp.iscp)); i596_display_data()
414 &dma->iscp, SWAP32(dma->iscp.stat), SWAP32(dma->iscp.scb)); i596_display_data()
417 &dma->scb, SWAP16(dma->scb.status), SWAP16(dma->scb.command), i596_display_data()
418 SWAP16(dma->scb.cmd), SWAP32(dma->scb.rfd)); i596_display_data()
421 SWAP32(dma->scb.crc_err), SWAP32(dma->scb.align_err), i596_display_data()
422 SWAP32(dma->scb.resource_err), SWAP32(dma->scb.over_err), i596_display_data()
423 SWAP32(dma->scb.rcvdt_err), SWAP32(dma->scb.short_err)); i596_display_data()
454 DMA_INV(dev, dma, sizeof(struct i596_dma)); i596_display_data()
458 #define virt_to_dma(lp, v) ((lp)->dma_addr + (dma_addr_t)((unsigned long)(v)-(unsigned long)((lp)->dma)))
463 struct i596_dma *dma = lp->dma; init_rx_bufs() local
470 for (i = 0, rbd = dma->rbds; i < rx_ring_size; i++, rbd++) { init_rx_bufs()
487 lp->rbd_head = dma->rbds; init_rx_bufs()
488 rbd = dma->rbds + rx_ring_size - 1; init_rx_bufs()
489 rbd->v_next = dma->rbds; init_rx_bufs()
490 rbd->b_next = SWAP32(virt_to_dma(lp, dma->rbds)); init_rx_bufs()
494 for (i = 0, rfd = dma->rfds; i < rx_ring_size; i++, rfd++) { init_rx_bufs()
501 lp->rfd_head = dma->rfds; init_rx_bufs()
502 dma->scb.rfd = SWAP32(virt_to_dma(lp, dma->rfds)); init_rx_bufs()
503 rfd = dma->rfds; init_rx_bufs()
505 rfd->v_prev = dma->rfds + rx_ring_size - 1; init_rx_bufs()
506 rfd = dma->rfds + rx_ring_size - 1; init_rx_bufs()
507 rfd->v_next = dma->rfds; init_rx_bufs()
508 rfd->b_next = SWAP32(virt_to_dma(lp, dma->rfds)); init_rx_bufs()
511 DMA_WBACK_INV(dev, dma, sizeof(struct i596_dma)); init_rx_bufs()
521 for (i = 0, rbd = lp->dma->rbds; i < rx_ring_size; i++, rbd++) { remove_rx_bufs()
535 struct i596_dma *dma = lp->dma; rebuild_rx_bufs() local
541 dma->rfds[i].rbd = I596_NULL; rebuild_rx_bufs()
542 dma->rfds[i].cmd = SWAP16(CMD_FLEX); rebuild_rx_bufs()
544 dma->rfds[rx_ring_size-1].cmd = SWAP16(CMD_EOL|CMD_FLEX); rebuild_rx_bufs()
545 lp->rfd_head = dma->rfds; rebuild_rx_bufs()
546 dma->scb.rfd = SWAP32(virt_to_dma(lp, dma->rfds)); rebuild_rx_bufs()
547 lp->rbd_head = dma->rbds; rebuild_rx_bufs()
548 dma->rfds[0].rbd = SWAP32(virt_to_dma(lp, dma->rbds)); rebuild_rx_bufs()
550 DMA_WBACK_INV(dev, dma, sizeof(struct i596_dma)); rebuild_rx_bufs()
557 struct i596_dma *dma = lp->dma; init_i596_mem() local
567 dma->scp.sysbus = SYSBUS; init_i596_mem()
568 dma->scp.iscp = SWAP32(virt_to_dma(lp, &(dma->iscp))); init_i596_mem()
569 dma->iscp.scb = SWAP32(virt_to_dma(lp, &(dma->scb))); init_i596_mem()
570 dma->iscp.stat = SWAP32(ISCP_BUSY); init_i596_mem()
574 dma->scb.cmd = I596_NULL; init_i596_mem()
578 DMA_WBACK(dev, &(dma->scp), sizeof(struct i596_scp)); init_i596_mem()
579 DMA_WBACK(dev, &(dma->iscp), sizeof(struct i596_iscp)); init_i596_mem()
580 DMA_WBACK(dev, &(dma->scb), sizeof(struct i596_scb)); init_i596_mem()
582 mpu_port(dev, PORT_ALTSCP, virt_to_dma(lp, &dma->scp)); init_i596_mem()
584 if (wait_istat(dev, dma, 1000, "initialization timed out")) init_i596_mem()
598 dma->scb.command = 0; init_i596_mem()
599 DMA_WBACK(dev, &(dma->scb), sizeof(struct i596_scb)); init_i596_mem()
603 memcpy(dma->cf_cmd.i596_config, init_setup, 14); init_i596_mem()
604 dma->cf_cmd.cmd.command = SWAP16(CmdConfigure); init_i596_mem()
605 DMA_WBACK(dev, &(dma->cf_cmd), sizeof(struct cf_cmd)); init_i596_mem()
606 i596_add_cmd(dev, &dma->cf_cmd.cmd); init_i596_mem()
609 memcpy(dma->sa_cmd.eth_addr, dev->dev_addr, ETH_ALEN); init_i596_mem()
610 dma->sa_cmd.cmd.command = SWAP16(CmdSASetup); init_i596_mem()
611 DMA_WBACK(dev, &(dma->sa_cmd), sizeof(struct sa_cmd)); init_i596_mem()
612 i596_add_cmd(dev, &dma->sa_cmd.cmd); init_i596_mem()
615 dma->tdr_cmd.cmd.command = SWAP16(CmdTDR); init_i596_mem()
616 DMA_WBACK(dev, &(dma->tdr_cmd), sizeof(struct tdr_cmd)); init_i596_mem()
617 i596_add_cmd(dev, &dma->tdr_cmd.cmd); init_i596_mem()
621 if (wait_cmd(dev, dma, 1000, "timed out waiting to issue RX_START")) { init_i596_mem()
626 dma->scb.command = SWAP16(RX_START); init_i596_mem()
627 dma->scb.rfd = SWAP32(virt_to_dma(lp, dma->rfds)); init_i596_mem()
628 DMA_WBACK(dev, &(dma->scb), sizeof(struct i596_scb)); init_i596_mem()
633 if (wait_cmd(dev, dma, 1000, "RX_START not processed")) init_i596_mem()
779 lp->dma->scb.rfd = rfd->b_next; i596_rx()
832 wait_cmd(dev, lp->dma, 100, "i596_cleanup_cmd timed out"); i596_cleanup_cmd()
833 lp->dma->scb.cmd = I596_NULL; i596_cleanup_cmd()
834 DMA_WBACK(dev, &(lp->dma->scb), sizeof(struct i596_scb)); i596_cleanup_cmd()
846 wait_cmd(dev, lp->dma, 100, "i596_reset timed out"); i596_reset()
851 lp->dma->scb.command = SWAP16(CUC_ABORT | RX_ABORT); i596_reset()
852 DMA_WBACK(dev, &(lp->dma->scb), sizeof(struct i596_scb)); i596_reset()
856 wait_cmd(dev, lp->dma, 1000, "i596_reset 2 timed out"); i596_reset()
870 struct i596_dma *dma = lp->dma; i596_add_cmd() local
890 wait_cmd(dev, dma, 100, "i596_add_cmd timed out"); i596_add_cmd()
891 dma->scb.cmd = SWAP32(virt_to_dma(lp, &cmd->status)); i596_add_cmd()
892 dma->scb.command = SWAP16(CUC_START); i596_add_cmd()
893 DMA_WBACK(dev, &(dma->scb), sizeof(struct i596_scb)); i596_add_cmd()
957 lp->dma->scb.command = SWAP16(CUC_START | RX_START); i596_tx_timeout()
958 DMA_WBACK_INV(dev, &(lp->dma->scb), sizeof(struct i596_scb)); i596_tx_timeout()
987 tx_cmd = lp->dma->tx_cmds + lp->next_tx_cmd; i596_start_xmit()
988 tbd = lp->dma->tbds + lp->next_tx_cmd; i596_start_xmit()
1052 struct i596_dma *dma; i82596_probe() local
1066 dma = (struct i596_dma *) DMA_ALLOC(dev->dev.parent, i82596_probe()
1068 if (!dma) { i82596_probe()
1076 memset(dma, 0, sizeof(struct i596_dma)); i82596_probe()
1077 lp->dma = dma; i82596_probe()
1079 dma->scb.command = 0; i82596_probe()
1080 dma->scb.cmd = I596_NULL; i82596_probe()
1081 dma->scb.rfd = I596_NULL; i82596_probe()
1084 DMA_WBACK_INV(dev, dma, sizeof(struct i596_dma)); i82596_probe()
1089 (void *)dma, lp->dma_addr); i82596_probe()
1097 "%s: dma at 0x%p (%d bytes), lp->scb at 0x%p\n", i82596_probe()
1098 dev->name, dma, (int)sizeof(struct i596_dma), i82596_probe()
1099 &dma->scb)); i82596_probe()
1117 struct i596_dma *dma; i596_interrupt() local
1121 dma = lp->dma; i596_interrupt()
1125 wait_cmd(dev, dma, 100, "i596 interrupt, timeout"); i596_interrupt()
1126 status = SWAP16(dma->scb.status); i596_interrupt()
1258 dma->scb.cmd = SWAP32(virt_to_dma(lp, &lp->cmd_head->status)); i596_interrupt()
1259 DMA_WBACK_INV(dev, &dma->scb, sizeof(struct i596_scb)); i596_interrupt()
1282 wait_cmd(dev, dma, 100, "i596 interrupt, timeout"); i596_interrupt()
1283 dma->scb.command = SWAP16(ack_cmd); i596_interrupt()
1284 DMA_WBACK(dev, &dma->scb, sizeof(struct i596_scb)); i596_interrupt()
1292 wait_cmd(dev, dma, 100, "i596 interrupt, exit timeout"); i596_interrupt()
1309 dev->name, SWAP16(lp->dma->scb.status))); i596_close()
1313 wait_cmd(dev, lp->dma, 100, "close1 timed out"); i596_close()
1314 lp->dma->scb.command = SWAP16(CUC_ABORT | RX_ABORT); i596_close()
1315 DMA_WBACK(dev, &lp->dma->scb, sizeof(struct i596_scb)); i596_close()
1319 wait_cmd(dev, lp->dma, 100, "close2 timed out"); i596_close()
1337 struct i596_dma *dma = lp->dma; set_multicast_list() local
1348 !(dma->cf_cmd.i596_config[8] & 0x01)) { set_multicast_list()
1349 dma->cf_cmd.i596_config[8] |= 0x01; set_multicast_list()
1353 (dma->cf_cmd.i596_config[8] & 0x01)) { set_multicast_list()
1354 dma->cf_cmd.i596_config[8] &= ~0x01; set_multicast_list()
1358 (dma->cf_cmd.i596_config[11] & 0x20)) { set_multicast_list()
1359 dma->cf_cmd.i596_config[11] &= ~0x20; set_multicast_list()
1363 !(dma->cf_cmd.i596_config[11] & 0x20)) { set_multicast_list()
1364 dma->cf_cmd.i596_config[11] |= 0x20; set_multicast_list()
1368 if (dma->cf_cmd.cmd.command) set_multicast_list()
1373 dma->cf_cmd.cmd.command = SWAP16(CmdConfigure); set_multicast_list()
1374 DMA_WBACK_INV(dev, &dma->cf_cmd, sizeof(struct cf_cmd)); set_multicast_list()
1375 i596_add_cmd(dev, &dma->cf_cmd.cmd); set_multicast_list()
1391 cmd = &dma->mc_cmd; set_multicast_list()
1406 DMA_WBACK_INV(dev, &dma->mc_cmd, sizeof(struct mc_cmd)); set_multicast_list()
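The i82596 hits above all follow one handshake: the CPU writes an opcode into the shared system control block (SCB), flushes it toward device-visible memory with DMA_WBACK(), and wait_cmd() polls until the chip clears the field. A minimal sketch of that pattern, using the generic dma_cache_sync() call in place of the driver's private macros; struct scb_stub and the timeout policy are illustrative stand-ins, not the driver's real definitions:

#include <linux/delay.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

/* hypothetical stand-in for the driver's system control block */
struct scb_stub {
        u16 status;
        u16 command;
};

/*
 * Issue one command and poll for acceptance. Assumes "scb" lives in
 * memory obtained from dma_alloc_noncoherent(), which is what
 * dma_cache_sync() is defined for.
 */
static int issue_scb_command(struct device *dev, struct scb_stub *scb,
                             u16 command, int timeout_us)
{
        scb->command = command;
        /* flush the write toward the device, like the driver's DMA_WBACK() */
        dma_cache_sync(dev, scb, sizeof(*scb), DMA_TO_DEVICE);

        do {
                /* re-fetch device-written state, like DMA_INV() */
                dma_cache_sync(dev, scb, sizeof(*scb), DMA_FROM_DEVICE);
                if (!scb->command)
                        return 0;       /* chip has accepted the command */
                udelay(1);
        } while (--timeout_us > 0);

        return -ETIMEDOUT;              /* cf. wait_cmd()'s "timed out" messages */
}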
/linux-4.1.27/sound/soc/atmel/
H A DMakefile3 snd-soc-atmel-pcm-dma-objs := atmel-pcm-dma.o
7 obj-$(CONFIG_SND_ATMEL_SOC_DMA) += snd-soc-atmel-pcm-dma.o
/linux-4.1.27/sound/soc/sh/
H A DMakefile2 snd-soc-dma-sh7760-objs := dma-sh7760.o
3 obj-$(CONFIG_SND_SOC_PCM_SH7760) += snd-soc-dma-sh7760.o
/linux-4.1.27/drivers/media/platform/s5p-mfc/
H A Ds5p_mfc_opr.c46 b->virt = dma_alloc_coherent(dev, b->size, &b->dma, GFP_KERNEL); s5p_mfc_alloc_priv_buf()
53 mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma); s5p_mfc_alloc_priv_buf()
61 dma_free_coherent(dev, b->size, b->virt, b->dma); s5p_mfc_release_priv_buf()
63 b->dma = 0; s5p_mfc_release_priv_buf()
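s5p_mfc_alloc_priv_buf()/s5p_mfc_release_priv_buf() above are the canonical dma_alloc_coherent() pairing: one call returns both a CPU pointer and the matching device address, and the free must hand back the same size/vaddr/dma triple. A minimal sketch, with struct priv_buf as a hypothetical stand-in for the driver's buffer descriptor:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

struct priv_buf {
        void            *virt;
        dma_addr_t      dma;
        size_t          size;
};

static int priv_buf_alloc(struct device *dev, struct priv_buf *b, size_t size)
{
        b->size = size;
        /* returns a CPU pointer and fills in the device-visible address */
        b->virt = dma_alloc_coherent(dev, size, &b->dma, GFP_KERNEL);
        return b->virt ? 0 : -ENOMEM;
}

static void priv_buf_free(struct device *dev, struct priv_buf *b)
{
        dma_free_coherent(dev, b->size, b->virt, b->dma);
        b->virt = NULL;
        b->dma = 0;
}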
/linux-4.1.27/arch/unicore32/mm/
H A DMakefile8 obj-$(CONFIG_SWIOTLB) += dma-swiotlb.o
H A Ddma-swiotlb.c14 #include <linux/dma-mapping.h>
18 #include <asm/dma.h>
/linux-4.1.27/arch/blackfin/include/asm/
H A Dpci.h7 #include <asm-generic/pci-dma-compat.h>
/linux-4.1.27/arch/c6x/kernel/
H A DMakefile10 obj-y += soc.o dma.o
/linux-4.1.27/arch/cris/arch-v10/kernel/
H A DMakefile10 dma.o io_interface_mux.o
/linux-4.1.27/arch/frv/mm/
H A DMakefile9 mmu-context.o dma-alloc.o elf-fdpic.o
/linux-4.1.27/arch/ia64/include/asm/
H A Dswiotlb.h4 #include <linux/dma-mapping.h>
/linux-4.1.27/arch/unicore32/include/asm/
H A Ddma.h2 * linux/arch/unicore32/include/asm/dma.h
17 #include <asm-generic/dma.h>
/linux-4.1.27/arch/mips/bmips/
H A Ddma.c9 #define pr_fmt(fmt) "bmips-dma: " fmt
12 #include <linux/dma-direction.h>
13 #include <linux/dma-mapping.h>
20 #include <dma-coherence.h>
27 * If the "brcm,ubus" node has a "dma-ranges" property we will enable this
29 * very limited subset of "dma-ranges" support and it will probably be
88 data = of_get_property(np, "dma-ranges", &len); bmips_init_dma_ranges()
113 pr_err("error parsing dma-ranges property\n"); bmips_init_dma_ranges()
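bmips_init_dma_ranges() above pulls the raw "dma-ranges" property with of_get_property() and parses it by hand. A hedged sketch of that style of lookup, assuming the platform's fixed one-cell-per-field <child parent size> layout; generic code must instead honor the parent bus's #address-cells/#size-cells:

#include <linux/errno.h>
#include <linux/of.h>

static int count_dma_ranges(struct device_node *np)
{
        const __be32 *data;
        int len;

        data = of_get_property(np, "dma-ranges", &len);
        if (!data)
                return 0;               /* property absent: nothing to remap */
        if (len % (3 * sizeof(__be32)))
                return -EINVAL;         /* cf. "error parsing dma-ranges" above */

        return len / (3 * sizeof(__be32));      /* number of triplets */
}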
/linux-4.1.27/arch/arm/mach-pxa/include/mach/
H A Ddma.h2 * arch/arm/mach-pxa/include/mach/dma.h
20 #include <plat/dma.h>
/linux-4.1.27/sound/pci/au88x0/
H A Dau88x0_pcm.c240 int dma, type = VORTEX_PCM_TYPE(substream->pcm); snd_vortex_pcm_hw_params() local
243 vortex_adb_allocroute(chip, stream->dma, snd_vortex_pcm_hw_params()
248 dma = snd_vortex_pcm_hw_params()
253 if (dma < 0) { snd_vortex_pcm_hw_params()
255 return dma; snd_vortex_pcm_hw_params()
257 stream = substream->runtime->private_data = &chip->dma_adb[dma]; snd_vortex_pcm_hw_params()
260 vortex_adbdma_setbuffers(chip, dma, snd_vortex_pcm_hw_params()
277 stream->dma = substream->number; snd_vortex_pcm_hw_params()
304 vortex_adb_allocroute(chip, stream->dma, snd_vortex_pcm_hw_free()
313 vortex_wt_allocroute(chip, stream->dma, 0); snd_vortex_pcm_hw_free()
328 int dma = stream->dma, fmt, dir; snd_vortex_pcm_prepare() local
338 vortex_adbdma_setmode(chip, dma, 1, dir, fmt, snd_vortex_pcm_prepare()
340 vortex_adbdma_setstartbuffer(chip, dma, 0); snd_vortex_pcm_prepare()
342 vortex_adb_setsrc(chip, dma, runtime->rate, dir); snd_vortex_pcm_prepare()
346 vortex_wtdma_setmode(chip, dma, 1, fmt, 0, 0); snd_vortex_pcm_prepare()
348 vortex_wtdma_setstartbuffer(chip, dma, 0); snd_vortex_pcm_prepare()
360 int dma = stream->dma; snd_vortex_pcm_trigger() local
366 //printk(KERN_INFO "vortex: start %d\n", dma); snd_vortex_pcm_trigger()
369 vortex_adbdma_resetup(chip, dma); snd_vortex_pcm_trigger()
370 vortex_adbdma_startfifo(chip, dma); snd_vortex_pcm_trigger()
374 dev_info(chip->card->dev, "wt start %d\n", dma); snd_vortex_pcm_trigger()
375 vortex_wtdma_startfifo(chip, dma); snd_vortex_pcm_trigger()
381 //printk(KERN_INFO "vortex: stop %d\n", dma); snd_vortex_pcm_trigger()
384 vortex_adbdma_stopfifo(chip, dma); snd_vortex_pcm_trigger()
387 dev_info(chip->card->dev, "wt stop %d\n", dma); snd_vortex_pcm_trigger()
388 vortex_wtdma_stopfifo(chip, dma); snd_vortex_pcm_trigger()
393 //printk(KERN_INFO "vortex: pause %d\n", dma); snd_vortex_pcm_trigger()
395 vortex_adbdma_pausefifo(chip, dma); snd_vortex_pcm_trigger()
398 vortex_wtdma_pausefifo(chip, dma); snd_vortex_pcm_trigger()
402 //printk(KERN_INFO "vortex: resume %d\n", dma); snd_vortex_pcm_trigger()
404 vortex_adbdma_resumefifo(chip, dma); snd_vortex_pcm_trigger()
407 vortex_wtdma_resumefifo(chip, dma); snd_vortex_pcm_trigger()
423 int dma = stream->dma; snd_vortex_pcm_pointer() local
428 current_ptr = vortex_adbdma_getlinearpos(chip, dma); snd_vortex_pcm_pointer()
431 current_ptr = vortex_wtdma_getlinearpos(chip, dma); snd_vortex_pcm_pointer()
577 switch (vortex->dma_adb[p->dma].nr_ch) { snd_vortex_pcm_vol_put()
625 * same dma engine. WT uses it own separate dma engine which can't capture. */ snd_vortex_new_pcm()
688 chip->pcm_vol[i].dma = -1; snd_vortex_new_pcm()
H A Dau88x0_core.c1084 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_setfirstbuffer() local
1087 dma->dma_ctrl); vortex_adbdma_setfirstbuffer()
1092 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_setstartbuffer() local
1096 dma->period_real = dma->period_virt = sb; vortex_adbdma_setstartbuffer()
1103 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_setbuffers() local
1105 dma->period_bytes = psize; vortex_adbdma_setbuffers()
1106 dma->nr_periods = count; vortex_adbdma_setbuffers()
1108 dma->cfg0 = 0; vortex_adbdma_setbuffers()
1109 dma->cfg1 = 0; vortex_adbdma_setbuffers()
1114 dma->cfg1 |= 0x88000000 | 0x44000000 | 0x30000000 | (psize - 1); vortex_adbdma_setbuffers()
1117 snd_pcm_sgbuf_get_addr(dma->substream, psize * 3)); vortex_adbdma_setbuffers()
1120 dma->cfg0 |= 0x12000000; vortex_adbdma_setbuffers()
1121 dma->cfg1 |= 0x80000000 | 0x40000000 | ((psize - 1) << 0xc); vortex_adbdma_setbuffers()
1124 snd_pcm_sgbuf_get_addr(dma->substream, psize * 2)); vortex_adbdma_setbuffers()
1127 dma->cfg0 |= 0x88000000 | 0x44000000 | 0x10000000 | (psize - 1); vortex_adbdma_setbuffers()
1130 snd_pcm_sgbuf_get_addr(dma->substream, psize)); vortex_adbdma_setbuffers()
1133 dma->cfg0 |= 0x80000000 | 0x40000000 | ((psize - 1) << 0xc); vortex_adbdma_setbuffers()
1136 snd_pcm_sgbuf_get_addr(dma->substream, 0)); vortex_adbdma_setbuffers()
1141 dma->cfg0, dma->cfg1); vortex_adbdma_setbuffers()
1143 hwwrite(vortex->mmio, VORTEX_ADBDMA_BUFCFG0 + (adbdma << 3), dma->cfg0); vortex_adbdma_setbuffers()
1144 hwwrite(vortex->mmio, VORTEX_ADBDMA_BUFCFG1 + (adbdma << 3), dma->cfg1); vortex_adbdma_setbuffers()
1154 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_setmode() local
1156 dma->dma_unknown = stereo; vortex_adbdma_setmode()
1157 dma->dma_ctrl = vortex_adbdma_setmode()
1158 ((offset & OFFSET_MASK) | (dma->dma_ctrl & ~OFFSET_MASK)); vortex_adbdma_setmode()
1160 dma->dma_ctrl = vortex_adbdma_setmode()
1161 (dma->dma_ctrl & ~IE_MASK) | ((ie << IE_SHIFT) & IE_MASK); vortex_adbdma_setmode()
1163 dma->dma_ctrl = vortex_adbdma_setmode()
1164 (dma->dma_ctrl & ~DIR_MASK) | ((dir << DIR_SHIFT) & DIR_MASK); vortex_adbdma_setmode()
1165 dma->dma_ctrl = vortex_adbdma_setmode()
1166 (dma->dma_ctrl & ~FMT_MASK) | ((fmt << FMT_SHIFT) & FMT_MASK); vortex_adbdma_setmode()
1169 dma->dma_ctrl); vortex_adbdma_setmode()
1175 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_bufshift() local
1181 if (dma->nr_periods >= 4) vortex_adbdma_bufshift()
1182 delta = (page - dma->period_real) & 3; vortex_adbdma_bufshift()
1184 delta = (page - dma->period_real); vortex_adbdma_bufshift()
1186 delta += dma->nr_periods; vortex_adbdma_bufshift()
1192 if (dma->nr_periods > 4) { vortex_adbdma_bufshift()
1195 p = dma->period_virt + i + 4; vortex_adbdma_bufshift()
1196 if (p >= dma->nr_periods) vortex_adbdma_bufshift()
1197 p -= dma->nr_periods; vortex_adbdma_bufshift()
1199 pp = dma->period_real + i; vortex_adbdma_bufshift()
1202 //hwwrite(vortex->mmio, VORTEX_ADBDMA_BUFBASE+(((adbdma << 2)+pp) << 2), dma->table[p].addr); vortex_adbdma_bufshift()
1205 snd_pcm_sgbuf_get_addr(dma->substream, vortex_adbdma_bufshift()
1206 dma->period_bytes * p)); vortex_adbdma_bufshift()
1212 dma->period_virt += delta; vortex_adbdma_bufshift()
1213 dma->period_real = page; vortex_adbdma_bufshift()
1214 if (dma->period_virt >= dma->nr_periods) vortex_adbdma_bufshift()
1215 dma->period_virt -= dma->nr_periods; vortex_adbdma_bufshift()
1219 adbdma, dma->period_virt, dma->period_real, delta); vortex_adbdma_bufshift()
1226 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_resetup() local
1230 for (i=0 ; i < 4 && i < dma->nr_periods; i++) { vortex_adbdma_resetup()
1232 p = dma->period_virt + i; vortex_adbdma_resetup()
1233 if (p >= dma->nr_periods) vortex_adbdma_resetup()
1234 p -= dma->nr_periods; vortex_adbdma_resetup()
1236 pp = dma->period_real + i; vortex_adbdma_resetup()
1237 if (dma->nr_periods < 4) { vortex_adbdma_resetup()
1238 if (pp >= dma->nr_periods) vortex_adbdma_resetup()
1239 pp -= dma->nr_periods; vortex_adbdma_resetup()
1247 snd_pcm_sgbuf_get_addr(dma->substream, vortex_adbdma_resetup()
1248 dma->period_bytes * p)); vortex_adbdma_resetup()
1256 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_getlinearpos() local
1261 if (dma->nr_periods >= 4) vortex_adbdma_getlinearpos()
1262 delta = (page - dma->period_real) & 3; vortex_adbdma_getlinearpos()
1264 delta = (page - dma->period_real); vortex_adbdma_getlinearpos()
1266 delta += dma->nr_periods; vortex_adbdma_getlinearpos()
1268 return (dma->period_virt + delta) * dma->period_bytes vortex_adbdma_getlinearpos()
1269 + (temp & (dma->period_bytes - 1)); vortex_adbdma_getlinearpos()
1275 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_startfifo() local
1277 switch (dma->fifo_status) { vortex_adbdma_startfifo()
1280 dma->fifo_enabled ? 1 : 0); vortex_adbdma_startfifo()
1285 dma->dma_ctrl); vortex_adbdma_startfifo()
1286 vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown, vortex_adbdma_startfifo()
1288 dma->fifo_enabled ? 1 : 0, 0); vortex_adbdma_startfifo()
1291 vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown, vortex_adbdma_startfifo()
1293 dma->fifo_enabled ? 1 : 0, 0); vortex_adbdma_startfifo()
1296 dma->fifo_status = FIFO_START; vortex_adbdma_startfifo()
1301 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_resumefifo() local
1304 switch (dma->fifo_status) { vortex_adbdma_resumefifo()
1307 dma->dma_ctrl); vortex_adbdma_resumefifo()
1308 vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown, vortex_adbdma_resumefifo()
1310 dma->fifo_enabled ? 1 : 0, 0); vortex_adbdma_resumefifo()
1313 vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown, vortex_adbdma_resumefifo()
1315 dma->fifo_enabled ? 1 : 0, 0); vortex_adbdma_resumefifo()
1318 dma->fifo_status = FIFO_START; vortex_adbdma_resumefifo()
1323 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_pausefifo() local
1326 switch (dma->fifo_status) { vortex_adbdma_pausefifo()
1328 vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown, vortex_adbdma_pausefifo()
1333 dma->dma_ctrl); vortex_adbdma_pausefifo()
1334 vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown, vortex_adbdma_pausefifo()
1338 dma->fifo_status = FIFO_PAUSE; vortex_adbdma_pausefifo()
1343 stream_t *dma = &vortex->dma_adb[adbdma]; vortex_adbdma_stopfifo() local
1346 if (dma->fifo_status == FIFO_START) vortex_adbdma_stopfifo()
1347 vortex_fifo_setadbctrl(vortex, adbdma, dma->dma_unknown, vortex_adbdma_stopfifo()
1349 else if (dma->fifo_status == FIFO_STOP) vortex_adbdma_stopfifo()
1351 dma->fifo_status = FIFO_STOP; vortex_adbdma_stopfifo()
1352 dma->fifo_enabled = 0; vortex_adbdma_stopfifo()
1361 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_setfirstbuffer() local
1363 hwwrite(vortex->mmio, VORTEX_WTDMA_CTRL + (wtdma << 2), dma->dma_ctrl); vortex_wtdma_setfirstbuffer()
1368 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_setstartbuffer() local
1372 dma->period_real = dma->period_virt = sb; vortex_wtdma_setstartbuffer()
1379 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_setbuffers() local
1381 dma->period_bytes = psize; vortex_wtdma_setbuffers()
1382 dma->nr_periods = count; vortex_wtdma_setbuffers()
1384 dma->cfg0 = 0; vortex_wtdma_setbuffers()
1385 dma->cfg1 = 0; vortex_wtdma_setbuffers()
1390 dma->cfg1 |= 0x88000000 | 0x44000000 | 0x30000000 | (psize-1); vortex_wtdma_setbuffers()
1392 snd_pcm_sgbuf_get_addr(dma->substream, psize * 3)); vortex_wtdma_setbuffers()
1395 dma->cfg0 |= 0x12000000; vortex_wtdma_setbuffers()
1396 dma->cfg1 |= 0x80000000 | 0x40000000 | ((psize-1) << 0xc); vortex_wtdma_setbuffers()
1398 snd_pcm_sgbuf_get_addr(dma->substream, psize * 2)); vortex_wtdma_setbuffers()
1401 dma->cfg0 |= 0x88000000 | 0x44000000 | 0x10000000 | (psize-1); vortex_wtdma_setbuffers()
1403 snd_pcm_sgbuf_get_addr(dma->substream, psize)); vortex_wtdma_setbuffers()
1406 dma->cfg0 |= 0x80000000 | 0x40000000 | ((psize-1) << 0xc); vortex_wtdma_setbuffers()
1408 snd_pcm_sgbuf_get_addr(dma->substream, 0)); vortex_wtdma_setbuffers()
1411 hwwrite(vortex->mmio, VORTEX_WTDMA_BUFCFG0 + (wtdma << 3), dma->cfg0); vortex_wtdma_setbuffers()
1412 hwwrite(vortex->mmio, VORTEX_WTDMA_BUFCFG1 + (wtdma << 3), dma->cfg1); vortex_wtdma_setbuffers()
1422 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_setmode() local
1424 //dma->this_08 = e; vortex_wtdma_setmode()
1425 dma->dma_unknown = d; vortex_wtdma_setmode()
1426 dma->dma_ctrl = 0; vortex_wtdma_setmode()
1427 dma->dma_ctrl = vortex_wtdma_setmode()
1428 ((offset & OFFSET_MASK) | (dma->dma_ctrl & ~OFFSET_MASK)); vortex_wtdma_setmode()
1430 dma->dma_ctrl = vortex_wtdma_setmode()
1431 (dma->dma_ctrl & ~IE_MASK) | ((ie << IE_SHIFT) & IE_MASK); vortex_wtdma_setmode()
1433 dma->dma_ctrl |= (1 << DIR_SHIFT); vortex_wtdma_setmode()
1435 dma->dma_ctrl = vortex_wtdma_setmode()
1436 (dma->dma_ctrl & FMT_MASK) | ((fmt << FMT_SHIFT) & FMT_MASK); vortex_wtdma_setmode()
1438 hwwrite(vortex->mmio, VORTEX_WTDMA_CTRL + (wtdma << 2), dma->dma_ctrl); vortex_wtdma_setmode()
1443 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_bufshift() local
1450 if (dma->nr_periods >= 4) vortex_wtdma_bufshift()
1451 delta = (page - dma->period_real) & 3; vortex_wtdma_bufshift()
1453 delta = (page - dma->period_real); vortex_wtdma_bufshift()
1455 delta += dma->nr_periods; vortex_wtdma_bufshift()
1461 if (dma->nr_periods > 4) { vortex_wtdma_bufshift()
1464 p = dma->period_virt + i + 4; vortex_wtdma_bufshift()
1465 if (p >= dma->nr_periods) vortex_wtdma_bufshift()
1466 p -= dma->nr_periods; vortex_wtdma_bufshift()
1468 pp = dma->period_real + i; vortex_wtdma_bufshift()
1474 snd_pcm_sgbuf_get_addr(dma->substream, vortex_wtdma_bufshift()
1475 dma->period_bytes * p)); vortex_wtdma_bufshift()
1481 dma->period_virt += delta; vortex_wtdma_bufshift()
1482 if (dma->period_virt >= dma->nr_periods) vortex_wtdma_bufshift()
1483 dma->period_virt -= dma->nr_periods; vortex_wtdma_bufshift()
1484 dma->period_real = page; vortex_wtdma_bufshift()
1488 dma->period_virt, delta); vortex_wtdma_bufshift()
1511 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_getlinearpos() local
1515 temp = (dma->period_virt * dma->period_bytes) + (temp & (dma->period_bytes - 1)); vortex_wtdma_getlinearpos()
1521 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_startfifo() local
1524 switch (dma->fifo_status) { vortex_wtdma_startfifo()
1527 dma->fifo_enabled ? 1 : 0); vortex_wtdma_startfifo()
1532 dma->dma_ctrl); vortex_wtdma_startfifo()
1533 vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown, vortex_wtdma_startfifo()
1535 dma->fifo_enabled ? 1 : 0, 0); vortex_wtdma_startfifo()
1538 vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown, vortex_wtdma_startfifo()
1540 dma->fifo_enabled ? 1 : 0, 0); vortex_wtdma_startfifo()
1543 dma->fifo_status = FIFO_START; vortex_wtdma_startfifo()
1548 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_resumefifo() local
1551 switch (dma->fifo_status) { vortex_wtdma_resumefifo()
1554 dma->dma_ctrl); vortex_wtdma_resumefifo()
1555 vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown, vortex_wtdma_resumefifo()
1557 dma->fifo_enabled ? 1 : 0, 0); vortex_wtdma_resumefifo()
1560 vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown, vortex_wtdma_resumefifo()
1562 dma->fifo_enabled ? 1 : 0, 0); vortex_wtdma_resumefifo()
1565 dma->fifo_status = FIFO_START; vortex_wtdma_resumefifo()
1570 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_pausefifo() local
1573 switch (dma->fifo_status) { vortex_wtdma_pausefifo()
1575 vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown, vortex_wtdma_pausefifo()
1580 dma->dma_ctrl); vortex_wtdma_pausefifo()
1581 vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown, vortex_wtdma_pausefifo()
1585 dma->fifo_status = FIFO_PAUSE; vortex_wtdma_pausefifo()
1590 stream_t *dma = &vortex->dma_wt[wtdma]; vortex_wtdma_stopfifo() local
1593 if (dma->fifo_status == FIFO_START) vortex_wtdma_stopfifo()
1594 vortex_fifo_setwtctrl(vortex, wtdma, dma->dma_unknown, vortex_wtdma_stopfifo()
1596 else if (dma->fifo_status == FIFO_STOP) vortex_wtdma_stopfifo()
1598 dma->fifo_status = FIFO_STOP; vortex_wtdma_stopfifo()
1599 dma->fifo_enabled = 0; vortex_wtdma_stopfifo()
2004 a DMA resource (root of all other resources of a dma channel).
2107 Allocate nr_ch pcm audio routes if dma < 0. If dma >= 0, existing routes
2109 dma: DMA engine routes to be deallocated when dma >= 0.
2113 Return: Return allocated DMA or same DMA passed as "dma" when dma >= 0.
2116 vortex_adb_allocroute(vortex_t *vortex, int dma, int nr_ch, int dir, vortex_adb_allocroute() argument
2123 if (dma >= 0) { vortex_adb_allocroute()
2126 vortex->dma_adb[dma].resources, en, vortex_adb_allocroute()
2130 if ((dma = vortex_adb_allocroute()
2136 stream = &vortex->dma_adb[dma]; vortex_adb_allocroute()
2137 stream->dma = dma; vortex_adb_allocroute()
2203 dma, vortex_adb_allocroute()
2225 dma, vortex_adb_allocroute()
2229 //vortex_route(vortex, en, 0x11, dma, ADB_XTALKIN(i?9:4)); vortex_adb_allocroute()
2234 ADB_DMA(stream->dma), vortex_adb_allocroute()
2257 p->dma = dma; vortex_adb_allocroute()
2268 ADB_DMA(stream->dma), vortex_adb_allocroute()
2316 src[0], dma); vortex_adb_allocroute()
2324 src[1], dma); vortex_adb_allocroute()
2327 vortex->dma_adb[dma].nr_ch = nr_ch; vortex_adb_allocroute()
2348 return dma; vortex_adb_allocroute()
2450 dev_err(vortex->card->dev, "IRQ dma error\n"); vortex_interrupt()
/linux-4.1.27/sound/oss/
H A Dpas2_card.c186 if (hw_config->dma < 0 || hw_config->dma > 7) config_pas_hw()
188 printk(KERN_ERR "PAS16: Invalid DMA selection %d", hw_config->dma); config_pas_hw()
189 hw_config->dma=-1; config_pas_hw()
194 pas_write(dma_bits[hw_config->dma], 0xF389); config_pas_hw()
195 if (!dma_bits[hw_config->dma]) config_pas_hw()
197 printk(KERN_ERR "PAS16: Invalid DMA selection %d", hw_config->dma); config_pas_hw()
198 hw_config->dma=-1; config_pas_hw()
203 if (sound_alloc_dma(hw_config->dma, "PAS16")) config_pas_hw()
206 hw_config->dma=-1; config_pas_hw()
261 if (!sb_dma_bits[sb_config->dma]) config_pas_hw()
262 printk(KERN_ERR "PAS16 Warning: Invalid SB DMA %d\n\n", sb_config->dma); config_pas_hw()
267 irq_dma = sb_dma_bits[sb_config->dma] | config_pas_hw()
363 if (hw_config->dma>0) unload_pas()
364 sound_free_dma(hw_config->dma); unload_pas()
378 static int __initdata dma = -1; variable
388 module_param(dma, int, 0);
408 cfg.dma = dma; init_pas2()
413 cfg2.dma = sb_dma; init_pas2()
416 if (cfg.io_base == -1 || cfg.dma == -1 || cfg.irq == -1) { init_pas2()
439 /* io, irq, dma, dma2, sb_io, sb_irq, sb_dma, sb_dma2 */ setup_pas2()
446 dma = ints[3]; setup_pas2()
H A Dsb_card.c45 static int __initdata dma = -1; variable
68 module_param(dma, int, 000);
69 MODULE_PARM_DESC(dma, "8-bit DMA channel (0,1,3)");
148 legacy->conf.dma = dma; sb_init_legacy()
154 legacy->mpucnf.dma = -1; sb_init_legacy()
171 scc->conf.dma = -1; sb_dev2cfg()
175 scc->mpucnf.dma = -1; sb_dev2cfg()
183 scc->conf.dma = pnp_dma(dev,0); sb_dev2cfg()
191 scc->conf.dma = pnp_dma(dev,0); sb_dev2cfg()
198 scc->conf.dma = pnp_dma(dev,0); sb_dev2cfg()
206 scc->conf.dma = pnp_dma(dev,0); sb_dev2cfg()
213 scc->conf.dma = pnp_dma(dev,0); sb_dev2cfg()
220 scc->conf.dma = pnp_dma(dev,0); sb_dev2cfg()
224 scc->conf.dma = pnp_dma(dev,1); sb_dev2cfg()
232 scc->conf.dma = pnp_dma(dev,1); sb_dev2cfg()
264 "dma=%d, dma16=%d\n", scc->conf.io_base, scc->conf.irq, sb_pnp_probe()
265 scc->conf.dma, scc->conf.dma2); sb_pnp_probe()
309 if(io != -1 && irq != -1 && dma != -1) { sb_init()
311 "irq=%d, dma=%d, dma16=%d\n",io, irq, dma, dma16); sb_init()
313 } else if((io != -1 || irq != -1 || dma != -1) || sb_init()
314 (!pnp && (io == -1 && irq == -1 && dma == -1))) sb_init()
315 printk(KERN_ERR "sb: Error: At least io, irq, and dma "\ sb_init()
/linux-4.1.27/drivers/usb/core/
H A Dbuffer.c14 #include <linux/dma-mapping.h>
52 * Call this as part of initializing a host controller that uses the dma
53 * memory allocators. It initializes some pools of dma-coherent memory that
114 dma_addr_t *dma hcd_buffer_alloc()
123 *dma = ~(dma_addr_t) 0; hcd_buffer_alloc()
129 return dma_pool_alloc(hcd->pool[i], mem_flags, dma); hcd_buffer_alloc()
131 return dma_alloc_coherent(hcd->self.controller, size, dma, mem_flags); hcd_buffer_alloc()
138 dma_addr_t dma hcd_buffer_free()
155 dma_pool_free(hcd->pool[i], addr, dma); hcd_buffer_free()
159 dma_free_coherent(hcd->self.controller, size, addr, dma); hcd_buffer_free()
/linux-4.1.27/drivers/usb/host/
H A Dohci-mem.c18 * device driver provides us with dma addresses
88 dma_addr_t dma; td_alloc() local
91 td = dma_pool_alloc (hc->td_cache, mem_flags, &dma); td_alloc()
95 td->hwNextTD = cpu_to_hc32 (hc, dma); td_alloc()
96 td->td_dma = dma; td_alloc()
122 dma_addr_t dma; ed_alloc() local
125 ed = dma_pool_alloc (hc->ed_cache, mem_flags, &dma); ed_alloc()
129 ed->dma = dma; ed_alloc()
137 dma_pool_free (hc->ed_cache, ed, ed->dma); ed_free()
H A Dxhci-dbg.c315 u64 addr = seg->dma; xhci_debug_segment()
331 xhci_dbg(xhci, "Ring deq = %p (virt), 0x%llx (dma)\n", xhci_dbg_ring_ptrs()
337 xhci_dbg(xhci, "Ring enq = %p (virt), 0x%llx (dma)\n", xhci_dbg_ring_ptrs()
423 static void dbg_rsvd64(struct xhci_hcd *xhci, u64 *ctx, dma_addr_t dma) dbg_rsvd64() argument
428 "(dma) %#08llx - rsvd64[%d]\n", dbg_rsvd64()
429 &ctx[4 + i], (unsigned long long)dma, dbg_rsvd64()
431 dma += 8; dbg_rsvd64()
461 dma_addr_t dma = ctx->dma + xhci_dbg_slot_ctx() local
466 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - dev_info\n", xhci_dbg_slot_ctx()
468 (unsigned long long)dma, slot_ctx->dev_info); xhci_dbg_slot_ctx()
469 dma += field_size; xhci_dbg_slot_ctx()
470 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - dev_info2\n", xhci_dbg_slot_ctx()
472 (unsigned long long)dma, slot_ctx->dev_info2); xhci_dbg_slot_ctx()
473 dma += field_size; xhci_dbg_slot_ctx()
474 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - tt_info\n", xhci_dbg_slot_ctx()
476 (unsigned long long)dma, slot_ctx->tt_info); xhci_dbg_slot_ctx()
477 dma += field_size; xhci_dbg_slot_ctx()
478 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - dev_state\n", xhci_dbg_slot_ctx()
480 (unsigned long long)dma, slot_ctx->dev_state); xhci_dbg_slot_ctx()
481 dma += field_size; xhci_dbg_slot_ctx()
483 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - rsvd[%d]\n", xhci_dbg_slot_ctx()
484 &slot_ctx->reserved[i], (unsigned long long)dma, xhci_dbg_slot_ctx()
486 dma += field_size; xhci_dbg_slot_ctx()
490 dbg_rsvd64(xhci, (u64 *)slot_ctx, dma); xhci_dbg_slot_ctx()
508 dma_addr_t dma = ctx->dma + xhci_dbg_ep_ctx() local
514 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - ep_info\n", xhci_dbg_ep_ctx()
516 (unsigned long long)dma, ep_ctx->ep_info); xhci_dbg_ep_ctx()
517 dma += field_size; xhci_dbg_ep_ctx()
518 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - ep_info2\n", xhci_dbg_ep_ctx()
520 (unsigned long long)dma, ep_ctx->ep_info2); xhci_dbg_ep_ctx()
521 dma += field_size; xhci_dbg_ep_ctx()
522 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08llx - deq\n", xhci_dbg_ep_ctx()
524 (unsigned long long)dma, ep_ctx->deq); xhci_dbg_ep_ctx()
525 dma += 2*field_size; xhci_dbg_ep_ctx()
526 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - tx_info\n", xhci_dbg_ep_ctx()
528 (unsigned long long)dma, ep_ctx->tx_info); xhci_dbg_ep_ctx()
529 dma += field_size; xhci_dbg_ep_ctx()
531 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - rsvd[%d]\n", xhci_dbg_ep_ctx()
533 (unsigned long long)dma, xhci_dbg_ep_ctx()
535 dma += field_size; xhci_dbg_ep_ctx()
539 dbg_rsvd64(xhci, (u64 *)ep_ctx, dma); xhci_dbg_ep_ctx()
550 dma_addr_t dma = ctx->dma; xhci_dbg_ctx() local
561 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - drop flags\n", xhci_dbg_ctx()
562 &ctrl_ctx->drop_flags, (unsigned long long)dma, xhci_dbg_ctx()
564 dma += field_size; xhci_dbg_ctx()
565 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - add flags\n", xhci_dbg_ctx()
566 &ctrl_ctx->add_flags, (unsigned long long)dma, xhci_dbg_ctx()
568 dma += field_size; xhci_dbg_ctx()
570 xhci_dbg(xhci, "@%p (virt) @%08llx (dma) %#08x - rsvd2[%d]\n", xhci_dbg_ctx()
571 &ctrl_ctx->rsvd2[i], (unsigned long long)dma, xhci_dbg_ctx()
573 dma += field_size; xhci_dbg_ctx()
577 dbg_rsvd64(xhci, (u64 *)ctrl_ctx, dma); xhci_dbg_ctx()
/linux-4.1.27/arch/frv/include/asm/
H A Ddma.h0 /* dma.h: FRV DMA controller management
19 #undef MAX_DMA_CHANNELS /* don't use kernel/dma.c */
46 extern void frv_dma_close(int dma);
48 extern void frv_dma_config(int dma, unsigned long ccfr, unsigned long cctr, unsigned long apr);
50 extern void frv_dma_start(int dma,
54 extern void frv_dma_restart_circular(int dma, unsigned long six);
56 extern void frv_dma_stop(int dma);
58 extern int is_frv_dma_interrupting(int dma);
60 extern void frv_dma_dump(int dma);
62 extern void frv_dma_status_clear(int dma);
/linux-4.1.27/mm/
H A Ddmapool.c26 #include <linux/dma-mapping.h>
59 dma_addr_t dma; member in struct:dma_page
111 * dma_pool_create - Creates a pool of consistent memory blocks, for dma.
119 * Returns a dma allocation pool with the requested characteristics, or
230 &page->dma, mem_flags); pool_alloc_page()
252 dma_addr_t dma = page->dma; pool_free_page() local
257 dma_free_coherent(pool->dev, pool->allocation, page->vaddr, dma); pool_free_page()
263 * dma_pool_destroy - destroys a pool of dma memory blocks.
264 * @pool: dma pool that will be destroyed
310 * @pool: dma pool that will produce the block
312 * @handle: pointer to dma address of block
315 * and reports its dma address through the handle.
349 *handle = offset + page->dma; dma_pool_alloc()
382 static struct dma_page *pool_find_page(struct dma_pool *pool, dma_addr_t dma) pool_find_page() argument
387 if (dma < page->dma) pool_find_page()
389 if (dma < (page->dma + pool->allocation)) pool_find_page()
396 * dma_pool_free - put block back into dma pool
397 * @pool: the dma pool holding the block
399 * @dma: dma address of block
404 void dma_pool_free(struct dma_pool *pool, void *vaddr, dma_addr_t dma) dma_pool_free() argument
411 page = pool_find_page(pool, dma); dma_pool_free()
416 "dma_pool_free %s, %p/%lx (bad dma)\n", dma_pool_free()
417 pool->name, vaddr, (unsigned long)dma); dma_pool_free()
419 printk(KERN_ERR "dma_pool_free %s, %p/%lx (bad dma)\n", dma_pool_free()
420 pool->name, vaddr, (unsigned long)dma); dma_pool_free()
426 if ((dma - page->dma) != offset) { dma_pool_free()
431 pool->name, vaddr, (unsigned long long)dma); dma_pool_free()
435 pool->name, vaddr, (unsigned long long)dma); dma_pool_free()
447 dev_err(pool->dev, "dma_pool_free %s, dma %Lx " dma_pool_free()
449 (unsigned long long)dma); dma_pool_free()
451 printk(KERN_ERR "dma_pool_free %s, dma %Lx " dma_pool_free()
453 (unsigned long long)dma); dma_pool_free()
519 * @pool: dma pool that will be destroyed
/linux-4.1.27/drivers/tty/serial/
H A Dsamsung.c32 #include <linux/dma-mapping.h>
164 struct s3c24xx_uart_dma *dma = ourport->dma; s3c24xx_serial_stop_tx() local
178 if (dma && dma->tx_chan && ourport->tx_in_progress == S3C24XX_TX_DMA) { s3c24xx_serial_stop_tx()
179 dmaengine_pause(dma->tx_chan); s3c24xx_serial_stop_tx()
180 dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state); s3c24xx_serial_stop_tx()
181 dmaengine_terminate_all(dma->tx_chan); s3c24xx_serial_stop_tx()
183 dma->tx_transfer_addr, dma->tx_size, DMA_TO_DEVICE); s3c24xx_serial_stop_tx()
184 async_tx_ack(dma->tx_desc); s3c24xx_serial_stop_tx()
185 count = dma->tx_bytes_requested - state.residue; s3c24xx_serial_stop_tx()
206 struct s3c24xx_uart_dma *dma = ourport->dma; s3c24xx_serial_tx_dma_complete() local
212 dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state); s3c24xx_serial_tx_dma_complete()
213 count = dma->tx_bytes_requested - state.residue; s3c24xx_serial_tx_dma_complete()
214 async_tx_ack(dma->tx_desc); s3c24xx_serial_tx_dma_complete()
216 dma_sync_single_for_cpu(ourport->port.dev, dma->tx_transfer_addr, s3c24xx_serial_tx_dma_complete()
217 dma->tx_size, DMA_TO_DEVICE); s3c24xx_serial_tx_dma_complete()
244 /* Enable tx dma mode */ enable_tx_dma()
292 struct s3c24xx_uart_dma *dma = ourport->dma; s3c24xx_serial_start_tx_dma() local
298 dma->tx_size = count & ~(dma_get_cache_alignment() - 1); s3c24xx_serial_start_tx_dma()
299 dma->tx_transfer_addr = dma->tx_addr + xmit->tail; s3c24xx_serial_start_tx_dma()
301 dma_sync_single_for_device(ourport->port.dev, dma->tx_transfer_addr, s3c24xx_serial_start_tx_dma()
302 dma->tx_size, DMA_TO_DEVICE); s3c24xx_serial_start_tx_dma()
304 dma->tx_desc = dmaengine_prep_slave_single(dma->tx_chan, s3c24xx_serial_start_tx_dma()
305 dma->tx_transfer_addr, dma->tx_size, s3c24xx_serial_start_tx_dma()
307 if (!dma->tx_desc) { s3c24xx_serial_start_tx_dma()
312 dma->tx_desc->callback = s3c24xx_serial_tx_dma_complete; s3c24xx_serial_start_tx_dma()
313 dma->tx_desc->callback_param = ourport; s3c24xx_serial_start_tx_dma()
314 dma->tx_bytes_requested = dma->tx_size; s3c24xx_serial_start_tx_dma()
317 dma->tx_cookie = dmaengine_submit(dma->tx_desc); s3c24xx_serial_start_tx_dma()
318 dma_async_issue_pending(dma->tx_chan); s3c24xx_serial_start_tx_dma()
336 if (!ourport->dma || !ourport->dma->tx_chan || s3c24xx_serial_start_next_tx()
354 if (!ourport->dma || !ourport->dma->tx_chan) s3c24xx_serial_start_tx()
358 if (ourport->dma && ourport->dma->tx_chan) { s3c24xx_serial_start_tx()
367 struct s3c24xx_uart_dma *dma = ourport->dma; s3c24xx_uart_copy_rx_to_tty() local
373 dma_sync_single_for_cpu(ourport->port.dev, dma->rx_addr, s3c24xx_uart_copy_rx_to_tty()
374 dma->rx_size, DMA_FROM_DEVICE); s3c24xx_uart_copy_rx_to_tty()
382 ((unsigned char *)(ourport->dma->rx_buf)), count); s3c24xx_uart_copy_rx_to_tty()
418 struct s3c24xx_uart_dma *dma = ourport->dma; s3c24xx_serial_stop_rx() local
433 if (dma && dma->rx_chan) { s3c24xx_serial_stop_rx()
434 dmaengine_pause(dma->tx_chan); s3c24xx_serial_stop_rx()
435 dma_status = dmaengine_tx_status(dma->rx_chan, s3c24xx_serial_stop_rx()
436 dma->rx_cookie, &state); s3c24xx_serial_stop_rx()
439 received = dma->rx_bytes_requested - state.residue; s3c24xx_serial_stop_rx()
440 dmaengine_terminate_all(dma->rx_chan); s3c24xx_serial_stop_rx()
481 struct s3c24xx_uart_dma *dma = ourport->dma; s3c24xx_serial_rx_dma_complete() local
489 dmaengine_tx_status(dma->rx_chan, dma->rx_cookie, &state); s3c24xx_serial_rx_dma_complete()
490 received = dma->rx_bytes_requested - state.residue; s3c24xx_serial_rx_dma_complete()
491 async_tx_ack(dma->rx_desc); s3c24xx_serial_rx_dma_complete()
510 struct s3c24xx_uart_dma *dma = ourport->dma; s3c64xx_start_rx_dma() local
512 dma_sync_single_for_device(ourport->port.dev, dma->rx_addr, s3c64xx_start_rx_dma()
513 dma->rx_size, DMA_FROM_DEVICE); s3c64xx_start_rx_dma()
515 dma->rx_desc = dmaengine_prep_slave_single(dma->rx_chan, s3c64xx_start_rx_dma()
516 dma->rx_addr, dma->rx_size, DMA_DEV_TO_MEM, s3c64xx_start_rx_dma()
518 if (!dma->rx_desc) { s3c64xx_start_rx_dma()
523 dma->rx_desc->callback = s3c24xx_serial_rx_dma_complete; s3c64xx_start_rx_dma()
524 dma->rx_desc->callback_param = ourport; s3c64xx_start_rx_dma()
525 dma->rx_bytes_requested = dma->rx_size; s3c64xx_start_rx_dma()
527 dma->rx_cookie = dmaengine_submit(dma->rx_desc); s3c64xx_start_rx_dma()
528 dma_async_issue_pending(dma->rx_chan); s3c64xx_start_rx_dma()
582 struct s3c24xx_uart_dma *dma = ourport->dma; s3c24xx_serial_rx_chars_dma() local
601 dmaengine_pause(dma->rx_chan); s3c24xx_serial_rx_chars_dma()
602 dmaengine_tx_status(dma->rx_chan, dma->rx_cookie, &state); s3c24xx_serial_rx_chars_dma()
603 dmaengine_terminate_all(dma->rx_chan); s3c24xx_serial_rx_chars_dma()
604 received = dma->rx_bytes_requested - state.residue; s3c24xx_serial_rx_chars_dma()
721 if (ourport->dma && ourport->dma->rx_chan) s3c24xx_serial_rx_chars()
738 if (ourport->dma && ourport->dma->tx_chan && s3c24xx_serial_tx_chars()
880 struct s3c24xx_uart_dma *dma = p->dma; s3c24xx_serial_request_dma() local
885 dma->rx_conf.direction = DMA_DEV_TO_MEM; s3c24xx_serial_request_dma()
886 dma->rx_conf.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE; s3c24xx_serial_request_dma()
887 dma->rx_conf.src_addr = p->port.mapbase + S3C2410_URXH; s3c24xx_serial_request_dma()
888 dma->rx_conf.src_maxburst = 16; s3c24xx_serial_request_dma()
890 dma->tx_conf.direction = DMA_MEM_TO_DEV; s3c24xx_serial_request_dma()
891 dma->tx_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE; s3c24xx_serial_request_dma()
892 dma->tx_conf.dst_addr = p->port.mapbase + S3C2410_UTXH; s3c24xx_serial_request_dma()
894 dma->tx_conf.dst_maxburst = 16; s3c24xx_serial_request_dma()
896 dma->tx_conf.dst_maxburst = 1; s3c24xx_serial_request_dma()
901 dma->rx_chan = dma_request_slave_channel_compat(mask, dma->fn, s3c24xx_serial_request_dma()
902 dma->rx_param, p->port.dev, "rx"); s3c24xx_serial_request_dma()
903 if (!dma->rx_chan) s3c24xx_serial_request_dma()
906 dmaengine_slave_config(dma->rx_chan, &dma->rx_conf); s3c24xx_serial_request_dma()
908 dma->tx_chan = dma_request_slave_channel_compat(mask, dma->fn, s3c24xx_serial_request_dma()
909 dma->tx_param, p->port.dev, "tx"); s3c24xx_serial_request_dma()
910 if (!dma->tx_chan) { s3c24xx_serial_request_dma()
911 dma_release_channel(dma->rx_chan); s3c24xx_serial_request_dma()
915 dmaengine_slave_config(dma->tx_chan, &dma->tx_conf); s3c24xx_serial_request_dma()
918 dma->rx_size = PAGE_SIZE; s3c24xx_serial_request_dma()
920 dma->rx_buf = kmalloc(dma->rx_size, GFP_KERNEL); s3c24xx_serial_request_dma()
922 if (!dma->rx_buf) { s3c24xx_serial_request_dma()
923 dma_release_channel(dma->rx_chan); s3c24xx_serial_request_dma()
924 dma_release_channel(dma->tx_chan); s3c24xx_serial_request_dma()
928 dma->rx_addr = dma_map_single(dma->rx_chan->device->dev, dma->rx_buf, s3c24xx_serial_request_dma()
929 dma->rx_size, DMA_FROM_DEVICE); s3c24xx_serial_request_dma()
934 dma->tx_addr = dma_map_single(dma->tx_chan->device->dev, s3c24xx_serial_request_dma()
945 struct s3c24xx_uart_dma *dma = p->dma; s3c24xx_serial_release_dma() local
947 if (dma->rx_chan) { s3c24xx_serial_release_dma()
948 dmaengine_terminate_all(dma->rx_chan); s3c24xx_serial_release_dma()
949 dma_unmap_single(dma->rx_chan->device->dev, dma->rx_addr, s3c24xx_serial_release_dma()
950 dma->rx_size, DMA_FROM_DEVICE); s3c24xx_serial_release_dma()
951 kfree(dma->rx_buf); s3c24xx_serial_release_dma()
952 dma_release_channel(dma->rx_chan); s3c24xx_serial_release_dma()
953 dma->rx_chan = NULL; s3c24xx_serial_release_dma()
956 if (dma->tx_chan) { s3c24xx_serial_release_dma()
957 dmaengine_terminate_all(dma->tx_chan); s3c24xx_serial_release_dma()
958 dma_unmap_single(dma->tx_chan->device->dev, dma->tx_addr, s3c24xx_serial_release_dma()
960 dma_release_channel(dma->tx_chan); s3c24xx_serial_release_dma()
961 dma->tx_chan = NULL; s3c24xx_serial_release_dma()
992 if (ourport->dma) s3c24xx_serial_shutdown()
1055 if (ourport->dma) { s3c64xx_serial_startup()
1755 ourport->dma = devm_kzalloc(port->dev, s3c24xx_serial_init_port()
1756 sizeof(*ourport->dma), s3c24xx_serial_init_port()
1758 if (!ourport->dma) s3c24xx_serial_init_port()
/linux-4.1.27/arch/sh/drivers/dma/
H A Ddma-pvr2.c2 * arch/sh/drivers/dma/dma-pvr2.c
17 #include <mach/dma.h>
18 #include <asm/dma.h>
33 pr_debug("Got a pvr2 dma interrupt for channel %d\n", pvr2_dma_interrupt()
H A Ddma-sysfs.c2 * arch/sh/drivers/dma/dma-sysfs.c
19 #include <asm/dma.h>
22 .name = "dma",
23 .dev_name = "dma",
148 snprintf(name, sizeof(name), "dma%d", chan->chan); dma_create_sysfs_files()
163 snprintf(name, sizeof(name), "dma%d", chan->chan); dma_remove_sysfs_files()
/linux-4.1.27/arch/arm/mach-davinci/
H A Dsram.c26 void *sram_alloc(size_t len, dma_addr_t *dma) sram_alloc() argument
30 if (dma) sram_alloc()
31 *dma = 0; sram_alloc()
32 if (!sram_pool || (dma && !dma_base)) sram_alloc()
35 return gen_pool_dma_alloc(sram_pool, len, dma); sram_alloc()
/linux-4.1.27/drivers/net/wireless/b43legacy/
H A Ddma.c31 #include "dma.h"
36 #include <linux/dma-mapping.h>
75 addr |= ring->dev->dma.translation; op32_fill_descriptor()
199 return dev->dma.tx_ring1; priority_to_txring()
207 ring = dev->dma.tx_ring3; priority_to_txring()
210 ring = dev->dma.tx_ring2; priority_to_txring()
213 ring = dev->dma.tx_ring1; priority_to_txring()
216 ring = dev->dma.tx_ring0; priority_to_txring()
219 ring = dev->dma.tx_ring4; priority_to_txring()
222 ring = dev->dma.tx_ring5; priority_to_txring()
545 u32 trans = ring->dev->dma.translation; dmacontroller_setup()
679 /* test for ability to dma to txhdr_cache */ b43legacy_setup_dmaring()
775 struct b43legacy_dma *dma; b43legacy_dma_free() local
779 dma = &dev->dma; b43legacy_dma_free()
781 b43legacy_destroy_dmaring(dma->rx_ring3); b43legacy_dma_free()
782 dma->rx_ring3 = NULL; b43legacy_dma_free()
783 b43legacy_destroy_dmaring(dma->rx_ring0); b43legacy_dma_free()
784 dma->rx_ring0 = NULL; b43legacy_dma_free()
786 b43legacy_destroy_dmaring(dma->tx_ring5); b43legacy_dma_free()
787 dma->tx_ring5 = NULL; b43legacy_dma_free()
788 b43legacy_destroy_dmaring(dma->tx_ring4); b43legacy_dma_free()
789 dma->tx_ring4 = NULL; b43legacy_dma_free()
790 b43legacy_destroy_dmaring(dma->tx_ring3); b43legacy_dma_free()
791 dma->tx_ring3 = NULL; b43legacy_dma_free()
792 b43legacy_destroy_dmaring(dma->tx_ring2); b43legacy_dma_free()
793 dma->tx_ring2 = NULL; b43legacy_dma_free()
794 b43legacy_destroy_dmaring(dma->tx_ring1); b43legacy_dma_free()
795 dma->tx_ring1 = NULL; b43legacy_dma_free()
796 b43legacy_destroy_dmaring(dma->tx_ring0); b43legacy_dma_free()
797 dma->tx_ring0 = NULL; b43legacy_dma_free()
839 struct b43legacy_dma *dma = &dev->dma; b43legacy_dma_init() local
860 dma->translation = ssb_dma_translation(dev->dev); b43legacy_dma_init()
867 dma->tx_ring0 = ring; b43legacy_dma_init()
872 dma->tx_ring1 = ring; b43legacy_dma_init()
877 dma->tx_ring2 = ring; b43legacy_dma_init()
882 dma->tx_ring3 = ring; b43legacy_dma_init()
887 dma->tx_ring4 = ring; b43legacy_dma_init()
892 dma->tx_ring5 = ring; b43legacy_dma_init()
898 dma->rx_ring0 = ring; b43legacy_dma_init()
904 dma->rx_ring3 = ring; b43legacy_dma_init()
913 b43legacy_destroy_dmaring(dma->rx_ring0); b43legacy_dma_init()
914 dma->rx_ring0 = NULL; b43legacy_dma_init()
916 b43legacy_destroy_dmaring(dma->tx_ring5); b43legacy_dma_init()
917 dma->tx_ring5 = NULL; b43legacy_dma_init()
919 b43legacy_destroy_dmaring(dma->tx_ring4); b43legacy_dma_init()
920 dma->tx_ring4 = NULL; b43legacy_dma_init()
922 b43legacy_destroy_dmaring(dma->tx_ring3); b43legacy_dma_init()
923 dma->tx_ring3 = NULL; b43legacy_dma_init()
925 b43legacy_destroy_dmaring(dma->tx_ring2); b43legacy_dma_init()
926 dma->tx_ring2 = NULL; b43legacy_dma_init()
928 b43legacy_destroy_dmaring(dma->tx_ring1); b43legacy_dma_init()
929 dma->tx_ring1 = NULL; b43legacy_dma_init()
931 b43legacy_destroy_dmaring(dma->tx_ring0); b43legacy_dma_init()
932 dma->tx_ring0 = NULL; b43legacy_dma_init()
979 struct b43legacy_dma *dma = &dev->dma; parse_cookie() local
984 ring = dma->tx_ring0; parse_cookie()
987 ring = dma->tx_ring1; parse_cookie()
990 ring = dma->tx_ring2; parse_cookie()
993 ring = dma->tx_ring3; parse_cookie()
996 ring = dma->tx_ring4; parse_cookie()
999 ring = dma->tx_ring5; parse_cookie()
1438 b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring0); b43legacy_dma_tx_suspend()
1439 b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring1); b43legacy_dma_tx_suspend()
1440 b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring2); b43legacy_dma_tx_suspend()
1441 b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring3); b43legacy_dma_tx_suspend()
1442 b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring4); b43legacy_dma_tx_suspend()
1443 b43legacy_dma_tx_suspend_ring(dev->dma.tx_ring5); b43legacy_dma_tx_suspend()
1448 b43legacy_dma_tx_resume_ring(dev->dma.tx_ring5); b43legacy_dma_tx_resume()
1449 b43legacy_dma_tx_resume_ring(dev->dma.tx_ring4); b43legacy_dma_tx_resume()
1450 b43legacy_dma_tx_resume_ring(dev->dma.tx_ring3); b43legacy_dma_tx_resume()
1451 b43legacy_dma_tx_resume_ring(dev->dma.tx_ring2); b43legacy_dma_tx_resume()
1452 b43legacy_dma_tx_resume_ring(dev->dma.tx_ring1); b43legacy_dma_tx_resume()
1453 b43legacy_dma_tx_resume_ring(dev->dma.tx_ring0); b43legacy_dma_tx_resume()
/linux-4.1.27/drivers/i2c/busses/
H A Di2c-imx.c38 #include <linux/dma-mapping.h>
211 struct imx_i2c_dma *dma; member in struct:imx_i2c_struct
286 struct imx_i2c_dma *dma; i2c_imx_dma_request() local
291 dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL); i2c_imx_dma_request()
292 if (!dma) i2c_imx_dma_request()
295 dma->chan_tx = dma_request_slave_channel(dev, "tx"); i2c_imx_dma_request()
296 if (!dma->chan_tx) { i2c_imx_dma_request()
306 ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig); i2c_imx_dma_request()
312 dma->chan_rx = dma_request_slave_channel(dev, "rx"); i2c_imx_dma_request()
313 if (!dma->chan_rx) { i2c_imx_dma_request()
323 ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig); i2c_imx_dma_request()
329 i2c_imx->dma = dma; i2c_imx_dma_request()
330 init_completion(&dma->cmd_complete); i2c_imx_dma_request()
332 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx)); i2c_imx_dma_request()
337 dma_release_channel(dma->chan_rx); i2c_imx_dma_request()
339 dma_release_channel(dma->chan_tx); i2c_imx_dma_request()
341 devm_kfree(dev, dma); i2c_imx_dma_request()
348 struct imx_i2c_dma *dma = i2c_imx->dma; i2c_imx_dma_callback() local
350 dma_unmap_single(dma->chan_using->device->dev, dma->dma_buf, i2c_imx_dma_callback()
351 dma->dma_len, dma->dma_data_dir); i2c_imx_dma_callback()
352 complete(&dma->cmd_complete); i2c_imx_dma_callback()
358 struct imx_i2c_dma *dma = i2c_imx->dma; i2c_imx_dma_xfer() local
361 struct device *chan_dev = dma->chan_using->device->dev; i2c_imx_dma_xfer()
363 dma->dma_buf = dma_map_single(chan_dev, msgs->buf, i2c_imx_dma_xfer()
364 dma->dma_len, dma->dma_data_dir); i2c_imx_dma_xfer()
365 if (dma_mapping_error(chan_dev, dma->dma_buf)) { i2c_imx_dma_xfer()
370 txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf, i2c_imx_dma_xfer()
371 dma->dma_len, dma->dma_transfer_dir, i2c_imx_dma_xfer()
385 dma_async_issue_pending(dma->chan_using); i2c_imx_dma_xfer()
390 dma_unmap_single(chan_dev, dma->dma_buf, i2c_imx_dma_xfer()
391 dma->dma_len, dma->dma_data_dir); i2c_imx_dma_xfer()
398 struct imx_i2c_dma *dma = i2c_imx->dma; i2c_imx_dma_free() local
400 dma->dma_buf = 0; i2c_imx_dma_free()
401 dma->dma_len = 0; i2c_imx_dma_free()
403 dma_release_channel(dma->chan_tx); i2c_imx_dma_free()
404 dma->chan_tx = NULL; i2c_imx_dma_free()
406 dma_release_channel(dma->chan_rx); i2c_imx_dma_free()
407 dma->chan_rx = NULL; i2c_imx_dma_free()
409 dma->chan_using = NULL; i2c_imx_dma_free()
558 if (i2c_imx->dma) i2c_imx_stop()
607 struct imx_i2c_dma *dma = i2c_imx->dma; i2c_imx_dma_write() local
610 dma->chan_using = dma->chan_tx; i2c_imx_dma_write()
611 dma->dma_transfer_dir = DMA_MEM_TO_DEV; i2c_imx_dma_write()
612 dma->dma_data_dir = DMA_TO_DEVICE; i2c_imx_dma_write()
613 dma->dma_len = msgs->len - 1; i2c_imx_dma_write()
627 reinit_completion(&i2c_imx->dma->cmd_complete); i2c_imx_dma_write()
629 &i2c_imx->dma->cmd_complete, i2c_imx_dma_write()
632 dmaengine_terminate_all(dma->chan_using); i2c_imx_dma_write()
670 struct imx_i2c_dma *dma = i2c_imx->dma; i2c_imx_dma_read() local
677 dma->chan_using = dma->chan_rx; i2c_imx_dma_read()
678 dma->dma_transfer_dir = DMA_DEV_TO_MEM; i2c_imx_dma_read()
679 dma->dma_data_dir = DMA_FROM_DEVICE; i2c_imx_dma_read()
681 dma->dma_len = msgs->len - 2; i2c_imx_dma_read()
686 reinit_completion(&i2c_imx->dma->cmd_complete); i2c_imx_dma_read()
688 &i2c_imx->dma->cmd_complete, i2c_imx_dma_read()
691 dmaengine_terminate_all(dma->chan_using); i2c_imx_dma_read()
820 if (i2c_imx->dma && msgs->len >= DMA_THRESHOLD && !block_data) i2c_imx_read()
940 if (i2c_imx->dma && msgs[i].len >= DMA_THRESHOLD) i2c_imx_xfer()
1087 if (i2c_imx->dma) i2c_imx_remove()
H A Di2c-at91.c22 #include <linux/dma-mapping.h>
33 #include <linux/platform_data/dma-atmel.h>
110 struct at91_twi_dma dma; member in struct:at91_twi_dev
177 struct at91_twi_dma *dma = &dev->dma; at91_twi_dma_cleanup() local
181 if (dma->xfer_in_progress) { at91_twi_dma_cleanup()
182 if (dma->direction == DMA_FROM_DEVICE) at91_twi_dma_cleanup()
183 dmaengine_terminate_all(dma->chan_rx); at91_twi_dma_cleanup()
185 dmaengine_terminate_all(dma->chan_tx); at91_twi_dma_cleanup()
186 dma->xfer_in_progress = false; at91_twi_dma_cleanup()
188 if (dma->buf_mapped) { at91_twi_dma_cleanup()
189 dma_unmap_single(dev->dev, sg_dma_address(&dma->sg), at91_twi_dma_cleanup()
190 dev->buf_len, dma->direction); at91_twi_dma_cleanup()
191 dma->buf_mapped = false; at91_twi_dma_cleanup()
217 dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg), at91_twi_write_data_dma_callback()
235 struct at91_twi_dma *dma = &dev->dma; at91_twi_write_data_dma() local
236 struct dma_chan *chan_tx = dma->chan_tx; at91_twi_write_data_dma()
241 dma->direction = DMA_TO_DEVICE; at91_twi_write_data_dma()
247 dev_err(dev->dev, "dma map failed\n"); at91_twi_write_data_dma()
250 dma->buf_mapped = true; at91_twi_write_data_dma()
252 sg_dma_len(&dma->sg) = dev->buf_len; at91_twi_write_data_dma()
253 sg_dma_address(&dma->sg) = dma_addr; at91_twi_write_data_dma()
255 txdesc = dmaengine_prep_slave_sg(chan_tx, &dma->sg, 1, DMA_MEM_TO_DEV, at91_twi_write_data_dma()
258 dev_err(dev->dev, "dma prep slave sg failed\n"); at91_twi_write_data_dma()
265 dma->xfer_in_progress = true; at91_twi_write_data_dma()
316 dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg), at91_twi_read_data_dma_callback()
319 /* The last two bytes have to be read without using dma */ at91_twi_read_data_dma_callback()
329 struct at91_twi_dma *dma = &dev->dma; at91_twi_read_data_dma() local
330 struct dma_chan *chan_rx = dma->chan_rx; at91_twi_read_data_dma()
332 dma->direction = DMA_FROM_DEVICE; at91_twi_read_data_dma()
334 /* Keep in mind that we won't use dma to read the last two bytes */ at91_twi_read_data_dma()
339 dev_err(dev->dev, "dma map failed\n"); at91_twi_read_data_dma()
342 dma->buf_mapped = true; at91_twi_read_data_dma()
344 dma->sg.dma_address = dma_addr; at91_twi_read_data_dma()
345 sg_dma_len(&dma->sg) = dev->buf_len - 2; at91_twi_read_data_dma()
347 rxdesc = dmaengine_prep_slave_sg(chan_rx, &dma->sg, 1, DMA_DEV_TO_MEM, at91_twi_read_data_dma()
350 dev_err(dev->dev, "dma prep slave sg failed\n"); at91_twi_read_data_dma()
357 dma->xfer_in_progress = true; at91_twi_read_data_dma()
359 dma_async_issue_pending(dma->chan_rx); at91_twi_read_data_dma()
447 * When using dma, the last byte has to be read manually in at91_do_twi_transfer()
450 * if you use the dma to read n-1 bytes because of latency. at91_do_twi_transfer()
451 * Reading n-2 bytes with dma and the two last ones manually at91_do_twi_transfer()
668 struct at91_twi_dma *dma = &dev->dma; at91_twi_configure_dma() local
679 dma->chan_tx = dma_request_slave_channel_reason(dev->dev, "tx"); at91_twi_configure_dma()
680 if (IS_ERR(dma->chan_tx)) { at91_twi_configure_dma()
681 ret = PTR_ERR(dma->chan_tx); at91_twi_configure_dma()
682 dma->chan_tx = NULL; at91_twi_configure_dma()
686 dma->chan_rx = dma_request_slave_channel_reason(dev->dev, "rx"); at91_twi_configure_dma()
687 if (IS_ERR(dma->chan_rx)) { at91_twi_configure_dma()
688 ret = PTR_ERR(dma->chan_rx); at91_twi_configure_dma()
689 dma->chan_rx = NULL; at91_twi_configure_dma()
694 if (dmaengine_slave_config(dma->chan_tx, &slave_config)) { at91_twi_configure_dma()
701 if (dmaengine_slave_config(dma->chan_rx, &slave_config)) { at91_twi_configure_dma()
707 sg_init_table(&dma->sg, 1); at91_twi_configure_dma()
708 dma->buf_mapped = false; at91_twi_configure_dma()
709 dma->xfer_in_progress = false; at91_twi_configure_dma()
713 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx)); at91_twi_configure_dma()
720 if (dma->chan_rx) at91_twi_configure_dma()
721 dma_release_channel(dma->chan_rx); at91_twi_configure_dma()
722 if (dma->chan_tx) at91_twi_configure_dma()
723 dma_release_channel(dma->chan_tx); at91_twi_configure_dma()
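The at91 comments above (around at91_twi_read_data_dma()) explain why reads move only buf_len - 2 bytes over DMA: the controller must see the STOP condition before the final byte so it can NACK it, and DMA completion latency cannot guarantee that for the last two. The split itself is just arithmetic; a trivial helper for illustration:

#include <linux/types.h>

static inline size_t twi_dma_rx_len(size_t total)
{
        /* DMA part; the final two bytes are read from the receive
         * register by the interrupt handler (PIO) */
        return total > 2 ? total - 2 : 0;
}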
/linux-4.1.27/drivers/net/ethernet/
H A Dlantiq_etop.c36 #include <linux/dma-mapping.h>
94 struct ltq_dma_channel dma; member in struct:ltq_etop_chan
116 ch->skb[ch->dma.desc] = netdev_alloc_skb(ch->netdev, MAX_DMA_DATA_LEN); ltq_etop_alloc_skb()
117 if (!ch->skb[ch->dma.desc]) ltq_etop_alloc_skb()
119 ch->dma.desc_base[ch->dma.desc].addr = dma_map_single(NULL, ltq_etop_alloc_skb()
120 ch->skb[ch->dma.desc]->data, MAX_DMA_DATA_LEN, ltq_etop_alloc_skb()
122 ch->dma.desc_base[ch->dma.desc].addr = ltq_etop_alloc_skb()
123 CPHYSADDR(ch->skb[ch->dma.desc]->data); ltq_etop_alloc_skb()
124 ch->dma.desc_base[ch->dma.desc].ctl = ltq_etop_alloc_skb()
127 skb_reserve(ch->skb[ch->dma.desc], NET_IP_ALIGN); ltq_etop_alloc_skb()
135 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc]; ltq_etop_hw_receive()
136 struct sk_buff *skb = ch->skb[ch->dma.desc]; ltq_etop_hw_receive()
144 ltq_dma_close(&ch->dma); ltq_etop_hw_receive()
146 ch->dma.desc++; ltq_etop_hw_receive()
147 ch->dma.desc %= LTQ_DESC_NUM; ltq_etop_hw_receive()
164 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc]; ltq_etop_poll_rx()
175 ltq_dma_ack_irq(&ch->dma); ltq_etop_poll_rx()
191 while ((ch->dma.desc_base[ch->tx_free].ctl & ltq_etop_poll_tx()
195 memset(&ch->dma.desc_base[ch->tx_free], 0, ltq_etop_poll_tx()
205 ltq_dma_ack_irq(&ch->dma); ltq_etop_poll_tx()
224 ltq_dma_free(&ch->dma); ltq_etop_free_channel()
225 if (ch->dma.irq) ltq_etop_free_channel()
226 free_irq(ch->dma.irq, priv); ltq_etop_free_channel()
230 dev_kfree_skb_any(ch->skb[ch->dma.desc]); ltq_etop_free_channel()
280 ch->idx = ch->dma.nr = i; ltq_etop_hw_init()
283 ltq_dma_alloc_tx(&ch->dma); ltq_etop_hw_init()
286 ltq_dma_alloc_rx(&ch->dma); ltq_etop_hw_init()
287 for (ch->dma.desc = 0; ch->dma.desc < LTQ_DESC_NUM; ltq_etop_hw_init()
288 ch->dma.desc++) ltq_etop_hw_init()
291 ch->dma.desc = 0; ltq_etop_hw_init()
294 ch->dma.irq = irq; ltq_etop_hw_init()
490 ltq_dma_open(&ch->dma); ltq_etop_open()
512 ltq_dma_close(&ch->dma); ltq_etop_stop()
524 struct ltq_dma_desc *desc = &ch->dma.desc_base[ch->dma.desc]; ltq_etop_tx()
531 if ((desc->ctl & (LTQ_DMA_OWN | LTQ_DMA_C)) || ch->skb[ch->dma.desc]) { ltq_etop_tx()
538 /* dma needs to start on a 16 byte aligned address */ ltq_etop_tx()
540 ch->skb[ch->dma.desc] = skb; ltq_etop_tx()
550 ch->dma.desc++; ltq_etop_tx()
551 ch->dma.desc %= LTQ_DESC_NUM; ltq_etop_tx()
554 if (ch->dma.desc_base[ch->dma.desc].ctl & LTQ_DMA_OWN) ltq_etop_tx()
/linux-4.1.27/arch/x86/kernel/
H A Di8237.c15 #include <asm/dma.h>
20 * Allocation is handled in kernel/dma.c and normal usage is
21 * in asm/dma.h.
/linux-4.1.27/sound/soc/fsl/
H A Dfsl_utils.c20 * fsl_asoc_get_dma_channel - determine the dma channel for a SSI node
23 * @name: name of the phandle pointing to the dma channel
25 * @dma_channel_id: dma channel id to be returned
26 * @dma_id: dma id to be returned
28 * This function determines the dma and channel id for given SSI node. It
46 if (!of_device_is_compatible(dma_channel_np, "fsl,ssi-dma-channel")) { fsl_asoc_get_dma_channel()
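The fsl_asoc_get_dma_channel() kerneldoc above describes resolving a DMA channel through a named phandle on the SSI node. A rough sketch of that lookup shape using the standard OF helpers (error reporting trimmed; the phandle name, e.g. "fsl,playback-dma", is supplied by the caller):

#include <linux/of.h>

static struct device_node *get_ssi_dma_channel(struct device_node *ssi_np,
                                               const char *name)
{
        struct device_node *dma_np = of_parse_phandle(ssi_np, name, 0);

        if (!dma_np)
                return NULL;
        if (!of_device_is_compatible(dma_np, "fsl,ssi-dma-channel")) {
                of_node_put(dma_np);    /* drop the reference we took */
                return NULL;
        }
        return dma_np;                  /* caller must of_node_put() when done */
}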
/linux-4.1.27/drivers/net/appletalk/
H A Dltpc.c76 and to probe for things in the standard order of i/o, irq, dma. This
102 * Change names to ltpc. Tabs. Took a shot at dma alloc,
207 static int dma; variable
232 #include <asm/dma.h>
371 int dma = dev->dma; handlefc() local
377 disable_dma(dma); handlefc()
378 clear_dma_ff(dma); handlefc()
379 set_dma_mode(dma,DMA_MODE_READ); handlefc()
380 set_dma_addr(dma,virt_to_bus(ltdmacbuf)); handlefc()
381 set_dma_count(dma,50); handlefc()
382 enable_dma(dma); handlefc()
394 int dma = dev->dma; handlefd() local
399 disable_dma(dma); handlefd()
400 clear_dma_ff(dma); handlefd()
401 set_dma_mode(dma,DMA_MODE_READ); handlefd()
402 set_dma_addr(dma,virt_to_bus(ltdmabuf)); handlefd()
403 set_dma_count(dma,800); handlefd()
404 enable_dma(dma); handlefd()
418 int dma = dev->dma; handlewrite() local
423 disable_dma(dma); handlewrite()
424 clear_dma_ff(dma); handlewrite()
425 set_dma_mode(dma,DMA_MODE_WRITE); handlewrite()
426 set_dma_addr(dma,virt_to_bus(ltdmabuf)); handlewrite()
427 set_dma_count(dma,800); handlewrite()
428 enable_dma(dma); handlewrite()
436 printk("timed out in handlewrite, dma res %d\n", handlewrite()
437 get_dma_residue(dev->dma) ); handlewrite()
446 int dma = dev->dma; handleread() local
452 disable_dma(dma); handleread()
453 clear_dma_ff(dma); handleread()
454 set_dma_mode(dma,DMA_MODE_READ); handleread()
455 set_dma_addr(dma,virt_to_bus(ltdmabuf)); handleread()
456 set_dma_count(dma,800); handleread()
457 enable_dma(dma); handleread()
468 int dma = dev->dma; handlecommand() local
473 disable_dma(dma); handlecommand()
474 clear_dma_ff(dma); handlecommand()
475 set_dma_mode(dma,DMA_MODE_WRITE); handlecommand()
476 set_dma_addr(dma,virt_to_bus(ltdmacbuf)); handlecommand()
477 set_dma_count(dma,50); handlecommand()
478 enable_dma(dma); handlecommand()
938 static int __init ltpc_probe_dma(int base, int dma) ltpc_probe_dma() argument
940 int want = (dma == 3) ? 2 : (dma == 1) ? 1 : 3; ltpc_probe_dma()
995 /* release the other dma channel (if we opened both of them) */ ltpc_probe_dma()
1103 inb_p(io+5); /* enable dma */ ltpc_probe()
1108 /* now, figure out which dma channel we're using, unless it's ltpc_probe()
1112 dma = ltpc_probe_dma(io, dma); ltpc_probe()
1113 if (!dma) { /* no dma channel */ ltpc_probe()
1121 printk(KERN_INFO "Apple/Farallon LocalTalk-PC card at %03x, IR%d, DMA%d.\n",io,irq,dma); ltpc_probe()
1123 printk(KERN_INFO "Apple/Farallon LocalTalk-PC card at %03x, DMA%d. Using polled mode.\n",io,dma); ltpc_probe()
1128 dev->dma = dma; ltpc_probe()
1135 disable_dma(dma); ltpc_probe()
1136 clear_dma_ff(dma); ltpc_probe()
1137 set_dma_mode(dma,DMA_MODE_READ); ltpc_probe()
1138 set_dma_addr(dma,virt_to_bus(ltdmabuf)); ltpc_probe()
1139 set_dma_count(dma,0x100); ltpc_probe()
1140 enable_dma(dma); ltpc_probe()
1195 /* handles "ltpc=io,irq,dma" kernel command lines */ ltpc_setup()
1209 "ltpc: usage: ltpc=auto|iobase[,irq[,dma]]\n"); ltpc_setup()
1218 dma = ints[3]; ltpc_setup()
1236 module_param(dma, int, 0);
1266 if(debug & DEBUG_VERBOSE) printk("freeing dma\n"); ltpc_cleanup()
1268 if (dev_ltpc->dma) ltpc_cleanup()
1269 free_dma(dev_ltpc->dma); ltpc_cleanup()
/linux-4.1.27/include/media/
H A Dvideobuf2-dma-contig.h2 * videobuf2-dma-contig.h - DMA contig memory allocator for videobuf2
17 #include <linux/dma-mapping.h>
/linux-4.1.27/arch/powerpc/kernel/
H A Dprom_parse.c21 prop = of_get_property(dn, "ibm,#dma-address-cells", NULL); of_parse_dma_window()
30 prop = of_get_property(dn, "ibm,#dma-size-cells", NULL); of_parse_dma_window()
/linux-4.1.27/arch/arm64/include/asm/
H A Dpci.h7 #include <linux/dma-mapping.h>
11 #include <asm-generic/pci-dma-compat.h>
/linux-4.1.27/sound/soc/pxa/
H A Dpxa2xx-pcm.c13 #include <linux/dma-mapping.h>
18 #include <mach/dma.h>
33 struct snd_dmaengine_dai_dma_data *dma; pxa2xx_pcm_hw_params() local
36 dma = snd_soc_dai_get_dma_data(rtd->cpu_dai, substream); pxa2xx_pcm_hw_params()
40 if (!dma) pxa2xx_pcm_hw_params()
46 prtd->params = dma; pxa2xx_pcm_hw_params()
52 } else if (prtd->params != dma) { pxa2xx_pcm_hw_params()
54 prtd->params = dma; pxa2xx_pcm_hw_params()
/linux-4.1.27/sound/pci/
H A Datiixp.c356 static int atiixp_build_dma_packets(struct atiixp *chip, struct atiixp_dma *dma, atiixp_build_dma_packets() argument
368 if (dma->desc_buf.area == NULL) { atiixp_build_dma_packets()
372 &dma->desc_buf) < 0) atiixp_build_dma_packets()
374 dma->period_bytes = dma->periods = 0; /* clear */ atiixp_build_dma_packets()
377 if (dma->periods == periods && dma->period_bytes == period_bytes) atiixp_build_dma_packets()
382 writel(0, chip->remap_addr + dma->ops->llp_offset); atiixp_build_dma_packets()
383 dma->ops->enable_dma(chip, 0); atiixp_build_dma_packets()
384 dma->ops->enable_dma(chip, 1); atiixp_build_dma_packets()
389 desc_addr = (u32)dma->desc_buf.addr; atiixp_build_dma_packets()
392 desc = &((struct atiixp_dma_desc *)dma->desc_buf.area)[i]; atiixp_build_dma_packets()
398 desc->next = cpu_to_le32((u32)dma->desc_buf.addr); atiixp_build_dma_packets()
404 writel((u32)dma->desc_buf.addr | ATI_REG_LINKPTR_EN, atiixp_build_dma_packets()
405 chip->remap_addr + dma->ops->llp_offset); atiixp_build_dma_packets()
407 dma->period_bytes = period_bytes; atiixp_build_dma_packets()
408 dma->periods = periods; atiixp_build_dma_packets()
416 static void atiixp_clear_dma_packets(struct atiixp *chip, struct atiixp_dma *dma, atiixp_clear_dma_packets() argument
419 if (dma->desc_buf.area) { atiixp_clear_dma_packets()
420 writel(0, chip->remap_addr + dma->ops->llp_offset); atiixp_clear_dma_packets()
421 snd_dma_free_pages(&dma->desc_buf); atiixp_clear_dma_packets()
422 dma->desc_buf.area = NULL; atiixp_clear_dma_packets()
665 struct atiixp_dma *dma = runtime->private_data; snd_atiixp_pcm_pointer() local
670 curptr = readl(chip->remap_addr + dma->ops->dt_cur); snd_atiixp_pcm_pointer()
671 if (curptr < dma->buf_addr) snd_atiixp_pcm_pointer()
673 curptr -= dma->buf_addr; snd_atiixp_pcm_pointer()
674 if (curptr >= dma->buf_bytes) snd_atiixp_pcm_pointer()
679 readl(chip->remap_addr + dma->ops->dt_cur), dma->buf_addr); snd_atiixp_pcm_pointer()
686 static void snd_atiixp_xrun_dma(struct atiixp *chip, struct atiixp_dma *dma) snd_atiixp_xrun_dma() argument
688 if (! dma->substream || ! dma->running) snd_atiixp_xrun_dma()
690 dev_dbg(chip->card->dev, "XRUN detected (DMA %d)\n", dma->ops->type); snd_atiixp_xrun_dma()
691 snd_pcm_stop_xrun(dma->substream); snd_atiixp_xrun_dma()
697 static void snd_atiixp_update_dma(struct atiixp *chip, struct atiixp_dma *dma) snd_atiixp_update_dma() argument
699 if (! dma->substream || ! dma->running) snd_atiixp_update_dma()
701 snd_pcm_period_elapsed(dma->substream); snd_atiixp_update_dma()
724 struct atiixp_dma *dma = substream->runtime->private_data; snd_atiixp_pcm_trigger() local
727 if (snd_BUG_ON(!dma->ops->enable_transfer || snd_atiixp_pcm_trigger()
728 !dma->ops->flush_dma)) snd_atiixp_pcm_trigger()
736 dma->ops->enable_transfer(chip, 1); snd_atiixp_pcm_trigger()
737 dma->running = 1; snd_atiixp_pcm_trigger()
738 dma->suspended = 0; snd_atiixp_pcm_trigger()
743 dma->ops->enable_transfer(chip, 0); snd_atiixp_pcm_trigger()
744 dma->running = 0; snd_atiixp_pcm_trigger()
745 dma->suspended = cmd == SNDRV_PCM_TRIGGER_SUSPEND; snd_atiixp_pcm_trigger()
754 dma->ops->flush_dma(chip); snd_atiixp_pcm_trigger()
959 struct atiixp_dma *dma = substream->runtime->private_data; snd_atiixp_pcm_hw_params() local
965 dma->buf_addr = substream->runtime->dma_addr; snd_atiixp_pcm_hw_params()
966 dma->buf_bytes = params_buffer_bytes(hw_params); snd_atiixp_pcm_hw_params()
968 err = atiixp_build_dma_packets(chip, dma, substream, snd_atiixp_pcm_hw_params()
974 if (dma->ac97_pcm_type >= 0) { snd_atiixp_pcm_hw_params()
975 struct ac97_pcm *pcm = chip->pcms[dma->ac97_pcm_type]; snd_atiixp_pcm_hw_params()
979 if (dma->pcm_open_flag) { snd_atiixp_pcm_hw_params()
981 dma->pcm_open_flag = 0; snd_atiixp_pcm_hw_params()
987 dma->pcm_open_flag = 1; snd_atiixp_pcm_hw_params()
996 struct atiixp_dma *dma = substream->runtime->private_data; snd_atiixp_pcm_hw_free() local
998 if (dma->pcm_open_flag) { snd_atiixp_pcm_hw_free()
999 struct ac97_pcm *pcm = chip->pcms[dma->ac97_pcm_type]; snd_atiixp_pcm_hw_free()
1001 dma->pcm_open_flag = 0; snd_atiixp_pcm_hw_free()
1003 atiixp_clear_dma_packets(chip, dma, substream); snd_atiixp_pcm_hw_free()
1033 struct atiixp_dma *dma, int pcm_type) snd_atiixp_pcm_open()
1039 if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma)) snd_atiixp_pcm_open()
1042 if (dma->opened) snd_atiixp_pcm_open()
1044 dma->substream = substream; snd_atiixp_pcm_open()
1046 dma->ac97_pcm_type = pcm_type; snd_atiixp_pcm_open()
1056 runtime->private_data = dma; snd_atiixp_pcm_open()
1060 dma->ops->enable_dma(chip, 1); snd_atiixp_pcm_open()
1062 dma->opened = 1; snd_atiixp_pcm_open()
1068 struct atiixp_dma *dma) snd_atiixp_pcm_close()
1072 if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma)) snd_atiixp_pcm_close()
1075 dma->ops->enable_dma(chip, 0); snd_atiixp_pcm_close()
1077 dma->substream = NULL; snd_atiixp_pcm_close()
1078 dma->opened = 0; snd_atiixp_pcm_close()
1484 struct atiixp_dma *dma = &chip->dmas[i]; snd_atiixp_suspend() local
1485 if (dma->substream && dma->running) snd_atiixp_suspend()
1486 dma->saved_curptr = readl(chip->remap_addr + snd_atiixp_suspend()
1487 dma->ops->dt_cur); snd_atiixp_suspend()
1511 struct atiixp_dma *dma = &chip->dmas[i]; snd_atiixp_resume() local
1512 if (dma->substream && dma->suspended) { snd_atiixp_resume()
1513 dma->ops->enable_dma(chip, 1); snd_atiixp_resume()
1514 dma->substream->ops->prepare(dma->substream); snd_atiixp_resume()
1515 writel((u32)dma->desc_buf.addr | ATI_REG_LINKPTR_EN, snd_atiixp_resume()
1516 chip->remap_addr + dma->ops->llp_offset); snd_atiixp_resume()
1517 writel(dma->saved_curptr, chip->remap_addr + snd_atiixp_resume()
1518 dma->ops->dt_cur); snd_atiixp_resume()
1032 snd_atiixp_pcm_open(struct snd_pcm_substream *substream, struct atiixp_dma *dma, int pcm_type) snd_atiixp_pcm_open() argument
1067 snd_atiixp_pcm_close(struct snd_pcm_substream *substream, struct atiixp_dma *dma) snd_atiixp_pcm_close() argument
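atiixp_build_dma_packets() above fills a descriptor array and points the final entry's next field back at the base bus address, so the engine cycles through the periods without CPU intervention. A minimal sketch of that ring-building pattern, with a hypothetical two-field descriptor standing in for struct atiixp_dma_desc:

#include <linux/kernel.h>
#include <linux/types.h>

/* Hypothetical descriptor: the hardware reads little-endian fields. */
struct hw_desc {
	__le32 addr;	/* bus address of this period's audio data */
	__le32 next;	/* bus address of the next descriptor */
};

/* Link 'n' descriptors at 'descs' (whose own bus address is 'desc_base')
 * into a ring: entry i points at entry i+1, and the last entry points
 * back at the base, producing the circular list the DMA engine walks.
 */
static void build_desc_ring(struct hw_desc *descs, u32 desc_base,
			    u32 buf_base, u32 period_bytes, unsigned int n)
{
	unsigned int i;

	for (i = 0; i < n; i++) {
		descs[i].addr = cpu_to_le32(buf_base + i * period_bytes);
		descs[i].next = (i == n - 1) ?
			cpu_to_le32(desc_base) :
			cpu_to_le32(desc_base + (i + 1) * sizeof(*descs));
	}
}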
H A Datiixp_modem.c325 struct atiixp_dma *dma, atiixp_build_dma_packets()
337 if (dma->desc_buf.area == NULL) { atiixp_build_dma_packets()
339 ATI_DESC_LIST_SIZE, &dma->desc_buf) < 0) atiixp_build_dma_packets()
341 dma->period_bytes = dma->periods = 0; /* clear */ atiixp_build_dma_packets()
344 if (dma->periods == periods && dma->period_bytes == period_bytes) atiixp_build_dma_packets()
349 writel(0, chip->remap_addr + dma->ops->llp_offset); atiixp_build_dma_packets()
350 dma->ops->enable_dma(chip, 0); atiixp_build_dma_packets()
351 dma->ops->enable_dma(chip, 1); atiixp_build_dma_packets()
356 desc_addr = (u32)dma->desc_buf.addr; atiixp_build_dma_packets()
359 desc = &((struct atiixp_dma_desc *)dma->desc_buf.area)[i]; atiixp_build_dma_packets()
365 desc->next = cpu_to_le32((u32)dma->desc_buf.addr); atiixp_build_dma_packets()
371 writel((u32)dma->desc_buf.addr | ATI_REG_LINKPTR_EN, atiixp_build_dma_packets()
372 chip->remap_addr + dma->ops->llp_offset); atiixp_build_dma_packets()
374 dma->period_bytes = period_bytes; atiixp_build_dma_packets()
375 dma->periods = periods; atiixp_build_dma_packets()
384 struct atiixp_dma *dma, atiixp_clear_dma_packets()
387 if (dma->desc_buf.area) { atiixp_clear_dma_packets()
388 writel(0, chip->remap_addr + dma->ops->llp_offset); atiixp_clear_dma_packets()
389 snd_dma_free_pages(&dma->desc_buf); atiixp_clear_dma_packets()
390 dma->desc_buf.area = NULL; atiixp_clear_dma_packets()
614 struct atiixp_dma *dma = runtime->private_data; snd_atiixp_pcm_pointer() local
619 curptr = readl(chip->remap_addr + dma->ops->dt_cur); snd_atiixp_pcm_pointer()
620 if (curptr < dma->buf_addr) snd_atiixp_pcm_pointer()
622 curptr -= dma->buf_addr; snd_atiixp_pcm_pointer()
623 if (curptr >= dma->buf_bytes) snd_atiixp_pcm_pointer()
628 readl(chip->remap_addr + dma->ops->dt_cur), dma->buf_addr); snd_atiixp_pcm_pointer()
636 struct atiixp_dma *dma) snd_atiixp_xrun_dma()
638 if (! dma->substream || ! dma->running) snd_atiixp_xrun_dma()
640 dev_dbg(chip->card->dev, "XRUN detected (DMA %d)\n", dma->ops->type); snd_atiixp_xrun_dma()
641 snd_pcm_stop_xrun(dma->substream); snd_atiixp_xrun_dma()
648 struct atiixp_dma *dma) snd_atiixp_update_dma()
650 if (! dma->substream || ! dma->running) snd_atiixp_update_dma()
652 snd_pcm_period_elapsed(dma->substream); snd_atiixp_update_dma()
674 struct atiixp_dma *dma = substream->runtime->private_data; snd_atiixp_pcm_trigger() local
677 if (snd_BUG_ON(!dma->ops->enable_transfer || snd_atiixp_pcm_trigger()
678 !dma->ops->flush_dma)) snd_atiixp_pcm_trigger()
684 dma->ops->enable_transfer(chip, 1); snd_atiixp_pcm_trigger()
685 dma->running = 1; snd_atiixp_pcm_trigger()
688 dma->ops->enable_transfer(chip, 0); snd_atiixp_pcm_trigger()
689 dma->running = 0; snd_atiixp_pcm_trigger()
698 dma->ops->flush_dma(chip); snd_atiixp_pcm_trigger()
796 struct atiixp_dma *dma = substream->runtime->private_data; snd_atiixp_pcm_hw_params() local
803 dma->buf_addr = substream->runtime->dma_addr; snd_atiixp_pcm_hw_params()
804 dma->buf_bytes = params_buffer_bytes(hw_params); snd_atiixp_pcm_hw_params()
806 err = atiixp_build_dma_packets(chip, dma, substream, snd_atiixp_pcm_hw_params()
826 struct atiixp_dma *dma = substream->runtime->private_data; snd_atiixp_pcm_hw_free() local
828 atiixp_clear_dma_packets(chip, dma, substream); snd_atiixp_pcm_hw_free()
858 struct atiixp_dma *dma, int pcm_type) snd_atiixp_pcm_open()
870 if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma)) snd_atiixp_pcm_open()
873 if (dma->opened) snd_atiixp_pcm_open()
875 dma->substream = substream; snd_atiixp_pcm_open()
877 dma->ac97_pcm_type = pcm_type; snd_atiixp_pcm_open()
885 runtime->private_data = dma; snd_atiixp_pcm_open()
889 dma->ops->enable_dma(chip, 1); snd_atiixp_pcm_open()
891 dma->opened = 1; snd_atiixp_pcm_open()
897 struct atiixp_dma *dma) snd_atiixp_pcm_close()
901 if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma)) snd_atiixp_pcm_close()
904 dma->ops->enable_dma(chip, 0); snd_atiixp_pcm_close()
906 dma->substream = NULL; snd_atiixp_pcm_close()
907 dma->opened = 0; snd_atiixp_pcm_close()
324 atiixp_build_dma_packets(struct atiixp_modem *chip, struct atiixp_dma *dma, struct snd_pcm_substream *substream, unsigned int periods, unsigned int period_bytes) atiixp_build_dma_packets() argument
383 atiixp_clear_dma_packets(struct atiixp_modem *chip, struct atiixp_dma *dma, struct snd_pcm_substream *substream) atiixp_clear_dma_packets() argument
635 snd_atiixp_xrun_dma(struct atiixp_modem *chip, struct atiixp_dma *dma) snd_atiixp_xrun_dma() argument
647 snd_atiixp_update_dma(struct atiixp_modem *chip, struct atiixp_dma *dma) snd_atiixp_update_dma() argument
857 snd_atiixp_pcm_open(struct snd_pcm_substream *substream, struct atiixp_dma *dma, int pcm_type) snd_atiixp_pcm_open() argument
896 snd_atiixp_pcm_close(struct snd_pcm_substream *substream, struct atiixp_dma *dma) snd_atiixp_pcm_close() argument
H A Dcs4281.c473 struct cs4281_dma dma[4]; member in struct:cs4281
670 struct cs4281_dma *dma = substream->runtime->private_data; snd_cs4281_trigger() local
676 dma->valDCR |= BA0_DCR_MSK; snd_cs4281_trigger()
677 dma->valFCR |= BA0_FCR_FEN; snd_cs4281_trigger()
680 dma->valDCR &= ~BA0_DCR_MSK; snd_cs4281_trigger()
681 dma->valFCR &= ~BA0_FCR_FEN; snd_cs4281_trigger()
685 snd_cs4281_pokeBA0(chip, dma->regDMR, dma->valDMR & ~BA0_DMR_DMA); snd_cs4281_trigger()
686 dma->valDMR |= BA0_DMR_DMA; snd_cs4281_trigger()
687 dma->valDCR &= ~BA0_DCR_MSK; snd_cs4281_trigger()
688 dma->valFCR |= BA0_FCR_FEN; snd_cs4281_trigger()
692 dma->valDMR &= ~(BA0_DMR_DMA|BA0_DMR_POLL); snd_cs4281_trigger()
693 dma->valDCR |= BA0_DCR_MSK; snd_cs4281_trigger()
694 dma->valFCR &= ~BA0_FCR_FEN; snd_cs4281_trigger()
696 if (dma->regFCR != BA0_FCR0) snd_cs4281_trigger()
697 dma->valFCR &= ~BA0_FCR_FEN; snd_cs4281_trigger()
703 snd_cs4281_pokeBA0(chip, dma->regDMR, dma->valDMR); snd_cs4281_trigger()
704 snd_cs4281_pokeBA0(chip, dma->regFCR, dma->valFCR); snd_cs4281_trigger()
705 snd_cs4281_pokeBA0(chip, dma->regDCR, dma->valDCR); snd_cs4281_trigger()
734 static void snd_cs4281_mode(struct cs4281 *chip, struct cs4281_dma *dma, snd_cs4281_mode() argument
740 dma->valDMR = BA0_DMR_TYPE_SINGLE | BA0_DMR_AUTO | snd_cs4281_mode()
743 dma->valDMR |= BA0_DMR_MONO; snd_cs4281_mode()
745 dma->valDMR |= BA0_DMR_USIGN; snd_cs4281_mode()
747 dma->valDMR |= BA0_DMR_BEND; snd_cs4281_mode()
749 case 8: dma->valDMR |= BA0_DMR_SIZE8; snd_cs4281_mode()
751 dma->valDMR |= BA0_DMR_SWAPC; snd_cs4281_mode()
753 case 32: dma->valDMR |= BA0_DMR_SIZE20; break; snd_cs4281_mode()
755 dma->frag = 0; /* for workaround */ snd_cs4281_mode()
756 dma->valDCR = BA0_DCR_TCIE | BA0_DCR_MSK; snd_cs4281_mode()
758 dma->valDCR |= BA0_DCR_HTCIE; snd_cs4281_mode()
760 snd_cs4281_pokeBA0(chip, dma->regDBA, runtime->dma_addr); snd_cs4281_mode()
761 snd_cs4281_pokeBA0(chip, dma->regDBC, runtime->buffer_size - 1); snd_cs4281_mode()
762 rec_mono = (chip->dma[1].valDMR & BA0_DMR_MONO) == BA0_DMR_MONO; snd_cs4281_mode()
770 if (dma->left_slot == chip->src_left_play_slot) { snd_cs4281_mode()
772 snd_BUG_ON(dma->right_slot != chip->src_right_play_slot); snd_cs4281_mode()
776 if (dma->left_slot == chip->src_left_rec_slot) { snd_cs4281_mode()
778 snd_BUG_ON(dma->right_slot != chip->src_right_rec_slot); snd_cs4281_mode()
784 if (dma->regFCR == BA0_FCR0) snd_cs4281_mode()
785 snd_cs4281_pokeBA0(chip, dma->regFCR, snd_cs4281_peekBA0(chip, dma->regFCR) & ~BA0_FCR_FEN); snd_cs4281_mode()
787 dma->valFCR = BA0_FCR_LS(dma->left_slot) | snd_cs4281_mode()
788 BA0_FCR_RS(capture && (dma->valDMR & BA0_DMR_MONO) ? 31 : dma->right_slot) | snd_cs4281_mode()
790 BA0_FCR_OF(dma->fifo_offset); snd_cs4281_mode()
791 snd_cs4281_pokeBA0(chip, dma->regFCR, dma->valFCR | (capture ? BA0_FCR_PSH : 0)); snd_cs4281_mode()
793 if (dma->regFCR == BA0_FCR0) snd_cs4281_mode()
794 snd_cs4281_pokeBA0(chip, dma->regFCR, dma->valFCR | BA0_FCR_FEN); snd_cs4281_mode()
796 snd_cs4281_pokeBA0(chip, dma->regFSIC, 0); snd_cs4281_mode()
813 struct cs4281_dma *dma = runtime->private_data; snd_cs4281_playback_prepare() local
817 snd_cs4281_mode(chip, dma, runtime, 0, 1); snd_cs4281_playback_prepare()
825 struct cs4281_dma *dma = runtime->private_data; snd_cs4281_capture_prepare() local
829 snd_cs4281_mode(chip, dma, runtime, 1, 1); snd_cs4281_capture_prepare()
837 struct cs4281_dma *dma = runtime->private_data; snd_cs4281_pointer() local
843 snd_cs4281_peekBA0(chip, dma->regDCC), runtime->buffer_size, snd_cs4281_pointer()
847 snd_cs4281_peekBA0(chip, dma->regDCC) - 1; snd_cs4281_pointer()
904 struct cs4281_dma *dma; snd_cs4281_playback_open() local
906 dma = &chip->dma[0]; snd_cs4281_playback_open()
907 dma->substream = substream; snd_cs4281_playback_open()
908 dma->left_slot = 0; snd_cs4281_playback_open()
909 dma->right_slot = 1; snd_cs4281_playback_open()
910 runtime->private_data = dma; snd_cs4281_playback_open()
923 struct cs4281_dma *dma; snd_cs4281_capture_open() local
925 dma = &chip->dma[1]; snd_cs4281_capture_open()
926 dma->substream = substream; snd_cs4281_capture_open()
927 dma->left_slot = 10; snd_cs4281_capture_open()
928 dma->right_slot = 11; snd_cs4281_capture_open()
929 runtime->private_data = dma; snd_cs4281_capture_open()
940 struct cs4281_dma *dma = substream->runtime->private_data; snd_cs4281_playback_close() local
942 dma->substream = NULL; snd_cs4281_playback_close()
948 struct cs4281_dma *dma = substream->runtime->private_data; snd_cs4281_capture_close() local
950 dma->substream = NULL; snd_cs4281_capture_close()
1589 struct cs4281_dma *dma = &chip->dma[tmp]; snd_cs4281_chip_init() local
1590 dma->regDBA = BA0_DBA0 + (tmp * 0x10); snd_cs4281_chip_init()
1591 dma->regDCA = BA0_DCA0 + (tmp * 0x10); snd_cs4281_chip_init()
1592 dma->regDBC = BA0_DBC0 + (tmp * 0x10); snd_cs4281_chip_init()
1593 dma->regDCC = BA0_DCC0 + (tmp * 0x10); snd_cs4281_chip_init()
1594 dma->regDMR = BA0_DMR0 + (tmp * 8); snd_cs4281_chip_init()
1595 dma->regDCR = BA0_DCR0 + (tmp * 8); snd_cs4281_chip_init()
1596 dma->regHDSR = BA0_HDSR0 + (tmp * 4); snd_cs4281_chip_init()
1597 dma->regFCR = BA0_FCR0 + (tmp * 4); snd_cs4281_chip_init()
1598 dma->regFSIC = BA0_FSIC0 + (tmp * 4); snd_cs4281_chip_init()
1599 dma->fifo_offset = tmp * CS4281_FIFO_SIZE; snd_cs4281_chip_init()
1600 snd_cs4281_pokeBA0(chip, dma->regFCR, snd_cs4281_chip_init()
1604 BA0_FCR_OF(dma->fifo_offset)); snd_cs4281_chip_init()
1613 chip->dma[0].valFCR = BA0_FCR_FEN | BA0_FCR_LS(0) | snd_cs4281_chip_init()
1616 BA0_FCR_OF(chip->dma[0].fifo_offset); snd_cs4281_chip_init()
1617 snd_cs4281_pokeBA0(chip, chip->dma[0].regFCR, chip->dma[0].valFCR); snd_cs4281_chip_init()
1807 unsigned int status, dma, val; snd_cs4281_interrupt() local
1819 for (dma = 0; dma < 4; dma++) snd_cs4281_interrupt()
1820 if (status & BA0_HISR_DMA(dma)) { snd_cs4281_interrupt()
1821 cdma = &chip->dma[dma]; snd_cs4281_interrupt()
H A Dad1889.h72 #define AD_DMA_RESIC 0x80 /* RES dma interrupt current byte count */
73 #define AD_DMA_RESIB 0x84 /* RES dma interrupt base byte count */
75 #define AD_DMA_ADCIC 0x88 /* ADC dma interrupt current byte count */
76 #define AD_DMA_ADCIB 0x8c /* ADC dma interrupt base byte count */
78 #define AD_DMA_SYNIC 0x90 /* synth dma interrupt current byte count */
79 #define AD_DMA_SYNIB 0x94 /* synth dma interrupt base byte count */
81 #define AD_DMA_WAVIC 0x98 /* wave dma interrupt current byte count */
82 #define AD_DMA_WAVIB 0x9c /* wave dma interrupt base byte count */
89 #define AD_DMA_ADC 0xa8 /* ADC dma control and status */
90 #define AD_DMA_SYNTH 0xb0 /* Synth dma control and status */
91 #define AD_DMA_WAV 0xb8 /* wave dma control and status */
92 #define AD_DMA_RES 0xa0 /* Resample dma control and status */
106 #define AD_DMA_DISR 0xc0 /* dma interrupt status */
125 #define AD_DMA_CHSS 0xc4 /* dma channel stop status */
/linux-4.1.27/sound/mips/
H A Dau1x00.c85 int dma; member in struct:audio_stream
198 disable_dma(stream->dma); au1000_dma_stop()
207 init_dma(stream->dma); au1000_dma_start()
208 if (get_dma_active_buffer(stream->dma) == 0) { au1000_dma_start()
209 clear_dma_done0(stream->dma); au1000_dma_start()
210 set_dma_addr0(stream->dma, stream->buffer->start); au1000_dma_start()
211 set_dma_count0(stream->dma, stream->period_size >> 1); au1000_dma_start()
212 set_dma_addr1(stream->dma, stream->buffer->next->start); au1000_dma_start()
213 set_dma_count1(stream->dma, stream->period_size >> 1); au1000_dma_start()
215 clear_dma_done1(stream->dma); au1000_dma_start()
216 set_dma_addr1(stream->dma, stream->buffer->start); au1000_dma_start()
217 set_dma_count1(stream->dma, stream->period_size >> 1); au1000_dma_start()
218 set_dma_addr0(stream->dma, stream->buffer->next->start); au1000_dma_start()
219 set_dma_count0(stream->dma, stream->period_size >> 1); au1000_dma_start()
221 enable_dma_buffers(stream->dma); au1000_dma_start()
222 start_dma(stream->dma); au1000_dma_start()
232 switch (get_dma_buffer_done(stream->dma)) { au1000_dma_interrupt()
235 clear_dma_done0(stream->dma); au1000_dma_interrupt()
236 set_dma_addr0(stream->dma, stream->buffer->next->start); au1000_dma_interrupt()
237 set_dma_count0(stream->dma, stream->period_size >> 1); au1000_dma_interrupt()
238 enable_dma_buffer0(stream->dma); au1000_dma_interrupt()
242 clear_dma_done1(stream->dma); au1000_dma_interrupt()
243 set_dma_addr1(stream->dma, stream->buffer->next->start); au1000_dma_interrupt()
244 set_dma_count1(stream->dma, stream->period_size >> 1); au1000_dma_interrupt()
245 enable_dma_buffer1(stream->dma); au1000_dma_interrupt()
248 printk(KERN_ERR "DMA %d missed interrupt.\n",stream->dma); au1000_dma_interrupt()
253 printk(KERN_ERR "DMA %d empty irq.\n",stream->dma); au1000_dma_interrupt()
414 location = get_dma_residue(stream->dma); snd_au1000_pointer()
470 au1000->stream[PLAYBACK]->dma = request_au1000_dma(au1000->dmaid[0], snd_au1000_pcm_new()
473 if (au1000->stream[PLAYBACK]->dma < 0) { snd_au1000_pcm_new()
477 au1000->stream[CAPTURE]->dma = request_au1000_dma(au1000->dmaid[1], snd_au1000_pcm_new()
480 if (au1000->stream[CAPTURE]->dma < 0){ snd_au1000_pcm_new()
485 set_dma_mode(au1000->stream[PLAYBACK]->dma, snd_au1000_pcm_new()
486 get_dma_mode(au1000->stream[PLAYBACK]->dma) & ~DMA_NC); snd_au1000_pcm_new()
487 set_dma_mode(au1000->stream[CAPTURE]->dma, snd_au1000_pcm_new()
488 get_dma_mode(au1000->stream[CAPTURE]->dma) & ~DMA_NC); snd_au1000_pcm_new()
566 if (au1000->stream[PLAYBACK]->dma >= 0) snd_au1000_free()
567 free_au1000_dma(au1000->stream[PLAYBACK]->dma); snd_au1000_free()
572 if (au1000->stream[CAPTURE]->dma >= 0) snd_au1000_free()
573 free_au1000_dma(au1000->stream[CAPTURE]->dma); snd_au1000_free()
640 au1000->stream[PLAYBACK]->dma = -1; au1000_ac97_probe()
648 au1000->stream[CAPTURE]->dma = -1; au1000_ac97_probe()
/linux-4.1.27/drivers/net/wireless/b43/
H A Ddma.c31 #include "dma.h"
36 #include <linux/dma-mapping.h>
50 static u32 b43_dma_address(struct b43_dma *dma, dma_addr_t dmaaddr, b43_dma_address() argument
58 if (dma->translation_in_low) { b43_dma_address()
60 addr |= dma->translation; b43_dma_address()
65 if (!dma->translation_in_low) { b43_dma_address()
67 addr |= dma->translation; b43_dma_address()
71 if (dma->translation_in_low) b43_dma_address()
112 addr = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW); op32_fill_descriptor()
113 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT); op32_fill_descriptor()
204 addrlo = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW); op64_fill_descriptor()
205 addrhi = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_HIGH); op64_fill_descriptor()
206 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT); op64_fill_descriptor()
690 bool parity = ring->dev->dma.parity; dmacontroller_setup()
697 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); dmacontroller_setup()
698 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); dmacontroller_setup()
699 addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH); dmacontroller_setup()
711 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); dmacontroller_setup()
712 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); dmacontroller_setup()
728 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); dmacontroller_setup()
729 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); dmacontroller_setup()
730 addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH); dmacontroller_setup()
745 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); dmacontroller_setup()
746 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); dmacontroller_setup()
923 /* test for ability to dma to txhdr_cache */ b43_setup_dmaring()
1038 #define destroy_ring(dma, ring) do { \
1039 b43_destroy_dmaring((dma)->ring, __stringify(ring)); \
1040 (dma)->ring = NULL; \
1045 struct b43_dma *dma; b43_dma_free() local
1049 dma = &dev->dma; b43_dma_free()
1051 destroy_ring(dma, rx_ring); b43_dma_free()
1052 destroy_ring(dma, tx_ring_AC_BK); b43_dma_free()
1053 destroy_ring(dma, tx_ring_AC_BE); b43_dma_free()
1054 destroy_ring(dma, tx_ring_AC_VI); b43_dma_free()
1055 destroy_ring(dma, tx_ring_AC_VO); b43_dma_free()
1056 destroy_ring(dma, tx_ring_mcast); b43_dma_free()
1116 struct b43_dma *dma = &dev->dma; b43_dma_init() local
1130 dma->translation = bcma_core_dma_translation(dev->dev->bdev); b43_dma_init()
1135 dma->translation = ssb_dma_translation(dev->dev->sdev); b43_dma_init()
1139 dma->translation_in_low = b43_dma_translation_in_low_word(dev, type); b43_dma_init()
1141 dma->parity = true; b43_dma_init()
1145 dma->parity = false; b43_dma_init()
1150 dma->tx_ring_AC_BK = b43_setup_dmaring(dev, 0, 1, type); b43_dma_init()
1151 if (!dma->tx_ring_AC_BK) b43_dma_init()
1154 dma->tx_ring_AC_BE = b43_setup_dmaring(dev, 1, 1, type); b43_dma_init()
1155 if (!dma->tx_ring_AC_BE) b43_dma_init()
1158 dma->tx_ring_AC_VI = b43_setup_dmaring(dev, 2, 1, type); b43_dma_init()
1159 if (!dma->tx_ring_AC_VI) b43_dma_init()
1162 dma->tx_ring_AC_VO = b43_setup_dmaring(dev, 3, 1, type); b43_dma_init()
1163 if (!dma->tx_ring_AC_VO) b43_dma_init()
1166 dma->tx_ring_mcast = b43_setup_dmaring(dev, 4, 1, type); b43_dma_init()
1167 if (!dma->tx_ring_mcast) b43_dma_init()
1171 dma->rx_ring = b43_setup_dmaring(dev, 0, 0, type); b43_dma_init()
1172 if (!dma->rx_ring) b43_dma_init()
1185 destroy_ring(dma, tx_ring_mcast); b43_dma_init()
1187 destroy_ring(dma, tx_ring_AC_VO); b43_dma_init()
1189 destroy_ring(dma, tx_ring_AC_VI); b43_dma_init()
1191 destroy_ring(dma, tx_ring_AC_BE); b43_dma_init()
1193 destroy_ring(dma, tx_ring_AC_BK); b43_dma_init()
1221 struct b43_dma *dma = &dev->dma; parse_cookie() local
1226 ring = dma->tx_ring_AC_BK; parse_cookie()
1229 ring = dma->tx_ring_AC_BE; parse_cookie()
1232 ring = dma->tx_ring_AC_VI; parse_cookie()
1235 ring = dma->tx_ring_AC_VO; parse_cookie()
1238 ring = dma->tx_ring_mcast; parse_cookie()
1384 ring = dev->dma.tx_ring_AC_VO; select_ring_by_priority()
1387 ring = dev->dma.tx_ring_AC_VI; select_ring_by_priority()
1390 ring = dev->dma.tx_ring_AC_BE; select_ring_by_priority()
1393 ring = dev->dma.tx_ring_AC_BK; select_ring_by_priority()
1397 ring = dev->dma.tx_ring_AC_BE; select_ring_by_priority()
1412 ring = dev->dma.tx_ring_mcast; b43_dma_tx()
1782 b43_dma_tx_suspend_ring(dev->dma.tx_ring_AC_BK); b43_dma_tx_suspend()
1783 b43_dma_tx_suspend_ring(dev->dma.tx_ring_AC_BE); b43_dma_tx_suspend()
1784 b43_dma_tx_suspend_ring(dev->dma.tx_ring_AC_VI); b43_dma_tx_suspend()
1785 b43_dma_tx_suspend_ring(dev->dma.tx_ring_AC_VO); b43_dma_tx_suspend()
1786 b43_dma_tx_suspend_ring(dev->dma.tx_ring_mcast); b43_dma_tx_suspend()
1791 b43_dma_tx_resume_ring(dev->dma.tx_ring_mcast); b43_dma_tx_resume()
1792 b43_dma_tx_resume_ring(dev->dma.tx_ring_AC_VO); b43_dma_tx_resume()
1793 b43_dma_tx_resume_ring(dev->dma.tx_ring_AC_VI); b43_dma_tx_resume()
1794 b43_dma_tx_resume_ring(dev->dma.tx_ring_AC_BE); b43_dma_tx_resume()
1795 b43_dma_tx_resume_ring(dev->dma.tx_ring_AC_BK); b43_dma_tx_resume()
/linux-4.1.27/drivers/gpu/drm/nouveau/include/nvkm/engine/
H A Ddmaobj.h17 /* creates a "physical" dma object from a struct nvkm_dmaobj */
/linux-4.1.27/arch/mips/boot/dts/include/dt-bindings/dma/
H A Dnbpfaxi.h14 * Use "#dma-cells = <2>;" with the second integer defining slave DMA flags:
/linux-4.1.27/arch/mips/include/asm/mach-bcm63xx/
H A Dbcm63xx_iudma.h7 * rx/tx dma descriptor
/linux-4.1.27/include/linux/spi/
H A Difx_modem.h14 dma-able addrs */
/linux-4.1.27/arch/powerpc/boot/dts/include/dt-bindings/dma/
H A Dnbpfaxi.h14 * Use "#dma-cells = <2>;" with the second integer defining slave DMA flags:
/linux-4.1.27/arch/arm64/boot/dts/include/dt-bindings/dma/
H A Dnbpfaxi.h14 * Use "#dma-cells = <2>;" with the second integer defining slave DMA flags:
/linux-4.1.27/arch/blackfin/mach-bf533/include/mach/
H A Ddma.h1 /* mach/dma.h - arch-specific DMA defines
/linux-4.1.27/arch/blackfin/mach-bf537/include/mach/
H A Ddma.h1 /* mach/dma.h - arch-specific DMA defines
/linux-4.1.27/arch/cris/arch-v32/kernel/
H A Dcrisksyms.c3 #include <arch/dma.h>
/linux-4.1.27/arch/cris/include/arch-v32/arch/
H A Dcache.h4 #include <arch/hwregs/dma.h>
/linux-4.1.27/arch/hexagon/kernel/
H A DMakefile15 obj-$(CONFIG_HAS_DMA) += dma.o
/linux-4.1.27/arch/ia64/kernel/
H A Ddma-mapping.c1 #include <linux/dma-mapping.h>
H A Dpci-swiotlb.c7 #include <linux/dma-mapping.h>
10 #include <asm/dma.h>
/linux-4.1.27/arch/m68k/include/asm/
H A Dpci.h4 #include <asm-generic/pci-dma-compat.h>
H A Ddvma.h2 * include/asm-m68k/dma.h
84 /* everything below this line is specific to dma used for the onboard
131 #define DMA_ISBROKEN(dma) ((dma)->revision == dvmarev1)
132 #define DMA_ISESC1(dma) ((dma)->revision == dvmaesc1)
177 #define DMA_SCSI_ON 0x02000000 /* Enable SCSI dma */
214 #define DMA_IRQ_ENTRY(dma, dregs) do { \
215 if(DMA_ISBROKEN(dma)) DMA_INTSOFF(dregs); \
218 #define DMA_IRQ_EXIT(dma, dregs) do { \
219 if(DMA_ISBROKEN(dma)) DMA_INTSON(dregs); \
223 #define DMA_RESET(dma) do { \
224 struct sparc_dma_registers *regs = dma->regs; \
233 if(dma->revision>dvmarev1) regs->cond_reg |= DMA_3CLKS; \
234 dma->running = 0; \
/linux-4.1.27/arch/metag/boot/dts/include/dt-bindings/dma/
H A Dnbpfaxi.h14 * Use "#dma-cells = <2>;" with the second integer defining slave DMA flags:
/linux-4.1.27/arch/arm/plat-omap/
H A DMakefile8 obj-y := sram.o dma.o counter_32k.o
/linux-4.1.27/arch/arm/include/asm/mach/
H A Ddma.h2 * arch/arm/include/asm/mach/dma.h
11 * (dma.c) and the architecture-specific DMA backends (dma-*.c)
49 extern int isa_dma_add(unsigned int, dma_t *dma);
/linux-4.1.27/arch/arm/mach-ep93xx/
H A DMakefile6 obj-$(CONFIG_EP93XX_DMA) += dma.o
H A Ddma.c2 * arch/arm/mach-ep93xx/dma.c
8 * This work is based on the original dma-m2p implementation with
22 #include <linux/dma-mapping.h>
28 #include <linux/platform_data/dma-ep93xx.h>
72 .name = "ep93xx-dma-m2p",
103 .name = "ep93xx-dma-m2m",
/linux-4.1.27/arch/arc/mm/
H A DMakefile9 obj-y := extable.o ioremap.o dma.o fault.o init.o
H A Ddma.c18 * plat_{dma,kernel}_addr_to_{kernel,dma}
21 #include <linux/dma-mapping.h>
22 #include <linux/dma-debug.h>
/linux-4.1.27/arch/arm/boot/dts/include/dt-bindings/dma/
H A Dnbpfaxi.h14 * Use "#dma-cells = <2>;" with the second integer defining slave DMA flags:
/linux-4.1.27/include/asm-generic/
H A Ddma.h6 * implementation from kernel/dma.c by not defining MAX_DMA_CHANNELS.
/linux-4.1.27/include/dt-bindings/dma/
H A Dnbpfaxi.h14 * Use "#dma-cells = <2>;" with the second integer defining slave DMA flags:
/linux-4.1.27/include/linux/
H A Dfsldma.h10 /* fsl dma API for external start */
/linux-4.1.27/drivers/dma/hsu/
H A Dhsu.c24 #include <linux/dma-mapping.h>
435 INIT_LIST_HEAD(&hsu->dma.channels); hsu_dma_probe()
440 vchan_init(&hsuc->vchan, &hsu->dma); hsu_dma_probe()
448 dma_cap_set(DMA_SLAVE, hsu->dma.cap_mask); hsu_dma_probe()
449 dma_cap_set(DMA_PRIVATE, hsu->dma.cap_mask); hsu_dma_probe()
451 hsu->dma.device_free_chan_resources = hsu_dma_free_chan_resources; hsu_dma_probe()
453 hsu->dma.device_prep_slave_sg = hsu_dma_prep_slave_sg; hsu_dma_probe()
455 hsu->dma.device_issue_pending = hsu_dma_issue_pending; hsu_dma_probe()
456 hsu->dma.device_tx_status = hsu_dma_tx_status; hsu_dma_probe()
458 hsu->dma.device_config = hsu_dma_slave_config; hsu_dma_probe()
459 hsu->dma.device_pause = hsu_dma_pause; hsu_dma_probe()
460 hsu->dma.device_resume = hsu_dma_resume; hsu_dma_probe()
461 hsu->dma.device_terminate_all = hsu_dma_terminate_all; hsu_dma_probe()
463 hsu->dma.src_addr_widths = HSU_DMA_BUSWIDTHS; hsu_dma_probe()
464 hsu->dma.dst_addr_widths = HSU_DMA_BUSWIDTHS; hsu_dma_probe()
465 hsu->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); hsu_dma_probe()
466 hsu->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; hsu_dma_probe()
468 hsu->dma.dev = chip->dev; hsu_dma_probe()
470 ret = dma_async_device_register(&hsu->dma); hsu_dma_probe()
484 dma_async_device_unregister(&hsu->dma); hsu_dma_remove()
H A Dhsu.h17 #include <linux/dma/hsu.h>
19 #include "../virt-dma.h"
110 struct dma_device dma; member in struct:hsu_dma
118 return container_of(ddev, struct hsu_dma, dma); to_hsu_dma()
/linux-4.1.27/include/linux/soc/ti/
H A Dknav_qmss.h27 #include <linux/dma-mapping.h>
75 int knav_queue_push(void *qhandle, dma_addr_t dma,
85 dma_addr_t *dma, unsigned *dma_sz);
86 void *knav_pool_desc_unmap(void *ph, dma_addr_t dma, unsigned dma_sz);
88 void *knav_pool_desc_dma_to_virt(void *ph, dma_addr_t dma);
/linux-4.1.27/drivers/spi/
H A Dspi-topcliff-pch.c192 struct pch_spi_dma_ctrl dma; member in struct:pch_spi_data
764 rx_dma_buf = data->dma.rx_buf_virt; pch_spi_copy_rx_data_for_dma()
770 rx_dma_sbuf = data->dma.rx_buf_virt; pch_spi_copy_rx_data_for_dma()
779 struct pch_spi_dma_ctrl *dma; pch_spi_start_transfer() local
783 dma = &data->dma; pch_spi_start_transfer()
803 dma_sync_sg_for_cpu(&data->master->dev, dma->sg_rx_p, dma->nent, pch_spi_start_transfer()
806 dma_sync_sg_for_cpu(&data->master->dev, dma->sg_tx_p, dma->nent, pch_spi_start_transfer()
808 memset(data->dma.tx_buf_virt, 0, PAGE_SIZE); pch_spi_start_transfer()
810 async_tx_ack(dma->desc_rx); pch_spi_start_transfer()
811 async_tx_ack(dma->desc_tx); pch_spi_start_transfer()
812 kfree(dma->sg_tx_p); pch_spi_start_transfer()
813 kfree(dma->sg_rx_p); pch_spi_start_transfer()
860 struct pch_spi_dma_ctrl *dma; pch_spi_request_dma() local
868 dma = &data->dma; pch_spi_request_dma()
877 param = &dma->param_tx; pch_spi_request_dma()
889 dma->chan_tx = chan; pch_spi_request_dma()
892 param = &dma->param_rx; pch_spi_request_dma()
901 dma_release_channel(dma->chan_tx); pch_spi_request_dma()
902 dma->chan_tx = NULL; pch_spi_request_dma()
906 dma->chan_rx = chan; pch_spi_request_dma()
911 struct pch_spi_dma_ctrl *dma; pch_spi_release_dma() local
913 dma = &data->dma; pch_spi_release_dma()
914 if (dma->chan_tx) { pch_spi_release_dma()
915 dma_release_channel(dma->chan_tx); pch_spi_release_dma()
916 dma->chan_tx = NULL; pch_spi_release_dma()
918 if (dma->chan_rx) { pch_spi_release_dma()
919 dma_release_channel(dma->chan_rx); pch_spi_release_dma()
920 dma->chan_rx = NULL; pch_spi_release_dma()
940 struct pch_spi_dma_ctrl *dma; pch_spi_handle_dma() local
942 dma = &data->dma; pch_spi_handle_dma()
976 tx_dma_buf = dma->tx_buf_virt; pch_spi_handle_dma()
981 tx_dma_sbuf = dma->tx_buf_virt; pch_spi_handle_dma()
1015 dma->sg_rx_p = kzalloc(sizeof(struct scatterlist)*num, GFP_ATOMIC); pch_spi_handle_dma()
1016 sg_init_table(dma->sg_rx_p, num); /* Initialize SG table */ pch_spi_handle_dma()
1018 sg = dma->sg_rx_p; pch_spi_handle_dma()
1023 sg_set_page(sg, virt_to_page(dma->rx_buf_virt), rem, pch_spi_handle_dma()
1029 sg_set_page(sg, virt_to_page(dma->rx_buf_virt), size, pch_spi_handle_dma()
1035 sg_set_page(sg, virt_to_page(dma->rx_buf_virt), size, pch_spi_handle_dma()
1039 sg_dma_address(sg) = dma->rx_buf_dma + sg->offset; pch_spi_handle_dma()
1041 sg = dma->sg_rx_p; pch_spi_handle_dma()
1042 desc_rx = dmaengine_prep_slave_sg(dma->chan_rx, sg, pch_spi_handle_dma()
1053 dma->nent = num; pch_spi_handle_dma()
1054 dma->desc_rx = desc_rx; pch_spi_handle_dma()
1075 dma->sg_tx_p = kzalloc(sizeof(struct scatterlist)*num, GFP_ATOMIC); pch_spi_handle_dma()
1076 sg_init_table(dma->sg_tx_p, num); /* Initialize SG table */ pch_spi_handle_dma()
1078 sg = dma->sg_tx_p; pch_spi_handle_dma()
1082 sg_set_page(sg, virt_to_page(dma->tx_buf_virt), size + head, pch_spi_handle_dma()
1088 sg_set_page(sg, virt_to_page(dma->tx_buf_virt), rem, pch_spi_handle_dma()
1094 sg_set_page(sg, virt_to_page(dma->tx_buf_virt), size, pch_spi_handle_dma()
1098 sg_dma_address(sg) = dma->tx_buf_dma + sg->offset; pch_spi_handle_dma()
1100 sg = dma->sg_tx_p; pch_spi_handle_dma()
1101 desc_tx = dmaengine_prep_slave_sg(dma->chan_tx, pch_spi_handle_dma()
1112 dma->nent = num; pch_spi_handle_dma()
1113 dma->desc_tx = desc_tx; pch_spi_handle_dma()
1317 struct pch_spi_dma_ctrl *dma; pch_free_dma_buf() local
1319 dma = &data->dma; pch_free_dma_buf()
1320 if (dma->tx_buf_dma) pch_free_dma_buf()
1322 dma->tx_buf_virt, dma->tx_buf_dma); pch_free_dma_buf()
1323 if (dma->rx_buf_dma) pch_free_dma_buf()
1325 dma->rx_buf_virt, dma->rx_buf_dma); pch_free_dma_buf()
1332 struct pch_spi_dma_ctrl *dma; pch_alloc_dma_buf() local
1334 dma = &data->dma; pch_alloc_dma_buf()
1336 dma->tx_buf_virt = dma_alloc_coherent(&board_dat->pdev->dev, pch_alloc_dma_buf()
1337 PCH_BUF_SIZE, &dma->tx_buf_dma, GFP_KERNEL); pch_alloc_dma_buf()
1339 dma->rx_buf_virt = dma_alloc_coherent(&board_dat->pdev->dev, pch_alloc_dma_buf()
1340 PCH_BUF_SIZE, &dma->rx_buf_dma, GFP_KERNEL); pch_alloc_dma_buf()
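The spi-topcliff-pch code above builds scatterlists by hand and feeds them to dmaengine_prep_slave_sg(); the consumer-side flow it follows is the standard dmaengine slave API. A minimal single-buffer sketch of one memory-to-device transfer, assuming a channel already obtained (e.g. via dma_request_slave_channel()) and an already-mapped buffer at 'buf_dma'; queue_tx() and its parameters are hypothetical:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

/* Sketch: configure, prepare, submit, and kick one slave transfer. */
static int queue_tx(struct dma_chan *chan, dma_addr_t buf_dma, size_t len,
		    dma_addr_t fifo_addr, dma_async_tx_callback done, void *arg)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_addr,	/* device FIFO bus address */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
	};
	struct dma_async_tx_descriptor *desc;

	dmaengine_slave_config(chan, &cfg);

	desc = dmaengine_prep_slave_single(chan, buf_dma, len,
					   DMA_MEM_TO_DEV,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EBUSY;

	desc->callback = done;		/* runs in tasklet context */
	desc->callback_param = arg;

	dmaengine_submit(desc);		/* queue the descriptor ... */
	dma_async_issue_pending(chan);	/* ... and start the engine */
	return 0;
}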
/linux-4.1.27/drivers/crypto/ccp/
H A Dccp-ops.c50 struct ccp_dma_info dma; member in struct:ccp_dm_workarea
76 struct ccp_dma_info dma; member in union:ccp_mem::__anon3807
273 cr[1] = op->src.u.dma.length - 1; ccp_perform_aes()
274 cr[2] = ccp_addr_lo(&op->src.u.dma); ccp_perform_aes()
277 | ccp_addr_hi(&op->src.u.dma); ccp_perform_aes()
278 cr[4] = ccp_addr_lo(&op->dst.u.dma); ccp_perform_aes()
280 | ccp_addr_hi(&op->dst.u.dma); ccp_perform_aes()
303 cr[1] = op->src.u.dma.length - 1; ccp_perform_xts_aes()
304 cr[2] = ccp_addr_lo(&op->src.u.dma); ccp_perform_xts_aes()
307 | ccp_addr_hi(&op->src.u.dma); ccp_perform_xts_aes()
308 cr[4] = ccp_addr_lo(&op->dst.u.dma); ccp_perform_xts_aes()
310 | ccp_addr_hi(&op->dst.u.dma); ccp_perform_xts_aes()
329 cr[1] = op->src.u.dma.length - 1; ccp_perform_sha()
330 cr[2] = ccp_addr_lo(&op->src.u.dma); ccp_perform_sha()
333 | ccp_addr_hi(&op->src.u.dma); ccp_perform_sha()
357 cr[2] = ccp_addr_lo(&op->src.u.dma); ccp_perform_rsa()
360 | ccp_addr_hi(&op->src.u.dma); ccp_perform_rsa()
361 cr[4] = ccp_addr_lo(&op->dst.u.dma); ccp_perform_rsa()
363 | ccp_addr_hi(&op->dst.u.dma); ccp_perform_rsa()
378 cr[1] = op->src.u.dma.length - 1; ccp_perform_passthru()
380 cr[1] = op->dst.u.dma.length - 1; ccp_perform_passthru()
383 cr[2] = ccp_addr_lo(&op->src.u.dma); ccp_perform_passthru()
385 | ccp_addr_hi(&op->src.u.dma); ccp_perform_passthru()
395 cr[4] = ccp_addr_lo(&op->dst.u.dma); ccp_perform_passthru()
397 | ccp_addr_hi(&op->dst.u.dma); ccp_perform_passthru()
418 cr[1] = op->src.u.dma.length - 1; ccp_perform_ecc()
419 cr[2] = ccp_addr_lo(&op->src.u.dma); ccp_perform_ecc()
421 | ccp_addr_hi(&op->src.u.dma); ccp_perform_ecc()
422 cr[4] = ccp_addr_lo(&op->dst.u.dma); ccp_perform_ecc()
424 | ccp_addr_hi(&op->dst.u.dma); ccp_perform_ecc()
540 wa->dma.address); ccp_dm_free()
542 if (wa->dma.address) ccp_dm_free()
543 dma_unmap_single(wa->dev, wa->dma.address, wa->length, ccp_dm_free()
544 wa->dma.dir); ccp_dm_free()
549 wa->dma.address = 0; ccp_dm_free()
569 &wa->dma.address); ccp_init_dm_workarea()
573 wa->dma.length = CCP_DMAPOOL_MAX_SIZE; ccp_init_dm_workarea()
581 wa->dma.address = dma_map_single(wa->dev, wa->address, len, ccp_init_dm_workarea()
583 if (!wa->dma.address) ccp_init_dm_workarea()
586 wa->dma.length = len; ccp_init_dm_workarea()
588 wa->dma.dir = dir; ccp_init_dm_workarea()
755 * because the dma length is an unsigned int. ccp_prepare_data()
784 op->src.u.dma.address = src->dm_wa.dma.address; ccp_prepare_data()
785 op->src.u.dma.offset = 0; ccp_prepare_data()
786 op->src.u.dma.length = (blocksize_op) ? block_size : cp_len; ccp_prepare_data()
791 op->src.u.dma.address = sg_dma_address(src->sg_wa.sg); ccp_prepare_data()
792 op->src.u.dma.offset = src->sg_wa.sg_used; ccp_prepare_data()
793 op->src.u.dma.length = op_len & ~(block_size - 1); ccp_prepare_data()
795 ccp_update_sg_workarea(&src->sg_wa, op->src.u.dma.length); ccp_prepare_data()
805 op->dst.u.dma.address = dst->dm_wa.dma.address; ccp_prepare_data()
806 op->dst.u.dma.offset = 0; ccp_prepare_data()
807 op->dst.u.dma.length = op->src.u.dma.length; ccp_prepare_data()
812 op->dst.u.dma.address = sg_dma_address(dst->sg_wa.sg); ccp_prepare_data()
813 op->dst.u.dma.offset = dst->sg_wa.sg_used; ccp_prepare_data()
814 op->dst.u.dma.length = op->src.u.dma.length; ccp_prepare_data()
825 if (op->dst.u.dma.address == dst->dm_wa.dma.address) ccp_process_data()
829 op->dst.u.dma.length); ccp_process_data()
850 op.dst.u.dma.address = wa->dma.address; ccp_copy_to_from_ksb()
851 op.dst.u.dma.length = wa->length; ccp_copy_to_from_ksb()
854 op.src.u.dma.address = wa->dma.address; ccp_copy_to_from_ksb()
855 op.src.u.dma.length = wa->length; ccp_copy_to_from_ksb()
1121 * operations we need to set the dma direction to BIDIRECTIONAL ccp_run_aes_cmd()
1294 * operations we need to set the dma direction to BIDIRECTIONAL ccp_run_xts_aes_cmd()
1639 op.src.u.dma.address = src.dma.address; ccp_run_rsa_cmd()
1640 op.src.u.dma.offset = 0; ccp_run_rsa_cmd()
1641 op.src.u.dma.length = i_len; ccp_run_rsa_cmd()
1642 op.dst.u.dma.address = dst.dm_wa.dma.address; ccp_run_rsa_cmd()
1643 op.dst.u.dma.offset = 0; ccp_run_rsa_cmd()
1644 op.dst.u.dma.length = o_len; ccp_run_rsa_cmd()
1723 * operations we need to set the dma direction to BIDIRECTIONAL ccp_run_passthru_cmd()
1746 * dma address at a time, each entry in the source scatterlist ccp_run_passthru_cmd()
1765 op.src.u.dma.address = sg_dma_address(src.sg_wa.sg); ccp_run_passthru_cmd()
1766 op.src.u.dma.offset = 0; ccp_run_passthru_cmd()
1767 op.src.u.dma.length = sg_dma_len(src.sg_wa.sg); ccp_run_passthru_cmd()
1770 op.dst.u.dma.address = sg_dma_address(dst.sg_wa.sg); ccp_run_passthru_cmd()
1771 op.dst.u.dma.offset = dst.sg_wa.sg_used; ccp_run_passthru_cmd()
1772 op.dst.u.dma.length = op.src.u.dma.length; ccp_run_passthru_cmd()
1871 op.src.u.dma.address = src.dma.address; ccp_run_ecc_mm_cmd()
1872 op.src.u.dma.offset = 0; ccp_run_ecc_mm_cmd()
1873 op.src.u.dma.length = src.length; ccp_run_ecc_mm_cmd()
1874 op.dst.u.dma.address = dst.dma.address; ccp_run_ecc_mm_cmd()
1875 op.dst.u.dma.offset = 0; ccp_run_ecc_mm_cmd()
1876 op.dst.u.dma.length = dst.length; ccp_run_ecc_mm_cmd()
2020 op.src.u.dma.address = src.dma.address; ccp_run_ecc_pm_cmd()
2021 op.src.u.dma.offset = 0; ccp_run_ecc_pm_cmd()
2022 op.src.u.dma.length = src.length; ccp_run_ecc_pm_cmd()
2023 op.dst.u.dma.address = dst.dma.address; ccp_run_ecc_pm_cmd()
2024 op.dst.u.dma.offset = 0; ccp_run_ecc_pm_cmd()
2025 op.dst.u.dma.length = dst.length; ccp_run_ecc_pm_cmd()
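ccp_init_dm_workarea() and ccp_dm_free() above bracket each operation with dma_map_single()/dma_unmap_single(). A minimal sketch of that streaming-mapping discipline, assuming 'dev' is the device performing the transfer (the ccp code checks the returned address directly; dma_mapping_error() is the recommended check):

#include <linux/dma-mapping.h>

/* Sketch: map a kernel buffer for a one-shot device read, then unmap.
 * While the device owns the mapping, the CPU must not touch the buffer.
 */
static int do_one_transfer(struct device *dev, void *buf, size_t len)
{
	dma_addr_t addr;

	addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, addr))
		return -ENOMEM;

	/* ... point the hardware at 'addr' and wait for completion ... */

	dma_unmap_single(dev, addr, len, DMA_TO_DEVICE);
	return 0;
}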
/linux-4.1.27/drivers/mtd/nand/
H A Dr852.h66 /* dma capabilities */
70 #define R852_DMA1 0x40 /* if set w/bit #7, dma is supported */
71 #define R852_DMA2 0x80 /* if set w/bit #6, dma is supported */
78 /* dma settings */
84 /* dma IRQ status */
87 /* dma IRQ enable */
115 /* dma area */
127 int dma_error; /* dma errors */
128 int dma_usable; /* is it possible to use dma */
/linux-4.1.27/drivers/pnp/
H A Dsupport.c71 return "dma"; pnp_resource_type_name()
111 struct pnp_dma *dma; dbg_pnp_show_option() local
161 dma = &option->u.dma; dbg_pnp_show_option()
162 len += scnprintf(buf + len, sizeof(buf) - len, "dma"); dbg_pnp_show_option()
163 if (!dma->map) dbg_pnp_show_option()
168 if (dma->map & (1 << i)) dbg_pnp_show_option()
174 "flags %#x", dma->map, dma->flags); dbg_pnp_show_option()
/linux-4.1.27/drivers/scsi/
H A Dsgiwd93.c17 #include <linux/dma-mapping.h>
36 dma_addr_t dma; member in struct:ip22_hostdata
48 /* space for hpc dma descriptors */
93 * byte, we tag on an extra zero length dma descriptor at the end of fill_hpc_entries()
116 * obvious). IMHO a better fix would be not to do these dma setups dma_setup()
127 hregs->ndptr = hdata->dma; dma_setup()
179 dma_addr_t dma = hdata->dma; init_hpc_chain() local
185 hcp->desc.pnext = (u32) (dma + sizeof(struct hpc_chunk)); init_hpc_chain()
188 dma += sizeof(struct hpc_chunk); init_hpc_chain()
192 hcp->desc.pnext = hdata->dma; init_hpc_chain()
253 &hdata->dma, GFP_KERNEL); sgiwd93_probe()
292 dma_free_noncoherent(&pdev->dev, HPC_DMA_SIZE, hdata->cpu, hdata->dma); sgiwd93_probe()
308 dma_free_noncoherent(&pdev->dev, HPC_DMA_SIZE, hdata->cpu, hdata->dma); sgiwd93_remove()
H A Dmac53c94.c46 struct dbdma_regs __iomem *dma; member in struct:fsc_state
108 struct dbdma_regs __iomem *dma = state->dma; mac53c94_host_reset() local
113 writel((RUN|PAUSE|FLUSH|WAKE) << 16, &dma->control); mac53c94_host_reset()
128 struct dbdma_regs __iomem *dma = state->dma; mac53c94_init() local
139 writel((RUN|PAUSE|FLUSH|WAKE) << 16, &dma->control); mac53c94_init()
197 struct dbdma_regs __iomem *dma = state->dma; mac53c94_interrupt() local
219 writel(RUN << 16, &dma->control); /* stop dma */ mac53c94_interrupt()
275 writel(virt_to_phys(state->dma_cmds), &dma->cmdptr); mac53c94_interrupt()
276 writel((RUN << 16) | RUN, &dma->control); mac53c94_interrupt()
314 writel(RUN << 16, &dma->control); /* stop dma */ mac53c94_interrupt()
316 /* should check dma status */ mac53c94_interrupt()
448 state->dma = (struct dbdma_regs __iomem *) mac53c94_probe()
451 if (state->regs == NULL || state->dma == NULL) { mac53c94_probe()
465 /* Space for dma command list: +1 for stop command, mac53c94_probe()
472 printk(KERN_ERR "mac53c94: couldn't allocate dma " mac53c94_probe()
502 if (state->dma != NULL) mac53c94_probe()
503 iounmap(state->dma); mac53c94_probe()
524 if (fp->dma) mac53c94_remove()
525 iounmap(fp->dma); mac53c94_remove()
/linux-4.1.27/drivers/net/ethernet/broadcom/
H A Dbcm63xx_enet.h19 /* maximum burst len for dma (4 bytes unit) */
207 /* hw view of rx & tx dma ring */
211 /* allocated size (in bytes) for rx & tx dma ring */
218 /* dma channel id for rx */
221 /* number of dma desc in rx ring */
224 /* cpu view of rx dma ring */
250 /* dma channel id for tx */
253 /* number of dma desc in tx ring */
256 /* maximum dma burst size */
259 /* cpu view of rx dma ring */
343 /* dma channel enable mask */
346 /* dma channel interrupt mask */
352 /* dma channel width */
355 /* dma descriptor shift value */
/linux-4.1.27/arch/microblaze/include/asm/
H A Ddma-mapping.h2 * Implements the generic device dma API for microblaze and the pci
11 * This file is base on powerpc and x86 dma-mapping.h versions
27 #include <linux/dma-debug.h>
28 #include <linux/dma-attrs.h>
30 #include <asm-generic/dma-coherent.h>
73 #include <asm-generic/dma-mapping-common.h>
H A Ddma.h13 /* we don't have a dma address limit. define it as zero to be
/linux-4.1.27/arch/sh/include/asm/
H A Ddma.h2 * include/asm-sh/dma.h
18 #include <asm-generic/dma.h>
111 /* arch/sh/drivers/dma/dma-api.c */
140 /* arch/sh/drivers/dma/dma-sysfs.c */
/linux-4.1.27/kernel/
H A Ddma.c2 * linux/kernel/dma.c: A DMA channel allocator. Inspired by linux/kernel/irq.c.
6 * 1994/12/26: Changes by Alex Nash to fix a minor bug in /proc/dma.
10 * assumption introduced because of those /proc/dma patches. -- Hennus]
20 #include <asm/dma.h>
67 * @device_id: reserving device ID string, used in /proc/dma
151 proc_create("dma", 0, NULL, &proc_dma_operations); proc_dma_init()
/linux-4.1.27/samples/kfifo/
H A Ddma-example.c2 * Sample fifo dma implementation
15 * This module shows how to handle fifo dma operations.
85 /* put here your code to set up and execute the dma operation */ example_init()
91 /* finish the dma operation and update the received data */ example_init()
114 /* put here your code to set up and execute the dma operation */ example_init()
120 /* finish the dma operation and update the transmitted data */ example_init()
/linux-4.1.27/drivers/net/wan/
H A Dhostess_sv11.c40 #include <asm/dma.h>
44 static int dma; variable
86 switch (dma) { hostess_open()
103 switch (dma) { hostess_open()
137 switch (dma) { hostess_close()
234 if (dma) { sv11_init()
245 if (dma == 1) sv11_init()
296 if (dma == 1) sv11_init()
299 if (dma) sv11_init()
315 if (dma) { sv11_shutdown()
316 if (dma == 1) sv11_shutdown()
330 module_param(dma, int, 0);
331 MODULE_PARM_DESC(dma, "Set this to 1 to use DMA1/DMA3 for TX/RX");
/linux-4.1.27/drivers/vfio/
H A Dvfio_iommu_type1.c96 struct vfio_dma *dma = rb_entry(node, struct vfio_dma, node); vfio_find_dma() local
98 if (start + size <= dma->iova) vfio_find_dma()
100 else if (start >= dma->iova + dma->size) vfio_find_dma()
103 return dma; vfio_find_dma()
112 struct vfio_dma *dma; vfio_link_dma() local
116 dma = rb_entry(parent, struct vfio_dma, node); vfio_link_dma()
118 if (new->iova + new->size <= dma->iova) vfio_link_dma()
336 static void vfio_unmap_unpin(struct vfio_iommu *iommu, struct vfio_dma *dma) vfio_unmap_unpin() argument
338 dma_addr_t iova = dma->iova, end = dma->iova + dma->size; vfio_unmap_unpin()
342 if (!dma->size) vfio_unmap_unpin()
355 iommu_unmap(d->domain, dma->iova, dma->size); vfio_unmap_unpin()
387 dma->prot, false); vfio_unmap_unpin()
396 static void vfio_remove_dma(struct vfio_iommu *iommu, struct vfio_dma *dma) vfio_remove_dma() argument
398 vfio_unmap_unpin(iommu, dma); vfio_remove_dma()
399 vfio_unlink_dma(iommu, dma); vfio_remove_dma()
400 kfree(dma); vfio_remove_dma()
420 struct vfio_dma *dma; vfio_dma_do_unmap() local
467 dma = vfio_find_dma(iommu, unmap->iova, 0); vfio_dma_do_unmap()
468 if (dma && dma->iova != unmap->iova) { vfio_dma_do_unmap()
472 dma = vfio_find_dma(iommu, unmap->iova + unmap->size - 1, 0); vfio_dma_do_unmap()
473 if (dma && dma->iova + dma->size != unmap->iova + unmap->size) { vfio_dma_do_unmap()
479 while ((dma = vfio_find_dma(iommu, unmap->iova, unmap->size))) { vfio_dma_do_unmap()
480 if (!iommu->v2 && unmap->iova > dma->iova) vfio_dma_do_unmap()
482 unmapped += dma->size; vfio_dma_do_unmap()
483 vfio_remove_dma(iommu, dma); vfio_dma_do_unmap()
557 struct vfio_dma *dma; vfio_dma_do_map() local
588 dma = kzalloc(sizeof(*dma), GFP_KERNEL); vfio_dma_do_map()
589 if (!dma) { vfio_dma_do_map()
594 dma->iova = iova; vfio_dma_do_map()
595 dma->vaddr = vaddr; vfio_dma_do_map()
596 dma->prot = prot; vfio_dma_do_map()
599 vfio_link_dma(iommu, dma); vfio_dma_do_map()
603 npage = vfio_pin_pages(vaddr + dma->size, vfio_dma_do_map()
612 ret = vfio_iommu_map(iommu, iova + dma->size, pfn, npage, prot); vfio_dma_do_map()
619 dma->size += npage << PAGE_SHIFT; vfio_dma_do_map()
623 vfio_remove_dma(iommu, dma); vfio_dma_do_map()
657 struct vfio_dma *dma; vfio_iommu_replay() local
660 dma = rb_entry(n, struct vfio_dma, node); vfio_iommu_replay()
661 iova = dma->iova; vfio_iommu_replay()
663 while (iova < dma->iova + dma->size) { vfio_iommu_replay()
674 while (iova + size < dma->iova + dma->size && vfio_iommu_replay()
680 size, dma->prot | domain->prot); vfio_iommu_replay()
/linux-4.1.27/drivers/dma/
H A Dsirf-dma.c11 #include <linux/dma-mapping.h>
84 struct dma_device dma; member in struct:sirfsoc_dma
204 for (i = 0; i < sdma->dma.chancnt; i++) { sirfsoc_dma_process_completed()
386 pm_runtime_get_sync(sdma->dma.dev); sirfsoc_dma_alloc_chan_resources()
392 dev_notice(sdma->dma.dev, "Memory allocation error. " sirfsoc_dma_alloc_chan_resources()
442 pm_runtime_put(sdma->dma.dev); sirfsoc_dma_free_chan_resources()
647 struct dma_device *dma; sirfsoc_dma_probe() local
704 dma = &sdma->dma; sirfsoc_dma_probe()
705 dma->dev = dev; sirfsoc_dma_probe()
707 dma->device_alloc_chan_resources = sirfsoc_dma_alloc_chan_resources; sirfsoc_dma_probe()
708 dma->device_free_chan_resources = sirfsoc_dma_free_chan_resources; sirfsoc_dma_probe()
709 dma->device_issue_pending = sirfsoc_dma_issue_pending; sirfsoc_dma_probe()
710 dma->device_config = sirfsoc_dma_slave_config; sirfsoc_dma_probe()
711 dma->device_pause = sirfsoc_dma_pause_chan; sirfsoc_dma_probe()
712 dma->device_resume = sirfsoc_dma_resume_chan; sirfsoc_dma_probe()
713 dma->device_terminate_all = sirfsoc_dma_terminate_all; sirfsoc_dma_probe()
714 dma->device_tx_status = sirfsoc_dma_tx_status; sirfsoc_dma_probe()
715 dma->device_prep_interleaved_dma = sirfsoc_dma_prep_interleaved; sirfsoc_dma_probe()
716 dma->device_prep_dma_cyclic = sirfsoc_dma_prep_cyclic; sirfsoc_dma_probe()
717 dma->src_addr_widths = SIRFSOC_DMA_BUSWIDTHS; sirfsoc_dma_probe()
718 dma->dst_addr_widths = SIRFSOC_DMA_BUSWIDTHS; sirfsoc_dma_probe()
719 dma->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); sirfsoc_dma_probe()
721 INIT_LIST_HEAD(&dma->channels); sirfsoc_dma_probe()
722 dma_cap_set(DMA_SLAVE, dma->cap_mask); sirfsoc_dma_probe()
723 dma_cap_set(DMA_CYCLIC, dma->cap_mask); sirfsoc_dma_probe()
724 dma_cap_set(DMA_INTERLEAVE, dma->cap_mask); sirfsoc_dma_probe()
725 dma_cap_set(DMA_PRIVATE, dma->cap_mask); sirfsoc_dma_probe()
730 schan->chan.device = dma; sirfsoc_dma_probe()
740 list_add_tail(&schan->chan.device_node, &dma->channels); sirfsoc_dma_probe()
748 ret = dma_async_device_register(dma); sirfsoc_dma_probe()
765 dma_async_device_unregister(dma); sirfsoc_dma_probe()
779 dma_async_device_unregister(&sdma->dma); sirfsoc_dma_remove()
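Both hsu.c and sirf-dma.c above follow the same provider-side recipe: fill in a struct dma_device's callbacks, set the capability mask, link the channels onto the device, and call dma_async_device_register(). A condensed sketch of that recipe; register_provider() and the foo_* callbacks are hypothetical stand-ins for a real driver's implementations:

#include <linux/dmaengine.h>
#include <linux/list.h>

/* Hypothetical driver callbacks; a real driver implements these. */
static int foo_alloc_chan_resources(struct dma_chan *chan) { return 0; }
static void foo_free_chan_resources(struct dma_chan *chan) { }
static struct dma_async_tx_descriptor *
foo_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
		  unsigned int sg_len, enum dma_transfer_direction dir,
		  unsigned long flags, void *context) { return NULL; }
static void foo_issue_pending(struct dma_chan *chan) { }
static enum dma_status foo_tx_status(struct dma_chan *chan,
				     dma_cookie_t cookie,
				     struct dma_tx_state *state)
{ return DMA_COMPLETE; }

/* Sketch: register 'nr' channels of a slave-DMA provider with the core. */
static int register_provider(struct device *dev, struct dma_device *dd,
			     struct dma_chan *chans, unsigned int nr)
{
	unsigned int i;

	dma_cap_zero(dd->cap_mask);
	dma_cap_set(DMA_SLAVE, dd->cap_mask);

	dd->dev = dev;
	dd->device_alloc_chan_resources = foo_alloc_chan_resources;
	dd->device_free_chan_resources	= foo_free_chan_resources;
	dd->device_prep_slave_sg	= foo_prep_slave_sg;
	dd->device_issue_pending	= foo_issue_pending;
	dd->device_tx_status		= foo_tx_status;

	INIT_LIST_HEAD(&dd->channels);
	for (i = 0; i < nr; i++) {
		chans[i].device = dd;
		list_add_tail(&chans[i].device_node, &dd->channels);
	}

	return dma_async_device_register(dd);
}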
/linux-4.1.27/drivers/usb/gadget/udc/
H A Dnet2280.c43 #include <linux/dma-mapping.h>
262 if (!ep->dma) { /* pio, per-packet */ net2280_enable()
270 } else { /* dma, per-request */ net2280_enable()
275 /* for short OUT transfers, dma completions can't net2280_enable()
291 ep->dma ? "dma" : "pio", max); net2280_enable()
332 /* disable the dma, irqs, endpoint... */ ep_reset_228x()
333 if (ep->dma) { ep_reset_228x()
334 writel(0, &ep->dma->dmactl); ep_reset_228x()
338 &ep->dma->dmastat); ep_reset_228x()
408 /* disable the dma, irqs, endpoint... */ ep_reset_338x()
409 if (ep->dma) { ep_reset_338x()
410 writel(0, &ep->dma->dmactl); ep_reset_338x()
416 &ep->dma->dmastat); ep_reset_338x()
418 dmastat = readl(&ep->dma->dmastat); ep_reset_338x()
422 writel(0x5a, &ep->dma->dmastat); ep_reset_338x()
467 ep->dma ? "dma" : "pio", _ep->name); net2280_disable()
472 if (!ep->dma && ep->num >= 1 && ep->num <= 4) net2280_disable()
473 ep->dma = &ep->dev->dma[ep->num - 1]; net2280_disable()
499 /* this dma descriptor may be swapped with the previous dummy */ net2280_alloc_request()
500 if (ep->dma) { net2280_alloc_request()
542 * one packet. ep-a..ep-d should use dma instead.
731 /* fill out dma descriptor to match a given request */ fill_dma_desc()
755 td->dmaaddr = cpu_to_le32 (req->req.dma); fill_dma_desc()
757 /* 2280 may be polling VALID_BIT through ep->dma->dmadesc */ fill_dma_desc()
773 static inline void spin_stop_dma(struct net2280_dma_regs __iomem *dma) spin_stop_dma() argument
775 handshake(&dma->dmactl, BIT(DMA_ENABLE), 0, 50); spin_stop_dma()
778 static inline void stop_dma(struct net2280_dma_regs __iomem *dma) stop_dma() argument
780 writel(readl(&dma->dmactl) & ~BIT(DMA_ENABLE), &dma->dmactl); stop_dma()
781 spin_stop_dma(dma); stop_dma()
786 struct net2280_dma_regs __iomem *dma = ep->dma; start_queue() local
792 writel(tmp, &dma->dmacount); start_queue()
793 writel(readl(&dma->dmastat), &dma->dmastat); start_queue()
795 writel(td_dma, &dma->dmadesc); start_queue()
798 writel(dmactl, &dma->dmactl); start_queue()
803 writel(BIT(DMA_START), &dma->dmastat); start_queue()
812 struct net2280_dma_regs __iomem *dma = ep->dma; start_dma() local
816 /* on this path we "know" there's no dma active (yet) */ start_dma()
817 WARN_ON(readl(&dma->dmactl) & BIT(DMA_ENABLE)); start_dma()
818 writel(0, &ep->dma->dmactl); start_dma()
828 writel(readl(&dma->dmastat), &dma->dmastat); start_dma()
831 writel(req->req.dma, &dma->dmaaddr); start_dma()
834 /* dma irq, faking scatterlist status */ start_dma()
837 &dma->dmacount); start_dma()
841 writel(BIT(DMA_ENABLE), &dma->dmactl); start_dma()
842 writel(BIT(DMA_START), &dma->dmastat); start_dma()
849 /* force packet boundaries between dma requests, but prevent the start_dma()
905 if (ep->dma) done()
957 if (ep->dma && _req->length == 0) { net2280_queue()
962 /* set up dma mapping in case the caller didn't */ net2280_queue()
963 if (ep->dma) { net2280_queue()
980 !((dev->quirks & PLX_SUPERSPEED) && ep->dma && net2280_queue()
984 if (ep->dma) net2280_queue()
1031 } else if (ep->dma) { net2280_queue()
1095 tmp = readl(&ep->dma->dmacount); scan_dma_completions()
1120 "%s dma, discard %d len %d\n", scan_dma_completions()
1147 writel(BIT(DMA_ABORT), &ep->dma->dmastat); abort_dma()
1148 spin_stop_dma(ep->dma); abort_dma()
1150 stop_dma(ep->dma); abort_dma()
1161 if (ep->dma) nuke()
1190 /* quiesce dma while we patch the queue */ net2280_dequeue()
1193 if (ep->dma) { net2280_dequeue()
1194 dmactl = readl(&ep->dma->dmactl); net2280_dequeue()
1196 stop_dma(ep->dma); net2280_dequeue()
1214 if (ep->dma) { net2280_dequeue()
1215 ep_dbg(ep->dev, "unlink (%s) dma\n", _ep->name); net2280_dequeue()
1222 readl(&ep->dma->dmacount), net2280_dequeue()
1236 if (ep->dma) { net2280_dequeue()
1237 /* turn off dma on inactive queues */ net2280_dequeue()
1239 stop_dma(ep->dma); net2280_dequeue()
1243 writel(dmactl, &ep->dma->dmactl); net2280_dequeue()
1624 if (!ep->dma) registers_show()
1628 " dma\tctl %08x stat %08x count %08x\n" registers_show()
1630 readl(&ep->dma->dmactl), registers_show()
1631 readl(&ep->dma->dmastat), registers_show()
1632 readl(&ep->dma->dmacount), registers_show()
1633 readl(&ep->dma->dmaaddr), registers_show()
1634 readl(&ep->dma->dmadesc)); registers_show()
1699 ep->dma ? "dma" : "pio", ep->fifo_size queues_show()
1718 if (ep->dma && req->td_dma == readl(&ep->dma->dmadesc)) queues_show()
1724 readl(&ep->dma->dmacount)); queues_show()
1735 if (ep->dma) { queues_show()
1769 /* another driver-specific mode might be a request type doing dma
1937 /* clear old dma and irq state */ usb_reset_228x()
1940 if (ep->dma) usb_reset_228x()
1975 /* clear old dma and irq state */ usb_reset_338x()
1979 if (ep->dma) usb_reset_338x()
2025 ep->dma = &dev->dma[tmp - 1]; usb_reinit_228x()
2064 ep->dma = &dev->dma[i - 1]; usb_reinit_338x()
2345 * also works for dma-capable endpoints, in pio mode or just
2432 if (likely(ep->dma)) { handle_ep_small()
2444 /* any preceding dma transfers must finish. handle_ep_small()
2445 * dma handles (M >= N), may empty the queue handle_ep_small()
2460 count = readl(&ep->dma->dmacount); handle_ep_small()
2462 if (readl(&ep->dma->dmadesc) handle_ep_small()
2471 writel(BIT(DMA_ABORT), &ep->dma->dmastat); handle_ep_small()
2472 spin_stop_dma(ep->dma); handle_ep_small()
2491 /* (re)start dma if needed, stop NAKing */ handle_ep_small()
2496 ep_dbg(ep->dev, "%s dma ep_stat %08x ??\n", handle_ep_small()
2834 if (ep->dma) handle_stat0_irqs_superspeed()
3062 if ((dev->quirks & PLX_SUPERSPEED) && e->dma) handle_stat0_irqs()
3237 struct net2280_dma_regs __iomem *dma; variable in typeref:struct:__iomem
3245 dma = ep->dma;
3247 if (!dma)
3250 /* clear ep's dma status */
3251 tmp = readl(&dma->dmastat);
3252 writel(tmp, &dma->dmastat);
3254 /* dma sync*/
3256 u32 r_dmacount = readl(&dma->dmacount);
3267 stop_dma(ep->dma);
3281 /* disable dma on inactive queues; else maybe restart */
3283 tmp = readl(&dma->dmactl);
3293 ep_err(dev, "pci dma error; stat %08x\n", stat);
3318 /* handle disconnect, dma, and more */ net2280_irq()
3445 dev->dma = (struct net2280_dma_regs __iomem *) (base + 0x0180); net2280_probe()
3506 /* NOTE: we know only the 32 LSBs of dma addresses may be nonzero */ net2280_probe()
/linux-4.1.27/drivers/media/pci/cx88/
H A Dcx88-vbi.c54 VBI_LINE_LENGTH, buf->risc.dma); cx8800_start_vbi_dma()
71 /* start dma */ cx8800_start_vbi_dma()
82 /* stop dma */ cx8800_stop_vbi_dma()
102 dprintk(2,"restart_queue [%p/%d]: restart dma\n", cx8800_restart_vbi_queue()
159 pci_free_consistent(dev->pci, risc->size, risc->cpu, risc->dma); buffer_finish()
171 buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 8); buffer_queue()
173 buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 8); buffer_queue()
187 prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma); buffer_queue()
/linux-4.1.27/drivers/gpu/drm/imx/
H A Dipuv3-plane.h24 int dma; member in struct:ipu_plane
36 int dma, int dp, unsigned int possible_crtcs,
/linux-4.1.27/arch/x86/include/asm/xen/
H A Dpage-coherent.h5 #include <linux/dma-attrs.h>
6 #include <linux/dma-mapping.h>
/linux-4.1.27/arch/arm/mach-s3c24xx/include/mach/
H A Ddma.h1 /* arch/arm/mach-s3c2410/include/mach/dma.h
18 /* We use `virtual` dma channels to hide the fact we have only a limited
/linux-4.1.27/arch/alpha/include/asm/
H A Ddma-mapping.h4 #include <linux/dma-attrs.h>
13 #include <asm-generic/dma-mapping-common.h>
/linux-4.1.27/drivers/media/platform/exynos4-is/
H A Dfimc-isp-video.c32 #include <media/videobuf2-dma-contig.h>
90 struct param_dma_output *dma = __get_isp_dma2(is); isp_video_capture_start_streaming() local
99 dma->cmd = DMA_OUTPUT_COMMAND_ENABLE; isp_video_capture_start_streaming()
100 dma->notify_dma_done = DMA_OUTPUT_NOTIFY_DMA_DONE_ENABLE; isp_video_capture_start_streaming()
101 dma->buffer_address = is->is_dma_p_region + isp_video_capture_start_streaming()
103 dma->buffer_number = video->reqbufs_count; isp_video_capture_start_streaming()
104 dma->dma_out_mask = video->buf_mask; isp_video_capture_start_streaming()
107 "buf_count: %d, planes: %d, dma addr table: %#x\n", isp_video_capture_start_streaming()
109 dma->buffer_address); isp_video_capture_start_streaming()
132 struct param_dma_output *dma = __get_isp_dma2(is); isp_video_capture_stop_streaming() local
139 dma->cmd = DMA_OUTPUT_COMMAND_DISABLE; isp_video_capture_stop_streaming()
140 dma->notify_dma_done = DMA_OUTPUT_NOTIFY_DMA_DONE_DISABLE; isp_video_capture_stop_streaming()
141 dma->buffer_number = 0; isp_video_capture_stop_streaming()
142 dma->buffer_address = 0; isp_video_capture_stop_streaming()
143 dma->dma_out_mask = 0; isp_video_capture_stop_streaming()
424 struct param_dma_output *dma = __get_isp_dma2(is); isp_video_s_fmt_mplane() local
431 dma->format = DMA_OUTPUT_FORMAT_BAYER; isp_video_s_fmt_mplane()
432 dma->order = DMA_OUTPUT_ORDER_GB_BG; isp_video_s_fmt_mplane()
433 dma->plane = ifmt->memplanes; isp_video_s_fmt_mplane()
434 dma->bitwidth = ifmt->depth[0]; isp_video_s_fmt_mplane()
435 dma->width = pixm->width; isp_video_s_fmt_mplane()
436 dma->height = pixm->height; isp_video_s_fmt_mplane()
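The fimc-isp hits above program capture by filling a param_dma_output block: enable plus buffer bookkeeping on start, everything zeroed on stop. A condensed sketch of that pairing; the field and constant names come from the excerpt, the helper itself is hypothetical:

static void isp_dma_output_set(struct param_dma_output *dma,
			       u32 buf_count, u32 buf_mask, bool on)
{
	if (on) {
		dma->cmd = DMA_OUTPUT_COMMAND_ENABLE;
		dma->notify_dma_done = DMA_OUTPUT_NOTIFY_DMA_DONE_ENABLE;
		dma->buffer_number = buf_count;
		dma->dma_out_mask = buf_mask;
	} else {
		dma->cmd = DMA_OUTPUT_COMMAND_DISABLE;
		dma->notify_dma_done = DMA_OUTPUT_NOTIFY_DMA_DONE_DISABLE;
		dma->buffer_number = 0;
		dma->buffer_address = 0;
		dma->dma_out_mask = 0;
	}
}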
/linux-4.1.27/drivers/usb/musb/
H A Dmusb_gadget.c43 #include <linux/dma-mapping.h>
54 /* Maps the buffer to dma */
60 struct dma_controller *dma = musb->dma_controller; map_dma_buffer() local
64 if (!is_dma_capable() || !musb_ep->dma) map_dma_buffer()
71 if (dma->is_compatible) map_dma_buffer()
72 compatible = dma->is_compatible(musb_ep->dma, map_dma_buffer()
78 if (request->request.dma == DMA_ADDR_INVALID) { map_dma_buffer()
93 request->request.dma = dma_addr; map_dma_buffer()
97 request->request.dma, map_dma_buffer()
106 /* Unmap the buffer from dma and maps it back to cpu */ unmap_dma_buffer()
112 if (!is_buffer_mapped(request) || !musb_ep->dma) unmap_dma_buffer()
115 if (request->request.dma == DMA_ADDR_INVALID) { unmap_dma_buffer()
122 request->request.dma, unmap_dma_buffer()
127 request->request.dma = DMA_ADDR_INVALID; unmap_dma_buffer()
130 request->request.dma, unmap_dma_buffer()
167 if (!dma_mapping_error(&musb->g.dev, request->dma))
198 if (is_dma_capable() && ep->dma) { nuke()
219 value = c->channel_abort(ep->dma); nuke()
222 c->channel_release(ep->dma); nuke()
223 ep->dma = NULL; nuke()
238 * from the usb core ... sequenced a bit differently from mentor dma.
275 if (dma_channel_status(musb_ep->dma) == MUSB_DMA_STATUS_BUSY) { txstate()
276 dev_dbg(musb->controller, "dma pending...\n"); txstate()
310 musb_ep->dma->max_len); txstate()
312 use_dma = (request->dma != DMA_ADDR_INVALID && request_size); txstate()
319 musb_ep->dma->desired_mode = 0; txstate()
321 musb_ep->dma->desired_mode = 1; txstate()
324 musb_ep->dma, musb_ep->packet_sz, txstate()
325 musb_ep->dma->desired_mode, txstate()
326 request->dma + request->actual, request_size); txstate()
328 if (musb_ep->dma->desired_mode == 0) { txstate()
382 * OK since the transfer dma glue (between CPPI and txstate()
394 musb_ep->dma, musb_ep->packet_sz, txstate()
396 request->dma + request->actual, txstate()
399 c->channel_release(musb_ep->dma); txstate()
400 musb_ep->dma = NULL; txstate()
407 musb_ep->dma, musb_ep->packet_sz, txstate()
409 request->dma + request->actual, txstate()
416 * Unmap the dma buffer back to cpu if dma channel txstate()
431 musb_ep->end_point.name, use_dma ? "dma" : "pio", txstate()
450 struct dma_channel *dma; musb_g_tx() local
459 dma = is_dma_capable() ? musb_ep->dma : NULL; musb_g_tx()
481 if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) { musb_g_tx()
486 dev_dbg(musb->controller, "%s dma still busy?\n", musb_ep->end_point.name); musb_g_tx()
493 if (dma && (csr & MUSB_TXCSR_DMAENAB)) { musb_g_tx()
501 request->actual += musb_ep->dma->actual_len; musb_g_tx()
503 epnum, csr, musb_ep->dma->actual_len, request); musb_g_tx()
514 || (is_dma && (!dma->desired_mode || musb_g_tx()
587 if (dma_channel_status(musb_ep->dma) == MUSB_DMA_STATUS_BUSY) { rxstate()
600 struct dma_channel *channel = musb_ep->dma; rxstate()
610 request->dma + request->actual, rxstate()
648 channel = musb_ep->dma; rxstate()
691 musb_ep->dma->desired_mode = 1; rxstate()
701 musb_ep->dma->desired_mode = 0; rxstate()
708 request->dma rxstate()
724 channel = musb_ep->dma; rxstate()
747 musb_ep->dma->desired_mode = 0; rxstate()
749 musb_ep->dma->desired_mode = 1; rxstate()
758 request->dma rxstate()
777 struct dma_channel *channel = musb_ep->dma; rxstate()
778 u32 dma_addr = request->dma + request->actual; rxstate()
791 * Unmap the dma buffer back to cpu if dma channel rxstate()
838 struct dma_channel *dma; musb_g_rx() local
855 dma = is_dma_capable() ? musb_ep->dma : NULL; musb_g_rx()
858 csr, dma ? " (dma)" : "", request); musb_g_rx()
881 if (dma_channel_status(dma) == MUSB_DMA_STATUS_BUSY) { musb_g_rx()
888 if (dma && (csr & MUSB_RXCSR_DMAENAB)) { musb_g_rx()
895 request->actual += musb_ep->dma->actual_len; musb_g_rx()
897 dev_dbg(musb->controller, "RXCSR%d %04x, dma off, %04x, len %zu, req %p\n", musb_g_rx()
900 musb_ep->dma->actual_len, request); musb_g_rx()
905 if ((dma->desired_mode == 0 && !hw_ep->rx_double_buffered) musb_g_rx()
906 || (dma->actual_len musb_g_rx()
915 && (musb_ep->dma->actual_len musb_g_rx()
1108 musb_ep->dma = c->channel_alloc(c, hw_ep, musb_gadget_enable()
1111 musb_ep->dma = NULL; musb_gadget_enable()
1126 musb_ep->dma ? "dma, " : "", musb_gadget_enable()
1198 request->request.dma = DMA_ADDR_INVALID; musb_alloc_request()
1220 dma_addr_t dma; member in struct:free_record
1323 /* ... else abort the dma transfer ... */ musb_gadget_dequeue()
1324 else if (is_dma_capable() && musb_ep->dma) { musb_gadget_dequeue()
1329 status = c->channel_abort(musb_ep->dma); musb_gadget_dequeue()
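map_dma_buffer()/unmap_dma_buffer() above implement the standard streaming-DMA handshake: map the request buffer, fall back to PIO if the mapping fails, and unmap once the transfer completes. A minimal sketch of that pattern (names are placeholders, not musb's):

#include <linux/dma-mapping.h>

static int map_for_tx(struct device *dev, void *buf, size_t len,
		      dma_addr_t *addr)
{
	*addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *addr))
		return -ENOMEM;		/* caller falls back to PIO */
	return 0;
	/* after completion: dma_unmap_single(dev, *addr, len, DMA_TO_DEVICE); */
}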
/linux-4.1.27/sound/atmel/
H A Dabdac.c13 #include <linux/dma-mapping.h>
27 #include <linux/platform_data/dma-dw.h>
28 #include <linux/dma/dw.h>
94 struct atmel_abdac_dma dma; member in struct:atmel_abdac
121 struct dma_chan *chan = dac->dma.chan; atmel_abdac_prepare_dma()
148 dac->dma.cdesc = cdesc; atmel_abdac_prepare_dma()
208 dw_dma_cyclic_free(dac->dma.chan); atmel_abdac_hw_params()
217 dw_dma_cyclic_free(dac->dma.chan); atmel_abdac_hw_free()
246 retval = dw_dma_cyclic_start(dac->dma.chan); atmel_abdac_trigger()
254 dw_dma_cyclic_stop(dac->dma.chan); atmel_abdac_trigger()
275 bytes = dw_dma_get_src_addr(dac->dma.chan); atmel_abdac_pointer()
479 dac->dma.chan = dma_request_channel(mask, filter, &pdata->dws); atmel_abdac_probe()
480 if (dac->dma.chan) { atmel_abdac_probe()
490 dmaengine_slave_config(dac->dma.chan, &dma_conf); atmel_abdac_probe()
493 if (!pdata->dws.dma_dev || !dac->dma.chan) { atmel_abdac_probe()
518 dac->regs, dev_name(&dac->dma.chan->dev->device)); atmel_abdac_probe()
523 dma_release_channel(dac->dma.chan); atmel_abdac_probe()
524 dac->dma.chan = NULL; atmel_abdac_probe()
543 dw_dma_cyclic_stop(dac->dma.chan); atmel_abdac_suspend()
558 dw_dma_cyclic_start(dac->dma.chan); atmel_abdac_resume()
578 dma_release_channel(dac->dma.chan); atmel_abdac_remove()
579 dac->dma.chan = NULL; atmel_abdac_remove()
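atmel_abdac_probe() above follows the usual dmaengine bring-up: build a capability mask, grab a channel through a platform filter, then describe the device side with dmaengine_slave_config(). A sketch under those assumptions; the FIFO address and filter arguments are placeholders:

#include <linux/dmaengine.h>

static struct dma_chan *audio_tx_channel(dma_addr_t fifo_phys,
					 dma_filter_fn filter, void *param)
{
	struct dma_slave_config cfg = { };
	dma_cap_mask_t mask;
	struct dma_chan *chan;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	chan = dma_request_channel(mask, filter, param);
	if (!chan)
		return NULL;

	cfg.direction = DMA_MEM_TO_DEV;
	cfg.dst_addr = fifo_phys;	/* assumed: device FIFO bus address */
	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	if (dmaengine_slave_config(chan, &cfg)) {
		dma_release_channel(chan);
		return NULL;
	}
	return chan;
}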
/linux-4.1.27/drivers/crypto/
H A Dhifn_795x.c29 #include <linux/dma-mapping.h>
111 #define HIFN_PUCTRL_DMAENA 0x0002 /* enable dma */
627 #define HIFN_MAX_SEGLEN 0xffff /* maximum dma segment len */
628 #define HIFN_MAX_DMALEN 0x3ffff /* maximum dma length */
932 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_init_dma() local
937 dma->cmdr[i].p = __cpu_to_le32(dptr + hifn_init_dma()
940 dma->resr[i].p = __cpu_to_le32(dptr + hifn_init_dma()
946 dma->cmdr[HIFN_D_CMD_RSIZE].p = __cpu_to_le32(dptr + hifn_init_dma()
948 dma->srcr[HIFN_D_SRC_RSIZE].p = __cpu_to_le32(dptr + hifn_init_dma()
950 dma->dstr[HIFN_D_DST_RSIZE].p = __cpu_to_le32(dptr + hifn_init_dma()
952 dma->resr[HIFN_D_RES_RSIZE].p = __cpu_to_le32(dptr + hifn_init_dma()
955 dma->cmdu = dma->srcu = dma->dstu = dma->resu = 0; hifn_init_dma()
956 dma->cmdi = dma->srci = dma->dsti = dma->resi = 0; hifn_init_dma()
957 dma->cmdk = dma->srck = dma->dstk = dma->resk = 0; hifn_init_dma()
1128 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_setup_crypto_command() local
1145 dma->cmdu++; hifn_setup_crypto_command()
1146 if (dma->cmdu > 1) { hifn_setup_crypto_command()
1169 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_setup_cmd_desc() local
1174 sa_idx = dma->cmdi; hifn_setup_cmd_desc()
1175 buf_pos = buf = dma->command_bufs[dma->cmdi]; hifn_setup_cmd_desc()
1262 dma->cmdr[dma->cmdi].l = __cpu_to_le32(cmd_len | HIFN_D_VALID | hifn_setup_cmd_desc()
1265 if (++dma->cmdi == HIFN_D_CMD_RSIZE) { hifn_setup_cmd_desc()
1266 dma->cmdr[dma->cmdi].l = __cpu_to_le32( hifn_setup_cmd_desc()
1269 dma->cmdi = 0; hifn_setup_cmd_desc()
1271 dma->cmdr[dma->cmdi-1].l |= __cpu_to_le32(HIFN_D_VALID); hifn_setup_cmd_desc()
1286 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_setup_src_desc() local
1292 idx = dma->srci; hifn_setup_src_desc()
1294 dma->srcr[idx].p = __cpu_to_le32(addr); hifn_setup_src_desc()
1295 dma->srcr[idx].l = __cpu_to_le32(size | HIFN_D_VALID | hifn_setup_src_desc()
1299 dma->srcr[idx].l = __cpu_to_le32(HIFN_D_VALID | hifn_setup_src_desc()
1305 dma->srci = idx; hifn_setup_src_desc()
1306 dma->srcu++; hifn_setup_src_desc()
1318 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_setup_res_desc() local
1320 dma->resr[dma->resi].l = __cpu_to_le32(HIFN_USED_RESULT | hifn_setup_res_desc()
1323 * dma->resr[dma->resi].l = __cpu_to_le32(HIFN_MAX_RESULT | HIFN_D_VALID | hifn_setup_res_desc()
1327 if (++dma->resi == HIFN_D_RES_RSIZE) { hifn_setup_res_desc()
1328 dma->resr[HIFN_D_RES_RSIZE].l = __cpu_to_le32(HIFN_D_VALID | hifn_setup_res_desc()
1330 dma->resi = 0; hifn_setup_res_desc()
1333 dma->resu++; hifn_setup_res_desc()
1344 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_setup_dst_desc() local
1350 idx = dma->dsti; hifn_setup_dst_desc()
1351 dma->dstr[idx].p = __cpu_to_le32(addr); hifn_setup_dst_desc()
1352 dma->dstr[idx].l = __cpu_to_le32(size | HIFN_D_VALID | hifn_setup_dst_desc()
1356 dma->dstr[idx].l = __cpu_to_le32(HIFN_D_VALID | hifn_setup_dst_desc()
1361 dma->dsti = idx; hifn_setup_dst_desc()
1362 dma->dstu++; hifn_setup_dst_desc()
1817 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_clear_rings() local
1823 dma->cmdi, dma->srci, dma->dsti, dma->resi, hifn_clear_rings()
1824 dma->cmdu, dma->srcu, dma->dstu, dma->resu, hifn_clear_rings()
1825 dma->cmdk, dma->srck, dma->dstk, dma->resk); hifn_clear_rings()
1827 i = dma->resk; u = dma->resu; hifn_clear_rings()
1829 if (dma->resr[i].l & __cpu_to_le32(HIFN_D_VALID)) hifn_clear_rings()
1843 dma->resk = i; dma->resu = u; hifn_clear_rings()
1845 i = dma->srck; u = dma->srcu; hifn_clear_rings()
1847 if (dma->srcr[i].l & __cpu_to_le32(HIFN_D_VALID)) hifn_clear_rings()
1853 dma->srck = i; dma->srcu = u; hifn_clear_rings()
1855 i = dma->cmdk; u = dma->cmdu; hifn_clear_rings()
1857 if (dma->cmdr[i].l & __cpu_to_le32(HIFN_D_VALID)) hifn_clear_rings()
1863 dma->cmdk = i; dma->cmdu = u; hifn_clear_rings()
1865 i = dma->dstk; u = dma->dstu; hifn_clear_rings()
1867 if (dma->dstr[i].l & __cpu_to_le32(HIFN_D_VALID)) hifn_clear_rings()
1873 dma->dstk = i; dma->dstu = u; hifn_clear_rings()
1878 dma->cmdi, dma->srci, dma->dsti, dma->resi, hifn_clear_rings()
1879 dma->cmdu, dma->srcu, dma->dstu, dma->resu, hifn_clear_rings()
1880 dma->cmdk, dma->srck, dma->dstk, dma->resk); hifn_clear_rings()
1893 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_work() local
1895 if (dma->cmdu == 0 && (dev->flags & HIFN_FLAG_CMD_BUSY)) { hifn_work()
1899 if (dma->srcu == 0 && (dev->flags & HIFN_FLAG_SRC_BUSY)) { hifn_work()
1903 if (dma->dstu == 0 && (dev->flags & HIFN_FLAG_DST_BUSY)) { hifn_work()
1907 if (dma->resu == 0 && (dev->flags & HIFN_FLAG_RES_BUSY)) { hifn_work()
1924 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_work() local
1934 printk("%x.%p ", dma->resr[i].l, dev->sa[i]); hifn_work()
1957 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_interrupt() local
1964 dev->name, dmacsr, dev->dmareg, dmacsr & dev->dmareg, dma->cmdi, hifn_interrupt()
1965 dma->cmdi, dma->srci, dma->dsti, dma->resi, hifn_interrupt()
1966 dma->cmdu, dma->srcu, dma->dstu, dma->resu); hifn_interrupt()
2006 if ((dmacsr & HIFN_DMACSR_C_WAIT) && (dma->cmdu == 0)) { hifn_interrupt()
2022 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; hifn_flush() local
2026 struct hifn_desc *d = &dma->resr[i]; hifn_flush()
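hifn_clear_rings() above reaps each descriptor ring the same way: starting at the last reaped index, advance while descriptors have been handed back (HIFN_D_VALID cleared by the engine), wrapping at the ring size. A generic sketch of that loop with stand-in types, not hifn's real layout:

#include <linux/types.h>

struct desc {
	__le32 p;		/* buffer bus address */
	__le32 l;		/* length + ownership/control bits */
};

#define D_VALID cpu_to_le32(0x80000000)	/* assumed ownership bit */

static void reap_ring(struct desc *ring, unsigned int size,
		      unsigned int *tail, unsigned int *used)
{
	unsigned int i = *tail, u = *used;

	while (u) {
		if (ring[i].l & D_VALID)
			break;		/* engine still owns this one */
		if (++i == size)
			i = 0;		/* wrap past the jump descriptor */
		u--;
	}
	*tail = i;
	*used = u;
}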
/linux-4.1.27/drivers/crypto/caam/
H A Dsg_sw_sec4.h11 * convert single dma address to h/w link table format
14 dma_addr_t dma, u32 len, u32 offset) dma_to_sec4_sg_one()
16 sec4_sg_ptr->ptr = dma; dma_to_sec4_sg_one()
48 * scatterlist must have been previously dma mapped
13 dma_to_sec4_sg_one(struct sec4_sg_entry *sec4_sg_ptr, dma_addr_t dma, u32 len, u32 offset) dma_to_sec4_sg_one() argument
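dma_to_sec4_sg_one() above packs one DMA address into a hardware link-table entry; a full table is built by walking an already-mapped scatterlist and emitting one entry per segment. A sketch of such a walker, assuming the helper shown above (the walker itself is hypothetical):

#include <linux/scatterlist.h>

static struct sec4_sg_entry *
sg_to_table(struct scatterlist *sg, int count, struct sec4_sg_entry *e)
{
	while (count--) {
		dma_to_sec4_sg_one(e++, sg_dma_address(sg),
				   sg_dma_len(sg), 0);
		sg = sg_next(sg);
	}
	return e;	/* first unused entry, for chaining */
}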
/linux-4.1.27/drivers/dma/dw/
H A Dinternal.h14 #include <linux/dma/dw.h>
/linux-4.1.27/arch/unicore32/kernel/
H A DMakefile6 obj-y := dma.o elf.o entry.o process.o ptrace.o
/linux-4.1.27/arch/microblaze/kernel/
H A DMakefile17 obj-y += dma.o exceptions.o \
/linux-4.1.27/arch/mips/cavium-octeon/
H A DMakefile16 obj-y += dma-octeon.o
/linux-4.1.27/arch/mips/include/asm/
H A Ddma-mapping.h5 #include <asm/dma-coherence.h>
7 #include <asm-generic/dma-coherent.h>
10 #include <dma-coherence.h>
33 #include <asm-generic/dma-mapping-common.h> dma_mark_clean()
/linux-4.1.27/drivers/usb/chipidea/
H A Dudc.h65 dma_addr_t dma; member in struct:td_node
74 * @dma: dma address for the transfer descriptor
76 * @zdma: dma address of the zero packet's transfer descriptor
/linux-4.1.27/drivers/video/fbdev/
H A Damba-clcd-versatile.c2 #include <linux/dma-mapping.h>
155 dma_addr_t dma; versatile_clcd_setup_dma() local
158 &dma, GFP_KERNEL); versatile_clcd_setup_dma()
164 fb->fb.fix.smem_start = dma; versatile_clcd_setup_dma()
/linux-4.1.27/include/sound/
H A Datmel-abdac.h13 #include <linux/platform_data/dma-dw.h>
/linux-4.1.27/arch/sh/include/mach-dreamcast/mach/
H A Ddma.h2 * include/asm-sh/dreamcast/dma.h
/linux-4.1.27/arch/openrisc/include/asm/
H A Ddma-mapping.h25 #include <linux/dma-debug.h>
26 #include <asm-generic/dma-coherent.h>
28 #include <linux/dma-mapping.h>
39 #include <asm-generic/dma-mapping-common.h>
/linux-4.1.27/arch/blackfin/mach-bf538/include/mach/
H A Ddma.h1 /* mach/dma.h - arch-specific DMA defines
/linux-4.1.27/arch/blackfin/mach-bf561/include/mach/
H A Ddma.h1 /* mach/dma.h - arch-specific DMA defines
/linux-4.1.27/arch/m68k/kernel/
H A DMakefile23 obj-$(CONFIG_HAS_DMA) += dma.o
/linux-4.1.27/arch/arm/mach-rpc/include/mach/
H A Disa-dma.h2 * arch/arm/mach-rpc/include/mach/isa-dma.h
/linux-4.1.27/arch/arm/mach-netx/
H A Dfb.c22 #include <linux/dma-mapping.h>
41 dma_addr_t dma; netx_clcd_setup() local
46 &dma, GFP_KERNEL); netx_clcd_setup()
52 fb->fb.fix.smem_start = dma; netx_clcd_setup()
/linux-4.1.27/arch/arm/mach-nspire/
H A Dclcd.c16 #include <linux/dma-mapping.h>
70 dma_addr_t dma; nspire_clcd_setup() local
94 panel_size, &dma, GFP_KERNEL); nspire_clcd_setup()
101 fb->fb.fix.smem_start = dma; nspire_clcd_setup()
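The versatile, netx and nspire CLCD hits all follow one pattern: a single dma_alloc_coherent() call yields both the kernel mapping for the framebuffer and the bus address (smem_start) programmed into the controller. A sketch of that setup, assuming <linux/amba/clcd.h>; error handling is trimmed:

#include <linux/dma-mapping.h>
#include <linux/amba/clcd.h>

static int clcd_fb_alloc(struct device *dev, struct clcd_fb *fb, size_t size)
{
	dma_addr_t dma;

	fb->fb.screen_base = dma_alloc_coherent(dev, size, &dma, GFP_KERNEL);
	if (!fb->fb.screen_base)
		return -ENOMEM;

	fb->fb.fix.smem_start = dma;	/* bus address for the LCD controller */
	fb->fb.fix.smem_len = size;
	return 0;
}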
