Lines Matching refs:dma
84 void msm_stop_dma(struct uart_port *port, struct msm_dma *dma) in msm_stop_dma() argument
90 mapped = dma->count; in msm_stop_dma()
91 dma->count = 0; in msm_stop_dma()
93 dmaengine_terminate_all(dma->chan); in msm_stop_dma()
103 val &= ~dma->enable_bit; in msm_stop_dma()
107 dma_unmap_single(dev, dma->phys, mapped, dma->dir); in msm_stop_dma()
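
msm_stop_dma() shows the standard dmaengine teardown order: snapshot and clear the driver's byte count, terminate the channel, clear the UART's DMA-enable bit, and only then unmap the buffer. A minimal sketch of that order, assuming the struct msm_dma fields visible above (chan, phys, dir); the UARTDM register write is elided:

    /* Sketch only; needs <linux/dmaengine.h> and <linux/dma-mapping.h>. */
    static void stop_dma_sketch(struct device *dev, struct dma_chan *chan,
                                dma_addr_t phys, size_t mapped,
                                enum dma_data_direction dir)
    {
            /* Abort the in-flight descriptor first ... */
            dmaengine_terminate_all(chan);

            /* ... so the engine is idle before the buffer returns to the CPU. */
            dma_unmap_single(dev, phys, mapped, dir);
    }

Note that dma->count is zeroed before termination (lines 90-91): the TX and RX completion handlers bail out when the count is zero (lines 276 and 384), which guards against a late-running callback touching a buffer that has already been unmapped.
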
112 struct msm_dma *dma; in msm_release_dma() local
114 dma = &msm_port->tx_dma; in msm_release_dma()
115 if (dma->chan) { in msm_release_dma()
116 msm_stop_dma(&msm_port->uart, dma); in msm_release_dma()
117 dma_release_channel(dma->chan); in msm_release_dma()
120 memset(dma, 0, sizeof(*dma)); in msm_release_dma()
122 dma = &msm_port->rx_dma; in msm_release_dma()
123 if (dma->chan) { in msm_release_dma()
124 msm_stop_dma(&msm_port->uart, dma); in msm_release_dma()
125 dma_release_channel(dma->chan); in msm_release_dma()
126 kfree(dma->virt); in msm_release_dma()
129 memset(dma, 0, sizeof(*dma)); in msm_release_dma()
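
Release mirrors the request path in reverse: stop any active transfer, return the channel, free the RX bounce buffer (the TX side maps the uart xmit buffer directly, so it has nothing to kfree), and zero the struct. The memset matters: everywhere else the driver tests dma->chan (lines 445, 513, 744, 1039) to choose between DMA and PIO, so a zeroed struct cleanly marks the port as DMA-free. A condensed sketch with a hypothetical helper name:

    static void release_one_dma_sketch(struct msm_port *msm_port,
                                       struct msm_dma *dma)
    {
            if (!dma->chan)
                    return;

            msm_stop_dma(&msm_port->uart, dma);
            dma_release_channel(dma->chan);
            kfree(dma->virt);               /* NULL (harmless) on the TX side */
            memset(dma, 0, sizeof(*dma));   /* dma->chan == NULL => PIO path */
    }
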
136 struct msm_dma *dma; in msm_request_tx_dma() local
140 dma = &msm_port->tx_dma; in msm_request_tx_dma()
143 dma->chan = dma_request_slave_channel_reason(dev, "tx"); in msm_request_tx_dma()
144 if (IS_ERR(dma->chan)) in msm_request_tx_dma()
156 ret = dmaengine_slave_config(dma->chan, &conf); in msm_request_tx_dma()
160 dma->dir = DMA_TO_DEVICE; in msm_request_tx_dma()
163 dma->enable_bit = UARTDM_DMEN_TX_DM_ENABLE; in msm_request_tx_dma()
165 dma->enable_bit = UARTDM_DMEN_TX_BAM_ENABLE; in msm_request_tx_dma()
170 dma_release_channel(dma->chan); in msm_request_tx_dma()
172 memset(dma, 0, sizeof(*dma)); in msm_request_tx_dma()
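
Channel acquisition follows the usual slave-DMA shape: look up the named DT channel, then describe the device side of the transfer with dmaengine_slave_config(). dma_request_slave_channel_reason() returns an ERR_PTR, so -EPROBE_DEFER from a not-yet-probed DMA controller propagates out of probe. A hedged sketch of that shape; the FIFO address and burst size below are placeholders, not the driver's actual UARTDM values:

    #include <linux/dmaengine.h>

    static struct dma_chan *request_tx_chan_sketch(struct device *dev,
                                                   phys_addr_t tx_fifo)
    {
            struct dma_slave_config conf = {};
            struct dma_chan *chan;
            int ret;

            chan = dma_request_slave_channel_reason(dev, "tx");
            if (IS_ERR(chan))
                    return chan;            /* may carry -EPROBE_DEFER */

            conf.direction = DMA_MEM_TO_DEV;
            conf.dst_addr = tx_fifo;        /* placeholder: UARTDM TX FIFO */
            conf.dst_maxburst = 4;          /* placeholder burst size */

            ret = dmaengine_slave_config(chan, &conf);
            if (ret) {
                    dma_release_channel(chan);
                    return ERR_PTR(ret);
            }
            return chan;
    }

The error path at lines 170-172 unwinds the same way: release the channel, then memset so dma->chan stays NULL and the port silently falls back to PIO.
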
179 struct msm_dma *dma; in msm_request_rx_dma() local
183 dma = &msm_port->rx_dma; in msm_request_rx_dma()
186 dma->chan = dma_request_slave_channel_reason(dev, "rx"); in msm_request_rx_dma()
187 if (IS_ERR(dma->chan)) in msm_request_rx_dma()
192 dma->virt = kzalloc(UARTDM_RX_SIZE, GFP_KERNEL); in msm_request_rx_dma()
193 if (!dma->virt) in msm_request_rx_dma()
203 ret = dmaengine_slave_config(dma->chan, &conf); in msm_request_rx_dma()
207 dma->dir = DMA_FROM_DEVICE; in msm_request_rx_dma()
210 dma->enable_bit = UARTDM_DMEN_RX_DM_ENABLE; in msm_request_rx_dma()
212 dma->enable_bit = UARTDM_DMEN_RX_BAM_ENABLE; in msm_request_rx_dma()
216 kfree(dma->virt); in msm_request_rx_dma()
218 dma_release_channel(dma->chan); in msm_request_rx_dma()
220 memset(dma, 0, sizeof(*dma)); in msm_request_rx_dma()
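
The RX request differs from TX in two ways visible above: the driver owns a kzalloc'd bounce buffer (dma->virt, UARTDM_RX_SIZE bytes) rather than mapping tty memory, and the slave config points the source at the RX FIFO. A sketch of just that delta, with a hypothetical helper name and a placeholder FIFO address:

    static int setup_rx_side_sketch(struct msm_dma *dma, phys_addr_t rx_fifo)
    {
            struct dma_slave_config conf = {};

            /* driver-owned bounce buffer; bytes are copied to the tty later */
            dma->virt = kzalloc(UARTDM_RX_SIZE, GFP_KERNEL);
            if (!dma->virt)
                    return -ENOMEM;

            conf.direction = DMA_DEV_TO_MEM;
            conf.src_addr = rx_fifo;        /* placeholder: UARTDM RX FIFO */
            conf.src_maxburst = 4;          /* placeholder burst size */

            return dmaengine_slave_config(dma->chan, &conf);
    }

Correspondingly, the RX error path (lines 216-220) frees the bounce buffer in addition to releasing the channel and zeroing the struct.
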
244 struct msm_dma *dma = &msm_port->tx_dma; in msm_start_tx() local
247 if (dma->count) in msm_start_tx()
266 struct msm_dma *dma = &msm_port->tx_dma; in msm_complete_tx_dma() local
276 if (!dma->count) in msm_complete_tx_dma()
279 status = dmaengine_tx_status(dma->chan, dma->cookie, &state); in msm_complete_tx_dma()
281 dma_unmap_single(port->dev, dma->phys, dma->count, dma->dir); in msm_complete_tx_dma()
284 val &= ~dma->enable_bit; in msm_complete_tx_dma()
292 count = dma->count - state.residue; in msm_complete_tx_dma()
294 dma->count = 0; in msm_complete_tx_dma()
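
The TX completion does its accounting with the descriptor's residue: dmaengine_tx_status() fills a struct dma_tx_state whose residue field is the number of bytes the engine did not transfer, so bytes actually sent = dma->count - residue (line 292). This works whether the transfer completed normally or was cut short by msm_stop_dma(). A minimal sketch of the query (the real code also keeps the returned dma_status, line 279):

    static unsigned int tx_bytes_sent_sketch(struct dma_chan *chan,
                                             dma_cookie_t cookie,
                                             unsigned int submitted)
    {
            struct dma_tx_state state;

            /* status return value elided; the driver checks it for errors */
            dmaengine_tx_status(chan, cookie, &state);

            /* residue = bytes left unmoved; the rest reached the FIFO */
            return submitted - state.residue;
    }

The driver can then advance the circular xmit buffer by that many bytes before deciding whether to restart TX.
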
315 struct msm_dma *dma = &msm_port->tx_dma; in msm_handle_tx_dma() local
322 dma->phys = dma_map_single(port->dev, cpu_addr, count, dma->dir); in msm_handle_tx_dma()
323 ret = dma_mapping_error(port->dev, dma->phys); in msm_handle_tx_dma()
327 dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys, in msm_handle_tx_dma()
331 if (!dma->desc) { in msm_handle_tx_dma()
336 dma->desc->callback = msm_complete_tx_dma; in msm_handle_tx_dma()
337 dma->desc->callback_param = msm_port; in msm_handle_tx_dma()
339 dma->cookie = dmaengine_submit(dma->desc); in msm_handle_tx_dma()
340 ret = dma_submit_error(dma->cookie); in msm_handle_tx_dma()
351 dma->count = count; in msm_handle_tx_dma()
354 val |= dma->enable_bit; in msm_handle_tx_dma()
364 dma_async_issue_pending(dma->chan); in msm_handle_tx_dma()
367 dma_unmap_single(port->dev, dma->phys, count, dma->dir); in msm_handle_tx_dma()
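
msm_handle_tx_dma() is the canonical dmaengine submit sequence: map, prep, attach the callback, submit, record the count, and only then kick the engine with dma_async_issue_pending(). A condensed sketch assuming the listing's struct msm_port/struct msm_dma fields; the UARTDM register writes that set the enable bit and arm the transfer (around lines 351-354) are reduced to a comment, and the prep flags are placeholders:

    static int submit_tx_sketch(struct msm_port *msm_port, void *cpu_addr,
                                unsigned int count)
    {
            struct uart_port *port = &msm_port->uart;
            struct msm_dma *dma = &msm_port->tx_dma;
            int ret;

            dma->phys = dma_map_single(port->dev, cpu_addr, count, dma->dir);
            ret = dma_mapping_error(port->dev, dma->phys);
            if (ret)
                    return ret;

            dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys,
                                                    count, DMA_MEM_TO_DEV,
                                                    DMA_PREP_INTERRUPT);
            if (!dma->desc) {
                    ret = -EIO;
                    goto unmap;
            }

            dma->desc->callback = msm_complete_tx_dma;
            dma->desc->callback_param = msm_port;

            dma->cookie = dmaengine_submit(dma->desc);
            ret = dma_submit_error(dma->cookie);
            if (ret)
                    goto unmap;

            dma->count = count;
            /* ... set dma->enable_bit in the DMA-enable register here ... */
            dma_async_issue_pending(dma->chan);     /* start the engine */
            return 0;

    unmap:
            dma_unmap_single(port->dev, dma->phys, count, dma->dir);
            return ret;
    }

Note dma->count is only set once submission has succeeded; msm_start_tx() (line 247) uses a non-zero count to avoid queueing a second transfer on top of one already in flight.
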
376 struct msm_dma *dma = &msm_port->rx_dma; in msm_complete_rx_dma() local
384 if (!dma->count) in msm_complete_rx_dma()
388 val &= ~dma->enable_bit; in msm_complete_rx_dma()
405 dma->count = 0; in msm_complete_rx_dma()
407 dma_unmap_single(port->dev, dma->phys, UARTDM_RX_SIZE, dma->dir); in msm_complete_rx_dma()
412 if (msm_port->break_detected && dma->virt[i] == 0) { in msm_complete_rx_dma()
424 sysrq = uart_handle_sysrq_char(port, dma->virt[i]); in msm_complete_rx_dma()
427 tty_insert_flip_char(tport, dma->virt[i], flag); in msm_complete_rx_dma()
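
On the RX side the completion walks the bounce buffer byte by byte: UARTDM reports a break as a NUL byte, so a pending break_detected flag turns that byte's flag into TTY_BREAK, and each character is offered to sysrq before it is queued. A sketch of the loop, with the spinlock juggling around the sysrq call and the final push hedged as standard tty-flip usage rather than the driver's exact code:

    /* needs <linux/tty_flip.h> and <linux/serial_core.h> */
    static void push_rx_sketch(struct msm_port *msm_port, unsigned int count)
    {
            struct uart_port *port = &msm_port->uart;
            struct tty_port *tport = &port->state->port;
            struct msm_dma *dma = &msm_port->rx_dma;
            unsigned int i;

            for (i = 0; i < count; i++) {
                    char flag = TTY_NORMAL;

                    /* a break shows up as a NUL after the break event */
                    if (msm_port->break_detected && dma->virt[i] == 0) {
                            flag = TTY_BREAK;
                            msm_port->break_detected = false;
                    }

                    if (uart_handle_sysrq_char(port, dma->virt[i]))
                            continue;       /* consumed by sysrq, don't queue */

                    tty_insert_flip_char(tport, dma->virt[i], flag);
            }

            tty_flip_buffer_push(tport);    /* hand the batch to the ldisc */
    }
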
440 struct msm_dma *dma = &msm_port->rx_dma; in msm_start_rx_dma() local
445 if (!dma->chan) in msm_start_rx_dma()
448 dma->phys = dma_map_single(uart->dev, dma->virt, in msm_start_rx_dma()
449 UARTDM_RX_SIZE, dma->dir); in msm_start_rx_dma()
450 ret = dma_mapping_error(uart->dev, dma->phys); in msm_start_rx_dma()
454 dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys, in msm_start_rx_dma()
457 if (!dma->desc) in msm_start_rx_dma()
460 dma->desc->callback = msm_complete_rx_dma; in msm_start_rx_dma()
461 dma->desc->callback_param = msm_port; in msm_start_rx_dma()
463 dma->cookie = dmaengine_submit(dma->desc); in msm_start_rx_dma()
464 ret = dma_submit_error(dma->cookie); in msm_start_rx_dma()
482 dma->count = UARTDM_RX_SIZE; in msm_start_rx_dma()
484 dma_async_issue_pending(dma->chan); in msm_start_rx_dma()
490 val |= dma->enable_bit; in msm_start_rx_dma()
502 dma_unmap_single(uart->dev, dma->phys, UARTDM_RX_SIZE, dma->dir); in msm_start_rx_dma()
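
Arming RX repeats the submit pattern over the fixed UARTDM_RX_SIZE bounce buffer, with one ordering detail worth noticing: the descriptor is submitted and the engine kicked (lines 463-484) before the enable bit reaches the hardware (line 490), so by the time the UART starts pushing bytes a descriptor is already queued. A compact sketch of that order; the error checks the real code performs at lines 450, 457, and 464 are elided here:

    /* Sketch: (re)arming RX; mapping/prep/submit error paths elided. */
    static void arm_rx_order_sketch(struct msm_port *msm_port)
    {
            struct uart_port *uart = &msm_port->uart;
            struct msm_dma *dma = &msm_port->rx_dma;

            dma->phys = dma_map_single(uart->dev, dma->virt,
                                       UARTDM_RX_SIZE, dma->dir);
            dma->desc = dmaengine_prep_slave_single(dma->chan, dma->phys,
                                                    UARTDM_RX_SIZE,
                                                    DMA_DEV_TO_MEM,
                                                    DMA_PREP_INTERRUPT);
            dma->desc->callback = msm_complete_rx_dma;
            dma->desc->callback_param = msm_port;
            dma->cookie = dmaengine_submit(dma->desc);

            dma->count = UARTDM_RX_SIZE;
            dma_async_issue_pending(dma->chan);     /* descriptor queued... */
            /* ...and only now set dma->enable_bit so the UART starts DMA */
    }
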
508 struct msm_dma *dma = &msm_port->rx_dma; in msm_stop_rx() local
513 if (dma->chan) in msm_stop_rx()
514 msm_stop_dma(port, dma); in msm_stop_rx()
704 struct msm_dma *dma = &msm_port->tx_dma; in msm_handle_tx() local
744 if (!dma->chan || dma_count < dma_min) in msm_handle_tx()
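
msm_handle_tx() is where the PIO/DMA split happens: with no channel (the request failed at probe) or a payload below a minimum worth the mapping and descriptor overhead, the driver writes the FIFO directly. The shape of the decision; msm_handle_tx_dma is real per the listing, while msm_handle_tx_pio and the count variables stand in for surrounding logic the grep did not capture:

    /* dma_min: smallest payload worth the DMA setup cost (driver-chosen) */
    if (!dma->chan || dma_count < dma_min)
            msm_handle_tx_pio(port, pio_count);     /* programmed FIFO writes */
    else
            msm_handle_tx_dma(msm_port, dma_count); /* map + prep + submit */
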
764 struct msm_dma *dma = &msm_port->rx_dma; in msm_uart_irq() local
779 if (dma->count) { in msm_uart_irq()
788 dmaengine_terminate_all(dma->chan); in msm_uart_irq()
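
In the interrupt handler, a receive event that arrives while an RX transfer is still outstanding (dma->count non-zero, line 779) is resolved by terminating the descriptor, after which the RX completion logic shown earlier drains whatever bytes reached the bounce buffer. Roughly, with rx_event as a placeholder for the ISR bits the driver actually checks:

    /* Sketch: rx_event stands in for the driver's real interrupt mask test */
    if (rx_event && dma->count) {
            /* stop the engine; partial data is recovered by the completion */
            dmaengine_terminate_all(dma->chan);
    }
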
1033 struct msm_dma *dma = &msm_port->rx_dma; in msm_set_termios() local
1039 if (dma->chan) /* Terminate if any */ in msm_set_termios()
1040 msm_stop_dma(port, dma); in msm_set_termios()