
Searched refs: dma_tx (Results 1 – 34 of 34) sorted by relevance

/linux-4.1.27/drivers/net/irda/
sa1100_ir.c
64 struct sa1100_buf dma_tx; member
230 dma_unmap_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, DMA_TO_DEVICE); in sa1100_irda_sirtxdma_irq()
231 dev_kfree_skb(si->dma_tx.skb); in sa1100_irda_sirtxdma_irq()
232 si->dma_tx.skb = NULL; in sa1100_irda_sirtxdma_irq()
235 dev->stats.tx_bytes += sg_dma_len(&si->dma_tx.sg); in sa1100_irda_sirtxdma_irq()
262 si->dma_tx.skb = skb; in sa1100_irda_sir_tx_start()
263 sg_set_buf(&si->dma_tx.sg, si->tx_buff.data, si->tx_buff.len); in sa1100_irda_sir_tx_start()
264 if (dma_map_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, DMA_TO_DEVICE) == 0) { in sa1100_irda_sir_tx_start()
265 si->dma_tx.skb = NULL; in sa1100_irda_sir_tx_start()
271 sa1100_irda_dma_start(&si->dma_tx, DMA_MEM_TO_DEV, sa1100_irda_sirtxdma_irq, dev); in sa1100_irda_sir_tx_start()
[all …]
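
The sa1100_ir.c hits above outline the usual streaming-DMA TX life cycle: map the frame for the device before starting the transfer, then unmap it, account the bytes and free the skb from the completion path. A minimal sketch of that shape, with a hypothetical my_dma_buf container standing in for struct sa1100_buf (sa1100_irda_dma_start itself is driver-internal and not reproduced):

#include <linux/dma-mapping.h>
#include <linux/netdevice.h>
#include <linux/scatterlist.h>
#include <linux/skbuff.h>

struct my_dma_buf {			/* illustrative, not sa1100_buf */
	struct device *dev;		/* device the buffer is mapped for */
	struct scatterlist sg;		/* single-entry scatterlist */
	struct sk_buff *skb;		/* frame currently in flight */
};

/* TX start path: map the frame for device-bound DMA before kicking hardware. */
static int my_tx_map(struct my_dma_buf *buf, struct sk_buff *skb,
		     void *data, size_t len)
{
	buf->skb = skb;
	sg_init_one(&buf->sg, data, len);
	if (dma_map_sg(buf->dev, &buf->sg, 1, DMA_TO_DEVICE) == 0) {
		buf->skb = NULL;	/* mapping failed, caller drops the frame */
		return -ENOMEM;
	}
	return 0;			/* hardware-specific start would follow */
}

/* TX completion callback: undo the mapping, account, release the skb. */
static void my_tx_done(struct my_dma_buf *buf, struct net_device *dev)
{
	dma_unmap_sg(buf->dev, &buf->sg, 1, DMA_TO_DEVICE);
	dev->stats.tx_bytes += sg_dma_len(&buf->sg);
	dev_kfree_skb_any(buf->skb);	/* _any() keeps the sketch context-agnostic */
	buf->skb = NULL;
}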
/linux-4.1.27/drivers/spi/
spi-rockchip.c
201 struct rockchip_spi_dma_data dma_tx; member
327 dmaengine_terminate_all(rs->dma_tx.ch); in rockchip_spi_handle_err()
466 txconf.direction = rs->dma_tx.direction; in rockchip_spi_prepare_dma()
467 txconf.dst_addr = rs->dma_tx.addr; in rockchip_spi_prepare_dma()
470 dmaengine_slave_config(rs->dma_tx.ch, &txconf); in rockchip_spi_prepare_dma()
473 rs->dma_tx.ch, in rockchip_spi_prepare_dma()
475 rs->dma_tx.direction, DMA_PREP_INTERRUPT); in rockchip_spi_prepare_dma()
495 dma_async_issue_pending(rs->dma_tx.ch); in rockchip_spi_prepare_dma()
720 rs->dma_tx.ch = dma_request_slave_channel(rs->dev, "tx"); in rockchip_spi_probe()
721 if (!rs->dma_tx.ch) in rockchip_spi_probe()
[all …]
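
The spi-rockchip.c flow above is the generic dmaengine slave-TX sequence: configure the channel for memory-to-device transfers toward the controller's TX FIFO, prepare a slave scatter-gather descriptor, then submit and issue it. A rough sketch, with the FIFO address, bus width and burst size as placeholder values:

#include <linux/dmaengine.h>

static int my_start_tx_dma(struct dma_chan *ch, dma_addr_t tx_fifo,
			   struct scatterlist *sgl, unsigned int nents)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= tx_fifo,		/* controller TX FIFO */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
		.dst_maxburst	= 8,			/* made-up burst size */
	};
	struct dma_async_tx_descriptor *desc;

	dmaengine_slave_config(ch, &cfg);

	desc = dmaengine_prep_slave_sg(ch, sgl, nents, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT);
	if (!desc)
		return -ENOMEM;

	dmaengine_submit(desc);		/* queue the descriptor ... */
	dma_async_issue_pending(ch);	/* ... and start the channel */
	return 0;
}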
spi-sh-msiof.c
700 dma_sync_single_for_device(p->master->dma_tx->device->dev, in sh_msiof_dma_once()
702 desc_tx = dmaengine_prep_slave_single(p->master->dma_tx, in sh_msiof_dma_once()
738 dma_async_issue_pending(p->master->dma_tx); in sh_msiof_dma_once()
774 dmaengine_terminate_all(p->master->dma_tx); in sh_msiof_dma_once()
846 while (master->dma_tx && len > 15) { in sh_msiof_transfer_one()
1083 master->dma_tx = sh_msiof_request_dma_chan(dev, DMA_MEM_TO_DEV, in sh_msiof_request_dma()
1086 if (!master->dma_tx) in sh_msiof_request_dma()
1103 tx_dev = master->dma_tx->device->dev; in sh_msiof_request_dma()
1127 dma_release_channel(master->dma_tx); in sh_msiof_request_dma()
1128 master->dma_tx = NULL; in sh_msiof_request_dma()
[all …]
spi-qup.c
297 chan = master->dma_tx; in spi_qup_prep_sg()
322 dmaengine_terminate_all(master->dma_tx); in spi_qup_dma_terminate()
350 dma_async_issue_pending(master->dma_tx); in spi_qup_do_dma()
681 IS_ERR_OR_NULL(master->dma_tx) || in spi_qup_can_dma()
698 if (!IS_ERR_OR_NULL(master->dma_tx)) in spi_qup_release_dma()
699 dma_release_channel(master->dma_tx); in spi_qup_release_dma()
715 master->dma_tx = dma_request_slave_channel_reason(dev, "tx"); in spi_qup_init_dma()
716 if (IS_ERR(master->dma_tx)) { in spi_qup_init_dma()
717 ret = PTR_ERR(master->dma_tx); in spi_qup_init_dma()
738 ret = dmaengine_slave_config(master->dma_tx, tx_conf); in spi_qup_init_dma()
[all …]
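
spi-qup.c uses the *_reason variant of the channel request, which returns an ERR_PTR() instead of NULL so that -EPROBE_DEFER from the DMA provider can be propagated. A minimal sketch of that request/teardown pairing (names are illustrative):

#include <linux/dmaengine.h>
#include <linux/err.h>

static int my_tx_dma_init(struct device *dev, struct dma_chan **tx)
{
	*tx = dma_request_slave_channel_reason(dev, "tx");
	if (IS_ERR(*tx))
		return PTR_ERR(*tx);	/* may well be -EPROBE_DEFER */
	return 0;
}

static void my_tx_dma_release(struct dma_chan *tx)
{
	if (!IS_ERR_OR_NULL(tx))
		dma_release_channel(tx);
}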
spi-omap2-mcspi.c
101 struct dma_chan *dma_tx; member
402 if (mcspi_dma->dma_tx) { in omap2_mcspi_tx_dma()
406 dmaengine_slave_config(mcspi_dma->dma_tx, &cfg); in omap2_mcspi_tx_dma()
412 tx = dmaengine_prep_slave_sg(mcspi_dma->dma_tx, &sg, 1, in omap2_mcspi_tx_dma()
422 dma_async_issue_pending(mcspi_dma->dma_tx); in omap2_mcspi_tx_dma()
966 mcspi_dma->dma_tx = in omap2_mcspi_request_dma()
971 if (!mcspi_dma->dma_tx) { in omap2_mcspi_request_dma()
1008 if (!mcspi_dma->dma_rx || !mcspi_dma->dma_tx) { in omap2_mcspi_setup()
1048 if (mcspi_dma->dma_tx) { in omap2_mcspi_cleanup()
1049 dma_release_channel(mcspi_dma->dma_tx); in omap2_mcspi_cleanup()
[all …]
spi-ep93xx.c
102 struct dma_chan *dma_tx; member
461 chan = espi->dma_tx; in ep93xx_spi_dma_prepare()
543 chan = espi->dma_tx; in ep93xx_spi_dma_finish()
584 dma_async_issue_pending(espi->dma_tx); in ep93xx_spi_dma_transfer()
825 espi->dma_tx = dma_request_channel(mask, ep93xx_spi_dma_filter, in ep93xx_spi_setup_dma()
827 if (!espi->dma_tx) { in ep93xx_spi_setup_dma()
849 if (espi->dma_tx) { in ep93xx_spi_release_dma()
850 dma_release_channel(espi->dma_tx); in ep93xx_spi_release_dma()
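
spi-ep93xx.c predates DT channel lookup and acquires its channel with a capability mask plus a filter function. A sketch of that older pattern; my_filter_param and the match rule are placeholders for the platform-specific data a real filter would inspect:

#include <linux/dmaengine.h>

static bool my_dma_filter(struct dma_chan *chan, void *filter_param)
{
	/* Accept only the channel that was wired up for this device. */
	return chan->private == filter_param;
}

static struct dma_chan *my_request_tx_chan(void *my_filter_param)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	return dma_request_channel(mask, my_dma_filter, my_filter_param);
}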
spi-dw-mid.c
49 struct dw_dma_slave *tx = dws->dma_tx; in mid_spi_dma_init()
76 dws->master->dma_tx = dws->txchan; in mid_spi_dma_init()
325 dws->dma_tx = &mid_dma_tx; in dw_spi_mid_init()
spi-imx.c
809 if (master->dma_tx) { in spi_imx_sdma_exit()
810 dma_release_channel(master->dma_tx); in spi_imx_sdma_exit()
811 master->dma_tx = NULL; in spi_imx_sdma_exit()
829 master->dma_tx = dma_request_slave_channel(dev, "tx"); in spi_imx_sdma_init()
830 if (!master->dma_tx) { in spi_imx_sdma_init()
840 ret = dmaengine_slave_config(master->dma_tx, &slave_config); in spi_imx_sdma_init()
906 desc_tx = dmaengine_prep_slave_sg(master->dma_tx, in spi_imx_dma_transfer()
944 dma_async_issue_pending(master->dma_tx); in spi_imx_dma_transfer()
953 dmaengine_terminate_all(master->dma_tx); in spi_imx_dma_transfer()
spi-rspi.c
545 desc_tx = dmaengine_prep_slave_sg(rspi->master->dma_tx, in rspi_dma_transfer()
585 dma_async_issue_pending(rspi->master->dma_tx); in rspi_dma_transfer()
595 dmaengine_terminate_all(rspi->master->dma_tx); in rspi_dma_transfer()
1063 master->dma_tx = rspi_request_dma_chan(dev, DMA_MEM_TO_DEV, dma_tx_id, in rspi_request_dma()
1065 if (!master->dma_tx) in rspi_request_dma()
1071 dma_release_channel(master->dma_tx); in rspi_request_dma()
1072 master->dma_tx = NULL; in rspi_request_dma()
1083 if (master->dma_tx) in rspi_release_dma()
1084 dma_release_channel(master->dma_tx); in rspi_release_dma()
spi-davinci.c
132 struct dma_chan *dma_tx; member
649 dmaengine_slave_config(dspi->dma_tx, &dma_tx_conf); in davinci_spi_bufs()
685 txdesc = dmaengine_prep_slave_sg(dspi->dma_tx, in davinci_spi_bufs()
703 dma_async_issue_pending(dspi->dma_tx); in davinci_spi_bufs()
818 dspi->dma_tx = dma_request_channel(mask, edma_filter_fn, in davinci_spi_request_dma()
820 if (!dspi->dma_tx) { in davinci_spi_request_dma()
1081 dma_release_channel(dspi->dma_tx); in davinci_spi_probe()
spi-dw.h
134 void *dma_tx; member
spi-img-spfi.c
664 master->dma_tx = spfi->tx_ch; in img_spfi_probe()
spi.c
542 tx_dev = master->dma_tx->device->dev; in __spi_map_msg()
582 tx_dev = master->dma_tx->device->dev; in spi_unmap_msg()
spi-s3c64xx.c
342 spi->dma_tx = sdd->tx_dma.ch; in s3c64xx_spi_prepare_transfer()
/linux-4.1.27/drivers/i2c/busses/
i2c-sh_mobile.c
145 struct dma_chan *dma_tx; member
521 ? pd->dma_rx : pd->dma_tx; in sh_mobile_i2c_dma_unmap()
536 dmaengine_terminate_all(pd->dma_tx); in sh_mobile_i2c_cleanup_dma()
592 struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx; in sh_mobile_i2c_xfer_dma()
602 chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV, in sh_mobile_i2c_xfer_dma()
808 if (!IS_ERR(pd->dma_tx)) { in sh_mobile_i2c_release_dma()
809 dma_release_channel(pd->dma_tx); in sh_mobile_i2c_release_dma()
810 pd->dma_tx = ERR_PTR(-EPROBE_DEFER); in sh_mobile_i2c_release_dma()
909 pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER); in sh_mobile_i2c_probe()
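
i2c-sh_mobile.c keeps its channel pointers initialised to ERR_PTR(-EPROBE_DEFER) and only requests the real channels on first use, falling back to PIO while no channel is available. A sketch of that lazy-request idiom with invented names:

#include <linux/dmaengine.h>
#include <linux/err.h>

struct my_dev {
	struct device *dev;
	struct dma_chan *dma_tx;
};

static void my_probe_init(struct my_dev *pd)
{
	pd->dma_tx = ERR_PTR(-EPROBE_DEFER);	/* "not requested yet" */
}

static struct dma_chan *my_get_tx_chan(struct my_dev *pd)
{
	if (IS_ERR(pd->dma_tx))
		pd->dma_tx = dma_request_slave_channel_reason(pd->dev, "tx");
	return pd->dma_tx;			/* still an ERR_PTR() on failure */
}

static void my_release_dma(struct my_dev *pd)
{
	if (!IS_ERR(pd->dma_tx)) {
		dma_release_channel(pd->dma_tx);
		pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
	}
}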
/linux-4.1.27/drivers/net/ethernet/stmicro/stmmac/
dwmac100_dma.c
36 int burst_len, u32 dma_tx, u32 dma_rx, int atds) in dwmac100_dma_init() argument
63 writel(dma_tx, ioaddr + DMA_TX_BASE_ADDR); in dwmac100_dma_init()
dwmac1000_dma.c
34 int burst_len, u32 dma_tx, u32 dma_rx, int atds) in dwmac1000_dma_init() argument
103 writel(dma_tx, ioaddr + DMA_TX_BASE_ADDR); in dwmac1000_dma_init()
ring_mode.c
43 desc = priv->dma_tx + entry; in stmmac_jumbo_frm()
70 desc = priv->dma_tx + entry; in stmmac_jumbo_frm()
chain_mode.c
36 struct dma_desc *desc = priv->dma_tx + entry; in stmmac_jumbo_frm()
58 desc = priv->dma_tx + entry; in stmmac_jumbo_frm()
stmmac_main.c
360 desc = (priv->dma_tx + entry); in stmmac_get_tx_hwtstamp()
914 stmmac_display_ring((void *)priv->dma_tx, txsize, 0); in stmmac_display_rings()
962 priv->hw->desc->init_tx_desc(&priv->dma_tx[i], in stmmac_clear_descriptors()
1083 priv->hw->mode->init(priv->dma_tx, priv->dma_tx_phy, in init_dma_desc_rings()
1094 p = priv->dma_tx + i; in init_dma_desc_rings()
1135 p = priv->dma_tx + i; in dma_free_tx_skbufs()
1222 priv->dma_tx = dma_zalloc_coherent(priv->device, txsize * in alloc_dma_desc_resources()
1226 if (!priv->dma_tx) { in alloc_dma_desc_resources()
1257 priv->dma_tx, priv->dma_tx_phy); in free_dma_desc_resources()
1326 p = priv->dma_tx + entry; in stmmac_tx_clean()
[all …]
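
stmmac allocates its TX descriptor ring from coherent DMA memory so that the MAC and the CPU always agree on descriptor contents; the ring's bus address (dma_tx_phy) is what later gets written to DMA_TX_BASE_ADDR. A sketch of that allocation, using a dummy descriptor layout instead of stmmac's real struct dma_desc:

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/types.h>

struct my_dma_desc {			/* stand-in for the hardware descriptor */
	__le32 des0, des1, des2, des3;
};

static struct my_dma_desc *my_alloc_tx_ring(struct device *dev,
					    unsigned int txsize,
					    dma_addr_t *ring_phys)
{
	/* Zeroed, coherent memory shared between the CPU and the DMA engine. */
	return dma_zalloc_coherent(dev, txsize * sizeof(struct my_dma_desc),
				   ring_phys, GFP_KERNEL);
}

static void my_free_tx_ring(struct device *dev, unsigned int txsize,
			    struct my_dma_desc *ring, dma_addr_t ring_phys)
{
	dma_free_coherent(dev, txsize * sizeof(struct my_dma_desc),
			  ring, ring_phys);
}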
stmmac.h
45 struct dma_desc *dma_tx; member
common.h
355 int burst_len, u32 dma_tx, u32 dma_rx, int atds);
/linux-4.1.27/drivers/net/ethernet/samsung/sxgbe/
sxgbe_dma.c
47 int fix_burst, int pbl, dma_addr_t dma_tx, in sxgbe_dma_channel_init() argument
69 writel(upper_32_bits(dma_tx), in sxgbe_dma_channel_init()
71 writel(lower_32_bits(dma_tx), in sxgbe_dma_channel_init()
83 dma_addr = dma_tx + ((t_rsize - 1) * SXGBE_DESC_SIZE_BYTES); in sxgbe_dma_channel_init()
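
sxgbe programs a 64-bit descriptor ring base address as two 32-bit register writes. A small sketch of that split; the register offsets here are invented, not SXGBE's:

#include <linux/io.h>
#include <linux/kernel.h>	/* upper_32_bits() / lower_32_bits() */
#include <linux/types.h>

#define MY_TXDESC_BASE_HI	0x10	/* hypothetical register offsets */
#define MY_TXDESC_BASE_LO	0x14

static void my_set_tx_ring_base(void __iomem *ioaddr, dma_addr_t dma_tx)
{
	writel(upper_32_bits(dma_tx), ioaddr + MY_TXDESC_BASE_HI);
	writel(lower_32_bits(dma_tx), ioaddr + MY_TXDESC_BASE_LO);
}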
sxgbe_dma.h
27 int pbl, dma_addr_t dma_tx, dma_addr_t dma_rx,
sxgbe_main.c
337 priv->hw->desc->init_tx_desc(&priv->txq[j]->dma_tx[i]); in sxgbe_clear_descriptors()
404 tx_ring->dma_tx = dma_zalloc_coherent(dev, in init_tx_ring()
407 if (!tx_ring->dma_tx) in init_tx_ring()
436 tx_ring->dma_tx, tx_ring->dma_tx_phy); in init_tx_ring()
553 tx_ring->dma_tx, tx_ring->dma_tx_phy); in free_tx_ring()
622 struct sxgbe_tx_norm_desc *tdesc = txqueue->dma_tx + dma_desc; in tx_free_ring_skbufs()
754 p = tqueue->dma_tx + entry; in sxgbe_tx_queue_clean()
1326 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1351 first_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1373 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
sxgbe_common.h
382 struct sxgbe_tx_norm_desc *dma_tx; member
/linux-4.1.27/drivers/net/ethernet/micrel/
ks8842.c
159 #define KS8842_USE_DMA(adapter) (((adapter)->dma_tx.channel != -1) && \
171 struct ks8842_tx_dma_ctl dma_tx; member
434 struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx; in ks8842_tx_frame_dma()
857 struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx; in ks8842_dma_tx_cb()
873 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_stop_dma()
895 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_dealloc_dma_bufs()
927 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_alloc_dma_bufs()
1002 adapter->dma_tx.channel = -1; in ks8842_open()
1056 if (adapter->dma_tx.adesc) in ks8842_xmit_frame()
1183 adapter->dma_tx.channel = pdata->tx_dma_channel; in ks8842_probe()
[all …]
/linux-4.1.27/drivers/net/ethernet/calxeda/
xgmac.c
370 struct xgmac_dma_desc *dma_tx; member
760 priv->dma_tx = dma_alloc_coherent(priv->device, in xgmac_dma_desc_rings_init()
765 if (!priv->dma_tx) in xgmac_dma_desc_rings_init()
770 priv->dma_rx, priv->dma_tx, in xgmac_dma_desc_rings_init()
781 desc_init_tx_desc(priv->dma_tx, DMA_TX_RING_SZ); in xgmac_dma_desc_rings_init()
832 p = priv->dma_tx + i; in xgmac_free_tx_skbufs()
853 if (priv->dma_tx) { in xgmac_free_dma_desc_rings()
856 priv->dma_tx, priv->dma_tx_phy); in xgmac_free_dma_desc_rings()
857 priv->dma_tx = NULL; in xgmac_free_dma_desc_rings()
881 struct xgmac_dma_desc *p = priv->dma_tx + entry; in xgmac_tx_complete()
[all …]
/linux-4.1.27/drivers/net/wan/
z85230.c
541 if(!chan->dma_tx) in z8530_dma_tx()
572 if(chan->dma_tx) in z8530_dma_status()
807 c->dma_tx = 0; in z8530_sync_open()
897 c->dma_tx = 1; in z8530_sync_dma_open()
1098 c->dma_tx = 1; in z8530_sync_txdma_open()
1444 if(c->dma_tx) in z8530_tx_begin()
1465 if(c->dma_tx) in z8530_tx_begin()
1747 …if(c->dma_tx && ((unsigned long)(virt_to_bus(skb->data+skb->len))>=16*1024*1024 || spans_boundary(… in z8530_queue_xmit()
z85230.h
302 u8 dma_tx; /* TX is to use DMA */ member
/linux-4.1.27/drivers/mmc/host/
davinci_mmc.c
206 struct dma_chan *dma_tx; member
413 sync_dev = host->dma_tx; in davinci_abort_dma()
433 chan = host->dma_tx; in mmc_davinci_send_dma_request()
434 dmaengine_slave_config(host->dma_tx, &dma_tx_conf); in mmc_davinci_send_dma_request()
436 desc = dmaengine_prep_slave_sg(host->dma_tx, in mmc_davinci_send_dma_request()
514 dma_release_channel(host->dma_tx); in davinci_release_dma_channels()
526 host->dma_tx = in davinci_acquire_dma_channels()
529 if (!host->dma_tx) { in davinci_acquire_dma_channels()
546 dma_release_channel(host->dma_tx); in davinci_acquire_dma_channels()
omap.c
132 struct dma_chan *dma_tx; member
414 c = host->dma_tx; in mmc_omap_release_dma()
1003 c = host->dma_tx; in mmc_omap_prepare_data()
1077 host->dma_tx : host->dma_rx; in mmc_omap_start_request()
1393 host->dma_tx = dma_request_slave_channel_compat(mask, in mmc_omap_probe()
1395 if (!host->dma_tx) in mmc_omap_probe()
1445 if (host->dma_tx) in mmc_omap_probe()
1446 dma_release_channel(host->dma_tx); in mmc_omap_probe()
1475 if (host->dma_tx) in mmc_omap_remove()
1476 dma_release_channel(host->dma_tx); in mmc_omap_remove()
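
omap.c requests its channel through dma_request_slave_channel_compat(), which tries the named DT channel first and falls back to the legacy filter-function lookup on boards that still pass a DMA request line via platform data. A sketch of that call, with a trivial placeholder filter:

#include <linux/dmaengine.h>

static bool my_legacy_filter(struct dma_chan *chan, void *param)
{
	return chan->private == param;	/* placeholder board-specific match */
}

static struct dma_chan *my_request_tx(struct device *dev,
				      unsigned int *dma_req_line)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* DT "tx" channel if available, otherwise filter + request line. */
	return dma_request_slave_channel_compat(mask, my_legacy_filter,
						dma_req_line, dev, "tx");
}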
jz4740_mmc.c
150 struct dma_chan *dma_tx; member
170 dma_release_channel(host->dma_tx); in jz4740_mmc_release_dma_channels()
181 host->dma_tx = dma_request_channel(mask, NULL, host); in jz4740_mmc_acquire_dma_channels()
182 if (!host->dma_tx) { in jz4740_mmc_acquire_dma_channels()
199 dma_release_channel(host->dma_tx); in jz4740_mmc_acquire_dma_channels()
211 return (data->flags & MMC_DATA_READ) ? host->dma_rx : host->dma_tx; in jz4740_mmc_get_dma_chan()
287 chan = host->dma_tx; in jz4740_mmc_start_dma_transfer()
/linux-4.1.27/include/linux/spi/
spi.h
456 struct dma_chan *dma_tx; member
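
Finally, spi.h declares dma_tx as a member of struct spi_master: a controller driver that wants the SPI core (spi.c above) to DMA-map transfer buffers for it publishes its channels there and supplies a can_dma() callback. A sketch of that wiring; the 64-byte threshold is an arbitrary example, not a rule from the core:

#include <linux/dmaengine.h>
#include <linux/spi/spi.h>

static bool my_spi_can_dma(struct spi_master *master, struct spi_device *spi,
			   struct spi_transfer *xfer)
{
	/* Only let the core map transfers worth the DMA setup cost. */
	return master->dma_tx && xfer->len > 64;
}

static int my_spi_setup_dma(struct device *dev, struct spi_master *master)
{
	master->dma_tx = dma_request_slave_channel(dev, "tx");
	if (!master->dma_tx)
		return -ENODEV;

	master->can_dma = my_spi_can_dma;	/* core maps via dma_tx->device->dev */
	return 0;
}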