
Searched refs:txd (Results 1 – 89 of 89) sorted by relevance
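
Note: nearly every dmaengine hit below follows the same convention: the driver embeds the core's struct dma_async_tx_descriptor (usually as a member named txd) inside its private descriptor and recovers the outer structure with container_of(). A minimal sketch of that pattern, with hypothetical names (my_desc and txd_to_my_desc are illustrative, not from any file listed here):

/* Driver-private descriptor wrapping the generic dmaengine descriptor. */
struct my_desc {
	struct dma_async_tx_descriptor txd;	/* generic descriptor handed to the core */
	dma_addr_t phys;			/* driver-private state */
};

/* Recover the private descriptor from the pointer the core passes back. */
static inline struct my_desc *txd_to_my_desc(struct dma_async_tx_descriptor *txd)
{
	return container_of(txd, struct my_desc, txd);
}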

/linux-4.1.27/drivers/dma/
s3c24xx-dma.c
381 struct s3c24xx_txd *txd = s3cchan->at; in s3c24xx_dma_getbytes_chan() local
384 return tc * txd->width; in s3c24xx_dma_getbytes_chan()
425 struct s3c24xx_txd *txd = kzalloc(sizeof(*txd), GFP_NOWAIT); in s3c24xx_dma_get_txd() local
427 if (txd) { in s3c24xx_dma_get_txd()
428 INIT_LIST_HEAD(&txd->dsg_list); in s3c24xx_dma_get_txd()
429 txd->dcon = S3C24XX_DCON_INT | S3C24XX_DCON_NORELOAD; in s3c24xx_dma_get_txd()
432 return txd; in s3c24xx_dma_get_txd()
435 static void s3c24xx_dma_free_txd(struct s3c24xx_txd *txd) in s3c24xx_dma_free_txd() argument
439 list_for_each_entry_safe(dsg, _dsg, &txd->dsg_list, node) { in s3c24xx_dma_free_txd()
444 kfree(txd); in s3c24xx_dma_free_txd()
[all …]
sa11x0-dma.c
152 static void sa11x0_dma_start_desc(struct sa11x0_dma_phy *p, struct sa11x0_dma_desc *txd) in sa11x0_dma_start_desc() argument
154 list_del(&txd->vd.node); in sa11x0_dma_start_desc()
155 p->txd_load = txd; in sa11x0_dma_start_desc()
159 p->num, &txd->vd, txd->vd.tx.cookie, txd->ddar); in sa11x0_dma_start_desc()
165 struct sa11x0_dma_desc *txd = p->txd_load; in sa11x0_dma_start_sg() local
171 if (!txd) in sa11x0_dma_start_sg()
180 if (p->sg_load == txd->sglen) { in sa11x0_dma_start_sg()
181 if (!txd->cyclic) { in sa11x0_dma_start_sg()
189 if (txn && txn->ddar == txd->ddar) { in sa11x0_dma_start_sg()
190 txd = txn; in sa11x0_dma_start_sg()
[all …]
amba-pl08x.c
399 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx); in pl08x_start_next_txd() local
402 list_del(&txd->vd.node); in pl08x_start_next_txd()
404 plchan->at = txd; in pl08x_start_next_txd()
410 pl08x_write_lli(pl08x, phychan, &txd->llis_va[0], txd->ccfg); in pl08x_start_next_txd()
532 struct pl08x_txd *txd; in pl08x_getbytes_chan() local
538 txd = plchan->at; in pl08x_getbytes_chan()
540 if (!ch || !txd) in pl08x_getbytes_chan()
560 llis_va = txd->llis_va; in pl08x_getbytes_chan()
561 llis_bus = txd->llis_bus; in pl08x_getbytes_chan()
804 struct pl08x_txd *txd; member
[all …]
timb_dma.c
76 struct dma_async_tx_descriptor txd; member
211 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
221 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
235 struct dma_async_tx_descriptor *txd; in __td_finish() local
244 txd = &td_desc->txd; in __td_finish()
247 txd->cookie); in __td_finish()
256 dma_cookie_complete(txd); in __td_finish()
259 callback = txd->callback; in __td_finish()
260 param = txd->callback_param; in __td_finish()
264 dma_descriptor_unmap(txd); in __td_finish()
[all …]
txx9dmac.c
142 txd_to_txx9dmac_desc(struct dma_async_tx_descriptor *txd) in txd_to_txx9dmac_desc() argument
144 return container_of(txd, struct txx9dmac_desc, txd); in txd_to_txx9dmac_desc()
202 dma_async_tx_descriptor_init(&desc->txd, &dc->chan); in txx9dmac_desc_alloc()
203 desc->txd.tx_submit = txx9dmac_tx_submit; in txx9dmac_desc_alloc()
205 desc->txd.flags = DMA_CTRL_ACK; in txx9dmac_desc_alloc()
206 desc->txd.phys = dma_map_single(chan2parent(&dc->chan), &desc->hwdesc, in txx9dmac_desc_alloc()
219 if (async_tx_test_ack(&desc->txd)) { in txx9dmac_desc_get()
252 child->txd.phys, ddev->descsize, in txx9dmac_sync_desc_for_cpu()
255 desc->txd.phys, ddev->descsize, in txx9dmac_sync_desc_for_cpu()
341 first->txd.cookie, first); in txx9dmac_dostart()
[all …]
sun6i-dma.c
260 struct sun6i_desc *txd) in sun6i_dma_lli_add() argument
262 if ((!prev && !txd) || !next) in sun6i_dma_lli_add()
266 txd->p_lli = next_phy; in sun6i_dma_lli_add()
267 txd->v_lli = next; in sun6i_dma_lli_add()
334 struct sun6i_desc *txd = to_sun6i_desc(&vd->tx); in sun6i_dma_free_desc() local
339 if (unlikely(!txd)) in sun6i_dma_free_desc()
342 p_lli = txd->p_lli; in sun6i_dma_free_desc()
343 v_lli = txd->v_lli; in sun6i_dma_free_desc()
355 kfree(txd); in sun6i_dma_free_desc()
512 struct sun6i_desc *txd; in sun6i_dma_prep_dma_memcpy() local
[all …]
ep93xx_dma.c
127 struct dma_async_tx_descriptor txd; member
254 d->txd.callback = desc->txd.callback; in ep93xx_dma_set_active()
255 d->txd.callback_param = desc->txd.callback_param; in ep93xx_dma_set_active()
300 return !desc->txd.cookie; in ep93xx_dma_advance_active()
420 desc->txd.cookie, desc->src_addr, desc->dst_addr, in m2p_hw_interrupt()
609 last_done = !desc || desc->txd.cookie; in m2m_hw_interrupt()
673 if (async_tx_test_ack(&desc->txd)) { in ep93xx_dma_desc_get()
681 desc->txd.cookie = 0; in ep93xx_dma_desc_get()
682 desc->txd.callback = NULL; in ep93xx_dma_desc_get()
683 desc->txd.callback_param = NULL; in ep93xx_dma_desc_get()
[all …]
at_hdmac.c
116 dma_async_tx_descriptor_init(&desc->txd, chan); in atc_alloc_descriptor()
118 desc->txd.flags = DMA_CTRL_ACK; in atc_alloc_descriptor()
119 desc->txd.tx_submit = atc_tx_submit; in atc_alloc_descriptor()
120 desc->txd.phys = phys; in atc_alloc_descriptor()
141 if (async_tx_test_ack(&desc->txd)) { in atc_desc_get()
208 (*prev)->lli.dscr = desc->txd.phys; in atc_desc_chain()
249 channel_writel(atchan, DSCR, first->txd.phys); in atc_dostart()
266 if (desc->txd.cookie == cookie) in atc_get_desc_by_cookie()
271 if (desc->txd.cookie == cookie) in atc_get_desc_by_cookie()
388 struct dma_async_tx_descriptor *txd = &desc->txd; in atc_chain_complete() local
[all …]
pch_dma.c
97 struct dma_async_tx_descriptor txd; member
150 struct pch_dma_desc *to_pd_desc(struct dma_async_tx_descriptor *txd) in to_pd_desc() argument
152 return container_of(txd, struct pch_dma_desc, txd); in to_pd_desc()
355 channel_writel(pd_chan, NEXT, desc->txd.phys); in pdc_dostart()
363 struct dma_async_tx_descriptor *txd = &desc->txd; in pdc_chain_complete() local
364 dma_async_tx_callback callback = txd->callback; in pdc_chain_complete()
365 void *param = txd->callback_param; in pdc_chain_complete()
405 bad_desc->txd.cookie); in pdc_handle_error()
421 static dma_cookie_t pd_tx_submit(struct dma_async_tx_descriptor *txd) in pd_tx_submit() argument
423 struct pch_dma_desc *desc = to_pd_desc(txd); in pd_tx_submit()
[all …]
tegra20-apb-dma.c
169 struct dma_async_tx_descriptor txd; member
261 return container_of(td, struct tegra_dma_desc, txd); in txd_to_tegra_dma_desc()
284 if (async_tx_test_ack(&dma_desc->txd)) { in tegra_dma_desc_get()
287 dma_desc->txd.flags = 0; in tegra_dma_desc_get()
301 dma_async_tx_descriptor_init(&dma_desc->txd, &tdc->dma_chan); in tegra_dma_desc_get()
302 dma_desc->txd.tx_submit = tegra_dma_tx_submit; in tegra_dma_desc_get()
303 dma_desc->txd.flags = 0; in tegra_dma_desc_get()
591 dma_cookie_complete(&dma_desc->txd); in handle_once_dma_done()
648 callback = dma_desc->txd.callback; in tegra_dma_tasklet()
649 callback_param = dma_desc->txd.callback_param; in tegra_dma_tasklet()
[all …]
pl330.c
498 struct dma_async_tx_descriptor txd; member
1939 return container_of(tx, struct dma_pl330_desc, txd); in to_desc()
1963 __func__, __LINE__, desc->txd.cookie); in fill_queue()
1982 dma_cookie_complete(&desc->txd); in pl330_tasklet()
2008 callback = desc->txd.callback; in pl330_tasklet()
2009 callback_param = desc->txd.callback_param; in pl330_tasklet()
2025 dma_descriptor_unmap(&desc->txd); in pl330_tasklet()
2143 dma_cookie_complete(&desc->txd); in pl330_terminate_all()
2148 dma_cookie_complete(&desc->txd); in pl330_terminate_all()
2264 if (desc->txd.cookie == cookie) { in pl330_tx_status()
[all …]
at_hdmac_regs.h
194 struct dma_async_tx_descriptor txd; member
202 txd_to_at_desc(struct dma_async_tx_descriptor *txd) in txd_to_at_desc() argument
204 return container_of(txd, struct at_desc, txd); in txd_to_at_desc()
at_xdmac.c
281 static inline struct at_xdmac_desc *txd_to_at_desc(struct dma_async_tx_descriptor *txd) in txd_to_at_desc() argument
283 return container_of(txd, struct at_xdmac_desc, tx_dma_desc); in txd_to_at_desc()
1068 struct dma_async_tx_descriptor *txd; in at_xdmac_handle_cyclic() local
1071 txd = &desc->tx_dma_desc; in at_xdmac_handle_cyclic()
1073 if (txd->callback && (txd->flags & DMA_PREP_INTERRUPT)) in at_xdmac_handle_cyclic()
1074 txd->callback(txd->callback_param); in at_xdmac_handle_cyclic()
1094 struct dma_async_tx_descriptor *txd; in at_xdmac_tasklet() local
1110 txd = &desc->tx_dma_desc; in at_xdmac_tasklet()
1116 dma_cookie_complete(txd); in at_xdmac_tasklet()
1117 if (txd->callback && (txd->flags & DMA_PREP_INTERRUPT)) in at_xdmac_tasklet()
[all …]
fsldma.c
512 struct dma_async_tx_descriptor *txd = &desc->async_tx; in fsldma_run_tx_complete_actions() local
515 BUG_ON(txd->cookie < 0); in fsldma_run_tx_complete_actions()
517 if (txd->cookie > 0) { in fsldma_run_tx_complete_actions()
518 ret = txd->cookie; in fsldma_run_tx_complete_actions()
521 if (txd->callback) { in fsldma_run_tx_complete_actions()
523 txd->callback(txd->callback_param); in fsldma_run_tx_complete_actions()
528 dma_run_dependencies(txd); in fsldma_run_tx_complete_actions()
cppi41.c
95 struct dma_async_tx_descriptor txd; member
333 dma_cookie_complete(&c->txd); in cppi41_irq()
334 c->txd.callback(c->txd.callback_param); in cppi41_irq()
354 dma_async_tx_descriptor_init(&c->txd, chan); in cppi41_dma_alloc_chan_resources()
355 c->txd.tx_submit = cppi41_tx_submit; in cppi41_dma_alloc_chan_resources()
525 return &c->txd; in cppi41_dma_prep_slave_sg()
ste_dma40.c
373 struct dma_async_tx_descriptor txd; member
767 if (async_tx_test_ack(&d->txd)) { in d40_desc_get()
1489 struct d40_desc *d40d = container_of(tx, struct d40_desc, txd); in d40_tx_submit()
1614 dma_cookie_complete(&d40d->txd); in dma_tasklet()
1626 callback_active = !!(d40d->txd.flags & DMA_PREP_INTERRUPT); in dma_tasklet()
1627 callback = d40d->txd.callback; in dma_tasklet()
1628 callback_param = d40d->txd.callback_param; in dma_tasklet()
1631 if (async_tx_test_ack(&d40d->txd)) { in dma_tasklet()
2223 desc->txd.flags = dma_flags; in d40_prep_desc()
2224 desc->txd.tx_submit = d40_tx_submit; in d40_prep_desc()
[all …]
mmp_pdma.c
921 struct dma_async_tx_descriptor *txd = &desc->async_tx; in dma_do_tasklet() local
926 if (txd->callback) in dma_do_tasklet()
927 txd->callback(txd->callback_param); in dma_do_tasklet()
929 dma_pool_free(chan->desc_pool, desc, txd->phys); in dma_do_tasklet()
sirf-dma.c
263 static dma_cookie_t sirfsoc_dma_tx_submit(struct dma_async_tx_descriptor *txd) in sirfsoc_dma_tx_submit() argument
265 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(txd->chan); in sirfsoc_dma_tx_submit()
270 sdesc = container_of(txd, struct sirfsoc_dma_desc, desc); in sirfsoc_dma_tx_submit()
277 cookie = dma_cookie_assign(txd); in sirfsoc_dma_tx_submit()
mpc512x_dma.c
473 static dma_cookie_t mpc_dma_tx_submit(struct dma_async_tx_descriptor *txd) in mpc_dma_tx_submit() argument
475 struct mpc_dma_chan *mchan = dma_chan_to_mpc_dma_chan(txd->chan); in mpc_dma_tx_submit()
480 mdesc = container_of(txd, struct mpc_dma_desc, desc); in mpc_dma_tx_submit()
492 cookie = dma_cookie_assign(txd); in mpc_dma_tx_submit()
txx9dmac.h
234 struct dma_async_tx_descriptor txd; member
/linux-4.1.27/Documentation/devicetree/bindings/pinctrl/
marvell,armada-39x-pinctrl.txt
18 mpp1 1 gpio, ua0(txd)
21 mpp4 4 gpio, ua1(txd), ua0(rts), smi(mdc)
31 mpp14 14 gpio, m(vtt), dev(wen1), ua1(txd)
35 mpp18 18 gpio, ua1(txd), spi0(cs0), i2c2(sck)
37 mpp20 20 gpio, sata0(present) [1], ua0(rts), ua1(txd), smi(mdc)
42 mpp25 25 gpio, spi0(cs0), ua0(rts), ua1(txd), sd(d5), dev(cs0)
59 mpp42 42 gpio, ua1(txd), ua0(rts), dev(ad7)
63 mpp46 46 gpio, ref(clk), pcie0(rstout), ua1(txd), led(stb)
65 …8 gpio, sata0(present) [1], m(vtt), tdm(pclk) [1], audio(mclk) [1], sd(d4), pcie0(clkreq), ua1(txd)
68 mpp51 51 gpio, tdm(dtx) [1], audio(sdo) [1], m(decc), ua2(txd)
[all …]
marvell,kirkwood-pinctrl.txt
29 mpp5 5 gpo, nand(io7), uart0(txd), ptp(trig)
36 mpp10 10 gpo, spi(sck), uart0(txd), ptp(trig)
40 mpp13 13 gpio, sdio(cmd), uart1(txd)
42 mpp15 15 gpio, sdio(d1), uart0(rts), uart1(txd)
67 mpp5 5 gpo, nand(io7), uart0(txd), ptp(trig), sata0(act)
74 mpp10 10 gpo, spi(sck), uart0(txd), ptp(trig)
78 mpp13 13 gpio, sdio(cmd), uart1(txd)
80 mpp15 15 gpio, sdio(d1), uart0(rts), uart1(txd), sata0(act)
111 mpp5 5 gpo, nand(io7), uart0(txd), ptp(trig), sata0(act)
118 mpp10 10 gpo, spi(sck), uart0(txd), ptp(trig), sata1(act)
[all …]
marvell,armada-370-pinctrl.txt
17 mpp1 1 gpo, uart0(txd)
18 mpp2 2 gpio, i2c0(sck), uart0(txd)
21 mpp5 5 gpo, ge0(txclko), uart1(txd), spi1(clk), audio(mclk)
25 mpp9 9 gpo, ge0(txd3), uart1(txd), sd0(clk), audio(spdifo)
42 mpp21 21 gpo, ge0(txd5), ge1(txd1), uart1(txd)
63 mpp42 42 gpo, dev(ad3), uart1(txd)
80 mpp55 55 gpio, dev(cs1), uart1(txd), tdm(rst), sata1(prsnt),
91 mpp61 61 gpo, dev(wen1), uart1(txd), audio(rclk)
marvell,dove-pinctrl.txt
20 mpp2 2 gpio, pmu, uart2(txd), sdio0(buspwr), sata(prsnt),
26 mpp6 6 gpio, pmu, uart3(txd), sdio1(buspwr), spi1(mosi), pmu*
37 mpp14 14 gpio, pmu, uart2(txd), sdio1(buspwr), ssp(rxd), pmu*
42 mpp18 18 gpio, uart3(txd), sdio0(buspwr), ac97(sdi3), lcd0(pwm)
49 lcd-spi(mosi), uart1(cts), ssp(txd)
marvell,armada-38x-pinctrl.txt
19 mpp1 1 gpio, ua0(txd)
22 mpp4 4 gpio, ge(mdc), ua1(txd), ua0(rts)
36 mpp18 18 gpio, ge0(rxerr), ptp(trig_gen), ua1(txd), spi0(cs0)
43 mpp25 25 gpio, spi0(cs0), ua0(rts), ua1(txd), sd0(d5), dev(cs0)
60 mpp42 42 gpio, ua1(txd), ua0(rts), dev(ad7)
marvell,armada-375-pinctrl.txt
30 mpp14 14 gpio, i2c0(sda), uart1(txd)
32 mpp16 16 gpio, uart0(txd)
56 mpp40 40 gpio, uart1(txd)
76 mpp60 60 gpio, uart1(txd), led(c2)
marvell,orion-pinctrl.txt
64 mpp17 17 uart1(txd), ge(rxd5), gpio
89 mpp17 17 uart1(txd), ge(rxd5)
marvell,armada-xp-pinctrl.txt
65 mpp43 43 gpio, uart2(txd), uart0(rts), spi(cs3), pcie(rstout)
68 mpp45 45 gpio, uart2(rts), uart3(txd), spi(cs5), sata1(prsnt)
/linux-4.1.27/include/linux/
dmaengine.h
489 static inline void txd_lock(struct dma_async_tx_descriptor *txd) in txd_lock() argument
492 static inline void txd_unlock(struct dma_async_tx_descriptor *txd) in txd_unlock() argument
495 static inline void txd_chain(struct dma_async_tx_descriptor *txd, struct dma_async_tx_descriptor *n… in txd_chain() argument
499 static inline void txd_clear_parent(struct dma_async_tx_descriptor *txd) in txd_clear_parent() argument
502 static inline void txd_clear_next(struct dma_async_tx_descriptor *txd) in txd_clear_next() argument
505 static inline struct dma_async_tx_descriptor *txd_next(struct dma_async_tx_descriptor *txd) in txd_next() argument
509 static inline struct dma_async_tx_descriptor *txd_parent(struct dma_async_tx_descriptor *txd) in txd_parent() argument
515 static inline void txd_lock(struct dma_async_tx_descriptor *txd) in txd_lock() argument
517 spin_lock_bh(&txd->lock); in txd_lock()
519 static inline void txd_unlock(struct dma_async_tx_descriptor *txd) in txd_unlock() argument
[all …]
/linux-4.1.27/drivers/rapidio/devices/
tsi721_dma.c
61 struct tsi721_tx_desc *to_tsi721_desc(struct dma_async_tx_descriptor *txd) in to_tsi721_desc() argument
63 return container_of(txd, struct tsi721_tx_desc, txd); in to_tsi721_desc()
380 struct dma_async_tx_descriptor *txd = &desc->txd; in tsi721_dma_tx_err() local
381 dma_async_tx_callback callback = txd->callback; in tsi721_dma_tx_err()
382 void *param = txd->callback_param; in tsi721_dma_tx_err()
416 struct dma_chan *dchan = desc->txd.chan; in tsi721_submit_sg()
604 dma_cookie_complete(&desc->txd); in tsi721_dma_tasklet()
605 if (desc->txd.flags & DMA_PREP_INTERRUPT) { in tsi721_dma_tasklet()
606 callback = desc->txd.callback; in tsi721_dma_tasklet()
607 param = desc->txd.callback_param; in tsi721_dma_tasklet()
[all …]
tsi721.h
648 struct dma_async_tx_descriptor txd; member
/linux-4.1.27/drivers/net/ethernet/intel/fm10k/
fm10k_debugfs.c
78 struct fm10k_tx_desc *txd = FM10K_TX_DESC(ring, i); in fm10k_dbg_tx_desc_seq_show() local
81 i, txd->buffer_addr, txd->buflen, txd->vlan, in fm10k_dbg_tx_desc_seq_show()
82 txd->mss, txd->hdrlen, txd->flags); in fm10k_dbg_tx_desc_seq_show()
/linux-4.1.27/drivers/media/pci/mantis/
mantis_i2c.c
87 u32 txd = 0, stat, trials; in mantis_i2c_write() local
94 txd = (msg->addr << 25) | (msg->buf[i] << 8) in mantis_i2c_write()
100 txd &= ~MANTIS_I2C_STOP; in mantis_i2c_write()
103 mmwrite(txd, MANTIS_I2CDATA_CTL); in mantis_i2c_write()
131 u32 stat, data, txd; in mantis_i2c_xfer() local
154 txd = msgs[i].addr << 25 | (0x1 << 24) in mantis_i2c_xfer()
158 mmwrite(txd, MANTIS_I2CDATA_CTL); in mantis_i2c_xfer()
/linux-4.1.27/drivers/video/fbdev/
mx3fb.c
269 struct dma_async_tx_descriptor *txd; member
397 if (mx3_fbi->txd) in sdc_enable_channel()
399 to_tx_desc(mx3_fbi->txd), to_tx_desc(mx3_fbi->txd)->sg); in sdc_enable_channel()
405 mx3_fbi->txd = dmaengine_prep_slave_sg(dma_chan, in sdc_enable_channel()
407 if (!mx3_fbi->txd) { in sdc_enable_channel()
413 mx3_fbi->txd->callback_param = mx3_fbi->txd; in sdc_enable_channel()
414 mx3_fbi->txd->callback = mx3fb_dma_done; in sdc_enable_channel()
416 cookie = mx3_fbi->txd->tx_submit(mx3_fbi->txd); in sdc_enable_channel()
418 mx3_fbi->txd, cookie, list_empty(&ichan->queue) ? '-' : '+'); in sdc_enable_channel()
420 if (!mx3_fbi->txd || !mx3_fbi->txd->tx_submit) { in sdc_enable_channel()
[all …]
/linux-4.1.27/drivers/media/platform/soc_camera/
mx3_camera.c
70 struct dma_async_tx_descriptor *txd; member
145 struct dma_chan *chan = desc->txd.chan; in mx3_cam_dma_done()
150 desc->txd.cookie, mx3_cam->active ? sg_dma_address(&mx3_cam->active->sg) : 0); in mx3_cam_dma_done()
265 struct dma_async_tx_descriptor *txd; in mx3_videobuf_queue() local
280 if (!buf->txd) { in mx3_videobuf_queue()
284 txd = dmaengine_prep_slave_sg( in mx3_videobuf_queue()
287 if (!txd) in mx3_videobuf_queue()
290 txd->callback_param = txd; in mx3_videobuf_queue()
291 txd->callback = mx3_cam_dma_done; in mx3_videobuf_queue()
293 buf->txd = txd; in mx3_videobuf_queue()
[all …]
/linux-4.1.27/drivers/dma/dw/
core.c
93 if (async_tx_test_ack(&desc->txd)) { in dwc_desc_get()
251 channel_writel(dwc, LLP, first->txd.phys); in dwc_dostart()
267 dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__, desc->txd.cookie); in dwc_dostart_first_queued()
279 struct dma_async_tx_descriptor *txd = &desc->txd; in dwc_descriptor_complete() local
283 dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie); in dwc_descriptor_complete()
286 dma_cookie_complete(txd); in dwc_descriptor_complete()
288 callback = txd->callback; in dwc_descriptor_complete()
289 param = txd->callback_param; in dwc_descriptor_complete()
294 async_tx_ack(&child->txd); in dwc_descriptor_complete()
295 async_tx_ack(&desc->txd); in dwc_descriptor_complete()
[all …]
regs.h
334 struct dma_async_tx_descriptor txd; member
342 txd_to_dw_desc(struct dma_async_tx_descriptor *txd) in txd_to_dw_desc() argument
344 return container_of(txd, struct dw_desc, txd); in txd_to_dw_desc()
/linux-4.1.27/drivers/net/fddi/skfp/h/
hwmtm.h
210 #define HWM_GET_TX_PHYS(txd) (u_long)AIX_REVERSE((txd)->txd_tbadr) argument
226 #define HWM_GET_TX_LEN(txd) ((int)AIX_REVERSE((txd)->txd_tbctrl)& RD_LENGTH) argument
/linux-4.1.27/drivers/dma/ioat/
dma_v2.c
112 async_tx_ack(&desc->txd); in __ioat2_start_null_desc()
113 ioat2_set_chainaddr(ioat, desc->txd.phys); in __ioat2_start_null_desc()
144 tx = &desc->txd; in __cleanup()
215 ioat2_set_chainaddr(ioat, desc->txd.phys); in __ioat2_restart_chan()
457 dma_async_tx_descriptor_init(&desc->txd, chan); in ioat2_alloc_ring_ent()
458 desc->txd.tx_submit = ioat2_tx_submit_unlock; in ioat2_alloc_ring_ent()
460 desc->txd.phys = phys; in ioat2_alloc_ring_ent()
469 pci_pool_free(dma->dma_pool, desc->hw, desc->txd.phys); in ioat2_free_ring_ent()
502 hw->next = next->txd.phys; in ioat2_alloc_ring()
504 ring[i]->hw->next = ring[0]->txd.phys; in ioat2_alloc_ring()
[all …]
dma.c
246 chain_tail->hw->next = first->txd.phys; in ioat1_tx_submit()
290 dma_async_tx_descriptor_init(&desc_sw->txd, &ioat->base.common); in ioat_dma_alloc_descriptor()
291 desc_sw->txd.tx_submit = ioat1_tx_submit; in ioat_dma_alloc_descriptor()
293 desc_sw->txd.phys = phys; in ioat_dma_alloc_descriptor()
432 desc->txd.phys); in ioat1_dma_free_chan_resources()
439 desc->txd.phys); in ioat1_dma_free_chan_resources()
526 async_tx_ack(&desc->txd); in ioat1_dma_prep_memcpy()
528 hw->next = next ? next->txd.phys : 0; in ioat1_dma_prep_memcpy()
546 desc->txd.flags = flags; in ioat1_dma_prep_memcpy()
554 return &desc->txd; in ioat1_dma_prep_memcpy()
[all …]
dma_v3.c
406 tx = &desc->txd; in __cleanup()
544 tx = &desc->txd; in ioat3_eh()
559 *chan->completion = desc->txd.phys; in ioat3_eh()
731 desc->txd.flags = flags; in __ioat3_prep_xor_lock()
739 compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT; in __ioat3_prep_xor_lock()
749 return &compl_desc->txd; in __ioat3_prep_xor_lock()
786 desc_id(desc), (unsigned long long) desc->txd.phys, in dump_pq_desc_dbg()
788 desc->txd.flags, pq->size, pq->ctl, pq->ctl_f.op, pq->ctl_f.int_en, in dump_pq_desc_dbg()
819 desc_id(desc), (unsigned long long) desc->txd.phys, in dump_pq16_desc_dbg()
821 desc->txd.flags, pq->size, pq->ctl, in dump_pq16_desc_dbg()
[all …]
dma.h
36 #define tx_to_ioat_desc(tx) container_of(tx, struct ioat_desc_sw, txd)
183 struct dma_async_tx_descriptor txd; member
211 ({ if (d) __dump_desc_dbg(&c->base, d->hw, &d->txd, desc_id(d)); 0; })
dma_v2.h
130 struct dma_async_tx_descriptor txd; member
/linux-4.1.27/drivers/dma/ipu/
ipu_idmac.c
782 if (async_tx_test_ack(&desc->txd)) in ipu_submit_buffer()
928 struct dma_async_tx_descriptor *txd = &desc->txd; in idmac_desc_alloc() local
930 memset(txd, 0, sizeof(*txd)); in idmac_desc_alloc()
931 dma_async_tx_descriptor_init(txd, &ichan->dma_chan); in idmac_desc_alloc()
932 txd->tx_submit = idmac_tx_submit; in idmac_desc_alloc()
1281 callback = descnew->txd.callback; in idmac_interrupt()
1282 callback_param = descnew->txd.callback_param; in idmac_interrupt()
1293 dma_cookie_complete(&desc->txd); in idmac_interrupt()
1295 callback = desc->txd.callback; in idmac_interrupt()
1296 callback_param = desc->txd.callback_param; in idmac_interrupt()
[all …]
/linux-4.1.27/drivers/net/ethernet/realtek/
8139cp.c
655 struct cp_desc *txd = cp->tx_ring + tx_tail; in cp_tx() local
660 status = le32_to_cpu(txd->opts1); in cp_tx()
667 dma_unmap_single(&cp->pdev->dev, le64_to_cpu(txd->addr), in cp_tx()
668 le32_to_cpu(txd->opts1) & 0xffff, in cp_tx()
719 struct cp_desc *txd; in unwind_tx_frag_mapping() local
724 txd = &cp->tx_ring[index]; in unwind_tx_frag_mapping()
726 dma_unmap_single(&cp->pdev->dev, le64_to_cpu(txd->addr), in unwind_tx_frag_mapping()
758 struct cp_desc *txd = &cp->tx_ring[entry]; in cp_start_xmit() local
767 txd->opts2 = opts2; in cp_start_xmit()
768 txd->addr = cpu_to_le64(mapping); in cp_start_xmit()
[all …]
r8169.c
6811 struct TxDesc *uninitialized_var(txd); in rtl8169_xmit_frags()
6823 txd = tp->TxDescArray + entry; in rtl8169_xmit_frags()
6838 txd->opts1 = cpu_to_le32(status); in rtl8169_xmit_frags()
6839 txd->opts2 = cpu_to_le32(opts[1]); in rtl8169_xmit_frags()
6840 txd->addr = cpu_to_le64(mapping); in rtl8169_xmit_frags()
6847 txd->opts1 |= cpu_to_le32(LastFrag); in rtl8169_xmit_frags()
7043 struct TxDesc *txd = tp->TxDescArray + entry; in rtl8169_start_xmit() local
7056 if (unlikely(le32_to_cpu(txd->opts1) & DescOwn)) in rtl8169_start_xmit()
7076 txd->addr = cpu_to_le64(mapping); in rtl8169_start_xmit()
7088 txd->opts2 = cpu_to_le32(opts[1]); in rtl8169_start_xmit()
[all …]
/linux-4.1.27/drivers/spi/
spi-ep93xx.c
435 struct dma_async_tx_descriptor *txd; in ep93xx_spi_dma_prepare() local
517 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK); in ep93xx_spi_dma_prepare()
518 if (!txd) { in ep93xx_spi_dma_prepare()
522 return txd; in ep93xx_spi_dma_prepare()
558 struct dma_async_tx_descriptor *rxd, *txd; in ep93xx_spi_dma_transfer() local
567 txd = ep93xx_spi_dma_prepare(espi, DMA_MEM_TO_DEV); in ep93xx_spi_dma_transfer()
568 if (IS_ERR(txd)) { in ep93xx_spi_dma_transfer()
571 msg->status = PTR_ERR(txd); in ep93xx_spi_dma_transfer()
581 dmaengine_submit(txd); in ep93xx_spi_dma_transfer()
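
The spi-ep93xx.c hits above show the consumer side of this API: prepare a descriptor, attach completion handling, submit, and kick the channel. A minimal sketch of that flow, with assumed placeholder names (my_dma_done and my_ctx are not from the driver):

/* Prepare a slave scatter-gather transfer, then submit and start it. */
struct dma_async_tx_descriptor *txd;

txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, DMA_MEM_TO_DEV,
			      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
if (!txd)
	return -ENOMEM;			/* prep can fail; the driver checks for this */

txd->callback = my_dma_done;		/* hypothetical completion callback */
txd->callback_param = my_ctx;
dmaengine_submit(txd);			/* queue it; this assigns the DMA cookie */
dma_async_issue_pending(chan);		/* tell the engine to start processing */
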
spi-ppc4xx.c
99 u8 txd; member
162 out_8(&hw->regs->txd, data); in spi_ppc4xx_txrx()
339 out_8(&hw->regs->txd, data); in spi_ppc4xx_int()
/linux-4.1.27/include/linux/dma/
ipu-dma.h
148 struct dma_async_tx_descriptor txd; member
174 #define to_tx_desc(tx) container_of(tx, struct idmac_tx_desc, txd)
/linux-4.1.27/drivers/net/ethernet/sun/
sungem.c
652 struct gem_txd *txd; in gem_tx() local
681 txd = &gp->init_block->txd[entry]; in gem_tx()
683 dma_addr = le64_to_cpu(txd->buffer); in gem_tx()
684 dma_len = le64_to_cpu(txd->control_word) & TXDCTRL_BUFSZ; in gem_tx()
1028 struct gem_txd *txd = &gp->init_block->txd[entry]; in gem_start_xmit() local
1040 txd->buffer = cpu_to_le64(mapping); in gem_start_xmit()
1042 txd->control_word = cpu_to_le64(ctrl); in gem_start_xmit()
1045 struct gem_txd *txd; in gem_start_xmit() local
1077 txd = &gp->init_block->txd[entry]; in gem_start_xmit()
1078 txd->buffer = cpu_to_le64(mapping); in gem_start_xmit()
[all …]
sunhme.c
203 static void sbus_hme_write_txd(struct happy_meal_txd *txd, u32 flags, u32 addr) in sbus_hme_write_txd() argument
205 txd->tx_addr = (__force hme32)addr; in sbus_hme_write_txd()
207 txd->tx_flags = (__force hme32)flags; in sbus_hme_write_txd()
232 static void pci_hme_write_txd(struct happy_meal_txd *txd, u32 flags, u32 addr) in pci_hme_write_txd() argument
234 txd->tx_addr = (__force hme32)cpu_to_le32(addr); in pci_hme_write_txd()
236 txd->tx_flags = (__force hme32)cpu_to_le32(flags); in pci_hme_write_txd()
1219 struct happy_meal_txd *txd; in happy_meal_clean_rings() local
1226 txd = &hp->happy_block->happy_meal_txd[i]; in happy_meal_clean_rings()
1227 dma_addr = hme_read_desc32(hp, &txd->tx_addr); in happy_meal_clean_rings()
1230 (hme_read_desc32(hp, &txd->tx_flags) in happy_meal_clean_rings()
[all …]
sungem.h
955 struct gem_txd txd[INIT_BLOCK_TX_RING_SIZE]; member
cassini.c
1892 struct cas_tx_desc *txd = txds + entry; in cas_tx_ringN() local
1894 daddr = le64_to_cpu(txd->buffer); in cas_tx_ringN()
1896 le64_to_cpu(txd->control)); in cas_tx_ringN()
2739 struct cas_tx_desc *txd = cp->init_txds[ring] + entry; in cas_write_txd() local
2746 txd->control = cpu_to_le64(ctrl); in cas_write_txd()
2747 txd->buffer = cpu_to_le64(mapping); in cas_write_txd()
3896 struct cas_tx_desc *txd = cp->init_txds[ring]; in cas_clean_txd() local
3917 daddr = le64_to_cpu(txd[ent].buffer); in cas_clean_txd()
3919 le64_to_cpu(txd[ent].control)); in cas_clean_txd()
/linux-4.1.27/arch/cris/include/arch-v32/arch/hwregs/
ser_defs.h
99 unsigned int txd : 1; member
191 unsigned int txd : 1; member
212 unsigned int txd : 1; member
/linux-4.1.27/drivers/net/ethernet/icplus/
ipg.c
161 offset = (u32) &sp->txd[i].next_desc - (u32) sp->txd; in ipg_dump_tfdlist()
163 i, offset, (unsigned long)sp->txd[i].next_desc); in ipg_dump_tfdlist()
165 offset = (u32) &sp->txd[i].tfc - (u32) sp->txd; in ipg_dump_tfdlist()
167 i, offset, (unsigned long) sp->txd[i].tfc); in ipg_dump_tfdlist()
168 offset = (u32) &sp->txd[i].frag_info - (u32) sp->txd; in ipg_dump_tfdlist()
170 i, offset, (unsigned long) sp->txd[i].frag_info); in ipg_dump_tfdlist()
822 struct ipg_tx *txfd = sp->txd + i; in init_tfdlist()
834 sp->txd[i - 1].next_desc = cpu_to_le64(sp->txd_map); in init_tfdlist()
864 struct ipg_tx *txfd = sp->txd + dirty; in ipg_nic_txfree()
1707 struct ipg_tx *txfd = sp->txd + i; in ipg_tx_clear()
[all …]
ipg.h
712 struct ipg_tx *txd; member
/linux-4.1.27/drivers/ntb/
ntb_transport.c
1056 struct dma_async_tx_descriptor *txd; in ntb_async_rx() local
1099 txd = device->device_prep_dma_memcpy(chan, unmap->addr[1], in ntb_async_rx()
1102 if (!txd) in ntb_async_rx()
1105 txd->callback = ntb_rx_copy_callback; in ntb_async_rx()
1106 txd->callback_param = entry; in ntb_async_rx()
1107 dma_set_unmap(txd, unmap); in ntb_async_rx()
1109 cookie = dmaengine_submit(txd); in ntb_async_rx()
1275 struct dma_async_tx_descriptor *txd; in ntb_async_tx() local
1319 txd = device->device_prep_dma_memcpy(chan, dest, unmap->addr[0], len, in ntb_async_tx()
1321 if (!txd) in ntb_async_tx()
[all …]
/linux-4.1.27/drivers/net/wireless/rt2x00/
rt2800mmio.c
53 __le32 *txd = entry_priv->desc; in rt2800mmio_write_tx_desc() local
70 rt2x00_desc_write(txd, 0, word); in rt2800mmio_write_tx_desc()
81 rt2x00_desc_write(txd, 1, word); in rt2800mmio_write_tx_desc()
86 rt2x00_desc_write(txd, 2, word); in rt2800mmio_write_tx_desc()
92 rt2x00_desc_write(txd, 3, word); in rt2800mmio_write_tx_desc()
97 skbdesc->desc = txd; in rt2800mmio_write_tx_desc()
rt2400pci.c
1107 __le32 *txd = entry_priv->desc; in rt2400pci_write_tx_desc() local
1113 rt2x00_desc_read(txd, 1, &word); in rt2400pci_write_tx_desc()
1115 rt2x00_desc_write(txd, 1, word); in rt2400pci_write_tx_desc()
1117 rt2x00_desc_read(txd, 2, &word); in rt2400pci_write_tx_desc()
1120 rt2x00_desc_write(txd, 2, word); in rt2400pci_write_tx_desc()
1122 rt2x00_desc_read(txd, 3, &word); in rt2400pci_write_tx_desc()
1129 rt2x00_desc_write(txd, 3, word); in rt2400pci_write_tx_desc()
1131 rt2x00_desc_read(txd, 4, &word); in rt2400pci_write_tx_desc()
1140 rt2x00_desc_write(txd, 4, word); in rt2400pci_write_tx_desc()
1147 rt2x00_desc_read(txd, 0, &word); in rt2400pci_write_tx_desc()
[all …]
rt2500pci.c
1260 __le32 *txd = entry_priv->desc; in rt2500pci_write_tx_desc() local
1266 rt2x00_desc_read(txd, 1, &word); in rt2500pci_write_tx_desc()
1268 rt2x00_desc_write(txd, 1, word); in rt2500pci_write_tx_desc()
1270 rt2x00_desc_read(txd, 2, &word); in rt2500pci_write_tx_desc()
1275 rt2x00_desc_write(txd, 2, word); in rt2500pci_write_tx_desc()
1277 rt2x00_desc_read(txd, 3, &word); in rt2500pci_write_tx_desc()
1284 rt2x00_desc_write(txd, 3, word); in rt2500pci_write_tx_desc()
1286 rt2x00_desc_read(txd, 10, &word); in rt2500pci_write_tx_desc()
1289 rt2x00_desc_write(txd, 10, word); in rt2500pci_write_tx_desc()
1296 rt2x00_desc_read(txd, 0, &word); in rt2500pci_write_tx_desc()
[all …]
rt61pci.c
1873 __le32 *txd = entry_priv->desc; in rt61pci_write_tx_desc() local
1879 rt2x00_desc_read(txd, 1, &word); in rt61pci_write_tx_desc()
1888 rt2x00_desc_write(txd, 1, word); in rt61pci_write_tx_desc()
1890 rt2x00_desc_read(txd, 2, &word); in rt61pci_write_tx_desc()
1897 rt2x00_desc_write(txd, 2, word); in rt61pci_write_tx_desc()
1900 _rt2x00_desc_write(txd, 3, skbdesc->iv[0]); in rt61pci_write_tx_desc()
1901 _rt2x00_desc_write(txd, 4, skbdesc->iv[1]); in rt61pci_write_tx_desc()
1904 rt2x00_desc_read(txd, 5, &word); in rt61pci_write_tx_desc()
1911 rt2x00_desc_write(txd, 5, word); in rt61pci_write_tx_desc()
1914 rt2x00_desc_read(txd, 6, &word); in rt61pci_write_tx_desc()
[all …]
rt2500usb.c
1080 __le32 *txd = (__le32 *) entry->skb->data; in rt2500usb_write_tx_desc() local
1086 rt2x00_desc_read(txd, 0, &word); in rt2500usb_write_tx_desc()
1102 rt2x00_desc_write(txd, 0, word); in rt2500usb_write_tx_desc()
1104 rt2x00_desc_read(txd, 1, &word); in rt2500usb_write_tx_desc()
1109 rt2x00_desc_write(txd, 1, word); in rt2500usb_write_tx_desc()
1111 rt2x00_desc_read(txd, 2, &word); in rt2500usb_write_tx_desc()
1118 rt2x00_desc_write(txd, 2, word); in rt2500usb_write_tx_desc()
1121 _rt2x00_desc_write(txd, 3, skbdesc->iv[0]); in rt2500usb_write_tx_desc()
1122 _rt2x00_desc_write(txd, 4, skbdesc->iv[1]); in rt2500usb_write_tx_desc()
1129 skbdesc->desc = txd; in rt2500usb_write_tx_desc()
rt73usb.c
1456 __le32 *txd = (__le32 *) entry->skb->data; in rt73usb_write_tx_desc() local
1462 rt2x00_desc_read(txd, 0, &word); in rt73usb_write_tx_desc()
1486 rt2x00_desc_write(txd, 0, word); in rt73usb_write_tx_desc()
1488 rt2x00_desc_read(txd, 1, &word); in rt73usb_write_tx_desc()
1496 rt2x00_desc_write(txd, 1, word); in rt73usb_write_tx_desc()
1498 rt2x00_desc_read(txd, 2, &word); in rt73usb_write_tx_desc()
1505 rt2x00_desc_write(txd, 2, word); in rt73usb_write_tx_desc()
1508 _rt2x00_desc_write(txd, 3, skbdesc->iv[0]); in rt73usb_write_tx_desc()
1509 _rt2x00_desc_write(txd, 4, skbdesc->iv[1]); in rt73usb_write_tx_desc()
1512 rt2x00_desc_read(txd, 5, &word); in rt73usb_write_tx_desc()
[all …]
/linux-4.1.27/drivers/net/ethernet/3com/
typhoon.c
717 struct tx_desc *txd, *first_txd; in typhoon_start_tx() local
787 txd = (struct tx_desc *) (txRing->ringBase + txRing->lastWrite); in typhoon_start_tx()
796 txd->flags = TYPHOON_FRAG_DESC | TYPHOON_DESC_VALID; in typhoon_start_tx()
797 txd->len = cpu_to_le16(skb->len); in typhoon_start_tx()
798 txd->frag.addr = cpu_to_le32(skb_dma); in typhoon_start_tx()
799 txd->frag.addrHi = 0; in typhoon_start_tx()
807 txd->flags = TYPHOON_FRAG_DESC | TYPHOON_DESC_VALID; in typhoon_start_tx()
808 txd->len = cpu_to_le16(len); in typhoon_start_tx()
809 txd->frag.addr = cpu_to_le32(skb_dma); in typhoon_start_tx()
810 txd->frag.addrHi = 0; in typhoon_start_tx()
[all …]
/linux-4.1.27/drivers/ata/
pata_ep93xx.c
703 struct dma_async_tx_descriptor *txd; in ep93xx_pata_dma_start() local
711 txd = dmaengine_prep_slave_sg(channel, qc->sg, qc->n_elem, qc->dma_dir, in ep93xx_pata_dma_start()
713 if (!txd) { in ep93xx_pata_dma_start()
717 txd->callback = NULL; in ep93xx_pata_dma_start()
718 txd->callback_param = NULL; in ep93xx_pata_dma_start()
720 if (dmaengine_submit(txd) < 0) { in ep93xx_pata_dma_start()
/linux-4.1.27/drivers/net/fddi/skfp/
skfddi.c
125 void mac_drv_tx_complete(struct s_smc *smc, volatile struct s_smt_fp_txd *txd);
1110 struct s_smt_fp_txd *txd; // Current TxD. in send_queued_packets() local
1174 txd = (struct s_smt_fp_txd *) HWM_GET_CURR_TXD(smc, queue); in send_queued_packets()
1179 txd->txd_os.skb = skb; // save skb in send_queued_packets()
1180 txd->txd_os.dma_addr = dma_address; // save dma mapping in send_queued_packets()
1491 void mac_drv_tx_complete(struct s_smc *smc, volatile struct s_smt_fp_txd *txd) in mac_drv_tx_complete() argument
1498 if (!(skb = txd->txd_os.skb)) { in mac_drv_tx_complete()
1502 txd->txd_os.skb = NULL; in mac_drv_tx_complete()
1505 pci_unmap_single(&smc->os.pdev, txd->txd_os.dma_addr, in mac_drv_tx_complete()
1507 txd->txd_os.dma_addr = 0; in mac_drv_tx_complete()
hwmtm.c
102 volatile struct s_smt_fp_txd *txd);
/linux-4.1.27/drivers/net/vmxnet3/
vmxnet3_drv.c
337 BUG_ON(VMXNET3_TXDESC_GET_EOP(&(tq->tx_ring.base[eop_idx].txd)) != 1); in vmxnet3_unmap_pkt()
679 ctx->sop_txd->txd.addr = cpu_to_le64(tq->data_ring.basePA + in vmxnet3_map_pkt()
691 le64_to_cpu(ctx->sop_txd->txd.addr), in vmxnet3_map_pkt()
722 BUG_ON(gdesc->txd.gen == tq->tx_ring.gen); in vmxnet3_map_pkt()
724 gdesc->txd.addr = cpu_to_le64(tbi->dma_addr); in vmxnet3_map_pkt()
730 tq->tx_ring.next2fill, le64_to_cpu(gdesc->txd.addr), in vmxnet3_map_pkt()
762 BUG_ON(gdesc->txd.gen == tq->tx_ring.gen); in vmxnet3_map_pkt()
764 gdesc->txd.addr = cpu_to_le64(tbi->dma_addr); in vmxnet3_map_pkt()
770 tq->tx_ring.next2fill, le64_to_cpu(gdesc->txd.addr), in vmxnet3_map_pkt()
1034 gdesc->txd.hlen = ctx.eth_ip_hdr_size + ctx.l4_hdr_size; in vmxnet3_tq_xmit()
[all …]
vmxnet3_defs.h
309 struct Vmxnet3_TxDesc txd; member
/linux-4.1.27/drivers/net/hamradio/
yam.c
126 int txd; /* tx delay */ member
610 if ((yp->tx_state == TX_TAIL) || (yp->txd == 0)) in yam_start_tx()
613 yp->tx_count = (yp->bitrate * yp->txd) / 8000; in yam_start_tx()
825 seq_printf(seq, " TxDelay %u\n", yp->txd); in yam_seq_show()
1042 yp->txd = yi.cfg.txdelay; in yam_ioctl()
1070 yi.cfg.txdelay = yp->txd; in yam_ioctl()
1119 yp->txd = DEFAULT_TXD; in yam_setup()
/linux-4.1.27/arch/arm/mach-imx/
mach-pcm043.c
249 iomux_v3_cfg_t txd = MX35_PAD_STXD4__AUDMUX_AUD4_TXD; in pcm043_ac97_cold_reset() local
277 mxc_iomux_v3_setup_pad(txd); in pcm043_ac97_cold_reset()
/linux-4.1.27/drivers/net/ethernet/dec/tulip/
de2104x.c
611 struct de_desc *txd; in de_start_xmit() local
625 txd = &de->tx_ring[entry]; in de_start_xmit()
634 txd->opts2 = cpu_to_le32(flags); in de_start_xmit()
635 txd->addr1 = cpu_to_le32(mapping); in de_start_xmit()
641 txd->opts1 = cpu_to_le32(DescOwn); in de_start_xmit()
726 struct de_desc *txd; in __de_set_rx_mode() local
775 txd = &de->tx_ring[entry]; in __de_set_rx_mode()
777 txd->opts2 = cpu_to_le32(SetupFrame | RingEnd | sizeof (de->setup_frame)); in __de_set_rx_mode()
779 txd->opts2 = cpu_to_le32(SetupFrame | sizeof (de->setup_frame)); in __de_set_rx_mode()
780 txd->addr1 = cpu_to_le32(mapping); in __de_set_rx_mode()
[all …]
/linux-4.1.27/drivers/net/ethernet/cirrus/
ep93xx_eth.c
355 struct ep93xx_tdesc *txd; in ep93xx_xmit() local
367 txd = &ep->descs->tdesc[entry]; in ep93xx_xmit()
369 txd->tdesc1 = TDESC1_EOF | (entry << 16) | (skb->len & 0xfff); in ep93xx_xmit()
370 dma_sync_single_for_cpu(dev->dev.parent, txd->buf_addr, skb->len, in ep93xx_xmit()
373 dma_sync_single_for_device(dev->dev.parent, txd->buf_addr, skb->len, in ep93xx_xmit()
/linux-4.1.27/drivers/net/ethernet/sfc/
farch.c
294 const efx_qword_t *txd) in efx_farch_push_tx_desc() argument
305 reg.qword[0] = *txd; in efx_farch_push_tx_desc()
318 efx_qword_t *txd; in efx_farch_tx_write() local
329 txd = efx_tx_desc(tx_queue, write_ptr); in efx_farch_tx_write()
336 EFX_POPULATE_QWORD_4(*txd, in efx_farch_tx_write()
347 txd = efx_tx_desc(tx_queue, in efx_farch_tx_write()
349 efx_farch_push_tx_desc(tx_queue, txd); in efx_farch_tx_write()
363 return efx_alloc_special_buffer(efx, &tx_queue->txd, in efx_farch_tx_probe()
373 efx_init_special_buffer(efx, &tx_queue->txd); in efx_farch_tx_init()
380 FRF_AZ_TX_DESCQ_BUF_BASE_ID, tx_queue->txd.index, in efx_farch_tx_init()
[all …]
ef10.c
1211 return efx_nic_alloc_buffer(tx_queue->efx, &tx_queue->txd.buf, in efx_ef10_tx_probe()
1219 const efx_qword_t *txd) in efx_ef10_push_tx_desc() argument
1226 reg.qword[0] = *txd; in efx_ef10_push_tx_desc()
1237 size_t entries = tx_queue->txd.buf.len / EFX_BUF_SIZE; in efx_ef10_tx_init()
1242 efx_qword_t *txd; in efx_ef10_tx_init() local
1256 dma_addr = tx_queue->txd.buf.dma_addr; in efx_ef10_tx_init()
1281 txd = efx_tx_desc(tx_queue, 0); in efx_ef10_tx_init()
1282 EFX_POPULATE_QWORD_4(*txd, in efx_ef10_tx_init()
1290 efx_ef10_push_tx_desc(tx_queue, txd); in efx_ef10_tx_init()
1325 efx_nic_free_buffer(tx_queue->efx, &tx_queue->txd.buf); in efx_ef10_tx_remove()
[all …]
efx.c
473 memset(&tx_queue->txd, 0, sizeof(tx_queue->txd)); in efx_copy_channel()
779 tx_queue->txd.index + in efx_realloc_channels()
780 tx_queue->txd.entries); in efx_realloc_channels()
nic.h
71 return ((efx_qword_t *) (tx_queue->txd.buf.addr)) + index; in efx_tx_desc()
net_driver.h
234 struct efx_special_buffer txd; member
/linux-4.1.27/arch/cris/arch-v10/kernel/
head.S
585 move.b IO_FIELD (R_SERIAL0_TR_CTRL, txd, 0) \
619 move.b IO_FIELD (R_SERIAL1_TR_CTRL, txd, 0) \
654 move.b IO_FIELD (R_SERIAL2_TR_CTRL, txd, 0) \
690 move.b IO_FIELD (R_SERIAL3_TR_CTRL, txd, 0) \
debugport.c
270 IO_FIELD(R_SERIAL0_TR_CTRL, txd, 0) | in start_port()
/linux-4.1.27/drivers/tty/serial/
etraxfs-uart.c
468 tr_ctrl.txd = 0; in etraxfs_uart_break_ctl()
474 tr_ctrl.txd = 1; in etraxfs_uart_break_ctl()
710 tx_ctrl.txd = 1; in etraxfs_uart_set_termios()
/linux-4.1.27/drivers/net/ethernet/intel/i40e/
i40e_debugfs.c
779 struct i40e_tx_desc *txd; in i40e_dbg_dump_desc() local
812 txd = I40E_TX_DESC(ring, i); in i40e_dbg_dump_desc()
815 i, txd->buffer_addr, in i40e_dbg_dump_desc()
816 txd->cmd_type_offset_bsz); in i40e_dbg_dump_desc()
840 txd = I40E_TX_DESC(ring, desc_n); in i40e_dbg_dump_desc()
844 txd->buffer_addr, txd->cmd_type_offset_bsz); in i40e_dbg_dump_desc()
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmsmac/
dma.c
313 static uint txd(struct dma_info *di, uint x) in txd() function
325 return txd(di, i + 1); in nexttxd()
330 return txd(di, i - 1); in prevtxd()
340 return txd(di, t-h); in ntxdactive()
/linux-4.1.27/drivers/net/wireless/
mwl8k.c
162 struct mwl8k_tx_desc *txd; member
1453 txq->txd = pci_zalloc_consistent(priv->pdev, size, &txq->txd_dma); in mwl8k_txq_init()
1454 if (txq->txd == NULL) { in mwl8k_txq_init()
1461 pci_free_consistent(priv->pdev, size, txq->txd, txq->txd_dma); in mwl8k_txq_init()
1469 tx_desc = txq->txd + i; in mwl8k_txq_init()
1502 struct mwl8k_tx_desc *tx_desc = txq->txd + desc; in mwl8k_dump_tx_rings()
1678 tx_desc = txq->txd + tx; in mwl8k_txq_reclaim()
1759 if (txq->txd == NULL) in mwl8k_txq_deinit()
1769 txq->txd, txq->txd_dma); in mwl8k_txq_deinit()
1770 txq->txd = NULL; in mwl8k_txq_deinit()
[all …]
/linux-4.1.27/drivers/net/ethernet/sis/
Dsis190.c700 struct TxDesc *txd = tp->TxDescRing + entry; in sis190_tx_interrupt() local
701 u32 status = le32_to_cpu(txd->status); in sis190_tx_interrupt()
715 sis190_unmap_tx_skb(tp->pci_dev, skb, txd); in sis190_tx_interrupt()
/linux-4.1.27/drivers/net/ethernet/micrel/
ks8851.c
114 u8 txd[8]; member
250 __le16 *txb = (__le16 *)ks->txd; in ks8851_rdreg()
/linux-4.1.27/drivers/net/ethernet/marvell/
mv643xx_eth.c
2060 struct tx_desc *txd = tx_desc + i; in txq_init() local
2067 txd->cmd_sts = 0; in txq_init()
2068 txd->next_desc_ptr = txq->tx_desc_dma + in txq_init()
/linux-4.1.27/arch/arm/boot/dts/
exynos5250-pinctrl.dtsi
790 c2c_txd: c2c-txd {
/linux-4.1.27/Documentation/networking/
z8530drv.txt
459 Example: sccparam /dev/scc0 txd 20