Lines matching refs:txd (from the timb_dma driver, drivers/dma/timb_dma.c)
72 struct dma_async_tx_descriptor txd; member
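The txd at line 72 is the generic dmaengine descriptor embedded inside the driver's private descriptor, so the driver can always recover its own type with container_of(). A minimal sketch of that layout, assuming the surrounding field names (only the txd member itself is confirmed by the listing); the later sketches below reuse these assumed types and includes:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/io.h>
    #include <linux/list.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    /* Assumed shape of the driver's private types; only the embedded
     * txd member is confirmed by the listing (line 72). */
    struct timb_dma_desc {
        struct list_head desc_node;             /* assumed list linkage */
        struct dma_async_tx_descriptor txd;     /* line 72 */
        u8 *desc_list;                          /* assumed CPU-side descriptor list */
        unsigned int desc_list_len;             /* assumed */
    };

    struct timb_dma_chan {
        struct dma_chan chan;                   /* generic channel, embedded */
        void __iomem *membase;                  /* assumed register window */
        spinlock_t lock;                        /* assumed */
        struct list_head active_list;           /* assumed */
        struct list_head queue;                 /* assumed */
        struct list_head free_list;             /* assumed */
    };

    /* Recover the private descriptor from the generic handle. */
    static inline struct timb_dma_desc *to_td_desc(struct dma_async_tx_descriptor *txd)
    {
        return container_of(txd, struct timb_dma_desc, txd);
    }

Embedding the generic descriptor (rather than pointing to it) is the usual dmaengine pattern: one allocation covers both, and container_of() makes every generic callback able to reach the driver state.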
207 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
217 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
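At lines 207 and 217, starting a transfer comes down to programming the engine with txd.phys, the bus address of the mapped descriptor list; the two call sites correspond to the RX and TX directions. A sketch of the idea using the assumed types above (the register offsets here are hypothetical, invented for illustration):

    /* Hypothetical offsets; the listing only shows that RX and TX
     * program two different descriptor-address registers. */
    #define DHAR_RX_OFFS    0x08
    #define DHAR_TX_OFFS    0x18

    static void start_dma_sketch(struct timb_dma_chan *td_chan,
                                 struct timb_dma_desc *td_desc, bool rx)
    {
        /* Hand the descriptor list's bus address to the engine. */
        iowrite32(td_desc->txd.phys,
                  td_chan->membase + (rx ? DHAR_RX_OFFS : DHAR_TX_OFFS));
    }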
231 struct dma_async_tx_descriptor *txd; in __td_finish() local
240 txd = &td_desc->txd; in __td_finish()
243 txd->cookie); in __td_finish()
252 dma_cookie_complete(txd); in __td_finish()
255 callback = txd->callback; in __td_finish()
256 param = txd->callback_param; in __td_finish()
260 dma_descriptor_unmap(txd); in __td_finish()
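Lines 231 to 260 follow the standard dmaengine completion order: complete the cookie, snapshot the callback and its argument, unmap the descriptor, and only then invoke the callback, since the callback may immediately recycle or resubmit descriptors. A condensed sketch, with the driver's locking and list bookkeeping omitted:

    #include "dmaengine.h"   /* drivers/dma/dmaengine.h: dma_cookie_complete() */

    static void finish_sketch(struct timb_dma_desc *td_desc)
    {
        struct dma_async_tx_descriptor *txd = &td_desc->txd;
        dma_async_tx_callback callback;
        void *param;

        dma_cookie_complete(txd);       /* updates the channel's completed_cookie */

        /* Snapshot before unmapping: the callback may recycle td_desc. */
        callback = txd->callback;
        param = txd->callback_param;

        dma_descriptor_unmap(txd);
        if (callback)
            callback(param);
    }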
299 __func__, td_desc->txd.cookie); in __td_start_next()
305 static dma_cookie_t td_tx_submit(struct dma_async_tx_descriptor *txd) in td_tx_submit() argument
307 struct timb_dma_desc *td_desc = container_of(txd, struct timb_dma_desc, in td_tx_submit()
308 txd); in td_tx_submit()
309 struct timb_dma_chan *td_chan = container_of(txd->chan, in td_tx_submit()
314 cookie = dma_cookie_assign(txd); in td_tx_submit()
317 dev_dbg(chan2dev(txd->chan), "%s: started %u\n", __func__, in td_tx_submit()
318 txd->cookie); in td_tx_submit()
322 dev_dbg(chan2dev(txd->chan), "tx_submit: queued %u\n", in td_tx_submit()
323 txd->cookie); in td_tx_submit()
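td_tx_submit() at lines 305-323 is the tx_submit hook installed at line 354 below: it recovers both private types via container_of(), assigns the cookie under the channel lock, then either starts the hardware at once (idle channel) or queues the descriptor for later. A sketch of that shape, reusing the assumed fields from the first sketch (the lock flavor and list names are assumptions):

    static void __td_start_dma(struct timb_dma_chan *td_chan);  /* cf. lines 207/217 */

    static dma_cookie_t tx_submit_sketch(struct dma_async_tx_descriptor *txd)
    {
        struct timb_dma_desc *td_desc =
            container_of(txd, struct timb_dma_desc, txd);
        struct timb_dma_chan *td_chan =
            container_of(txd->chan, struct timb_dma_chan, chan);
        dma_cookie_t cookie;

        spin_lock_bh(&td_chan->lock);
        cookie = dma_cookie_assign(txd);    /* must run under the channel lock */

        if (list_empty(&td_chan->active_list)) {
            /* Hardware idle: start this descriptor immediately. */
            list_add_tail(&td_desc->desc_node, &td_chan->active_list);
            __td_start_dma(td_chan);
        } else {
            /* Busy: park it; __td_start_next() picks it up later. */
            list_add_tail(&td_desc->desc_node, &td_chan->queue);
        }
        spin_unlock_bh(&td_chan->lock);

        return cookie;
    }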
353 dma_async_tx_descriptor_init(&td_desc->txd, chan); in td_alloc_init_desc()
354 td_desc->txd.tx_submit = td_tx_submit; in td_alloc_init_desc()
355 td_desc->txd.flags = DMA_CTRL_ACK; in td_alloc_init_desc()
357 td_desc->txd.phys = dma_map_single(chan2dmadev(chan), in td_alloc_init_desc()
360 err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys); in td_alloc_init_desc()
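Lines 353-360 show descriptor setup in td_alloc_init_desc(): initialize the embedded txd against the channel, install the tx_submit hook, pre-set DMA_CTRL_ACK so the descriptor starts out reusable, then map the CPU-side descriptor list and keep the bus address in txd.phys. A sketch; chan2dmadev() from the listing is stood in for by chan->device->dev, an assumption:

    #include <linux/slab.h>

    static struct timb_dma_desc *alloc_desc_sketch(struct dma_chan *chan,
                                                   unsigned int list_len)
    {
        struct device *dev = chan->device->dev;  /* stand-in for chan2dmadev() */
        struct timb_dma_desc *td_desc;

        td_desc = kzalloc(sizeof(*td_desc), GFP_KERNEL);
        if (!td_desc)
            return NULL;

        td_desc->desc_list_len = list_len;
        td_desc->desc_list = kzalloc(list_len, GFP_KERNEL);
        if (!td_desc->desc_list)
            goto err_free;

        dma_async_tx_descriptor_init(&td_desc->txd, chan);
        td_desc->txd.tx_submit = tx_submit_sketch;
        td_desc->txd.flags = DMA_CTRL_ACK;       /* reusable once acked */

        td_desc->txd.phys = dma_map_single(dev, td_desc->desc_list,
                                           list_len, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, td_desc->txd.phys))
            goto err_free_list;

        return td_desc;

    err_free_list:
        kfree(td_desc->desc_list);
    err_free:
        kfree(td_desc);
        return NULL;
    }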
377 dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc); in td_free_desc()
378 dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys, in td_free_desc()
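td_free_desc() at lines 377-378 must undo that mapping with the same device, bus address, size, and direction the allocation used. A matching sketch:

    static void free_desc_sketch(struct timb_dma_desc *td_desc)
    {
        /* Mirror the dma_map_single() from allocation exactly. */
        dma_unmap_single(td_desc->txd.chan->device->dev, td_desc->txd.phys,
                         td_desc->desc_list_len, DMA_TO_DEVICE);
        kfree(td_desc->desc_list);
        kfree(td_desc);
    }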
403 if (async_tx_test_ack(&td_desc->txd)) { in td_desc_get()
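Line 403 is the reuse gate in td_desc_get(): a descriptor on the free list may only be handed out again once the client has acknowledged it, which async_tx_test_ack() checks via the DMA_CTRL_ACK flag. A sketch of such a get path, under the same assumed list fields:

    static struct timb_dma_desc *desc_get_sketch(struct timb_dma_chan *td_chan)
    {
        struct timb_dma_desc *td_desc, *next;

        list_for_each_entry_safe(td_desc, next, &td_chan->free_list, desc_node) {
            if (async_tx_test_ack(&td_desc->txd)) {
                list_del(&td_desc->desc_node);  /* claim it */
                return td_desc;
            }
        }
        return NULL;    /* nothing acked yet */
    }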
554 dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys, in td_prep_slave_sg()
557 return &td_desc->txd; in td_prep_slave_sg()
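Lines 554-557 end the prep path: the CPU has just written the hardware descriptor list through a streaming mapping, so dma_sync_single_for_device() must flush those writes before the engine reads them, and the embedded txd is returned as the generic handle the client later submits. A sketch of that tail:

    static struct dma_async_tx_descriptor *
    prep_tail_sketch(struct dma_chan *chan, struct timb_dma_desc *td_desc)
    {
        /* Make the CPU's writes to the descriptor list visible to the
         * device before the transfer is submitted. */
        dma_sync_single_for_device(chan->device->dev, td_desc->txd.phys,
                                   td_desc->desc_list_len, DMA_TO_DEVICE);

        return &td_desc->txd;   /* the handle the client passes to tx_submit */
    }

Returning the embedded txd rather than the private descriptor is what lets generic dmaengine clients stay driver-agnostic: everything driver-specific is recovered later via container_of(), as in td_tx_submit() above.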