Lines Matching refs:txd
76 struct dma_async_tx_descriptor txd; member
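These hits look like the Timberdale FPGA DMA driver (drivers/dma/timb_dma.c in the Linux kernel). Line 76 is the declaration site: the driver embeds the generic dmaengine descriptor in its private per-transfer structure, so container_of() can translate between the two views. A minimal sketch of that pattern; every field besides txd is an assumption, and the includes here are shared by the later sketches (which additionally assume the drivers/dma-private "dmaengine.h" for the cookie helpers):

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/io.h>
    #include <linux/kernel.h>
    #include <linux/list.h>
    #include <linux/slab.h>

    /* Sketch: the driver-private descriptor wraps the generic
     * dma_async_tx_descriptor (line 76), so the &txd pointer handed to
     * the dmaengine core can be mapped back with container_of(). All
     * fields other than txd are assumed, not the driver's real layout. */
    struct timb_dma_desc {
            struct dma_async_tx_descriptor txd; /* generic part, line 76 */
            u8 *desc_list;              /* assumed: CPU copy of HW descriptors */
            unsigned int desc_list_len; /* assumed: size given to dma_map_single() */
            struct list_head desc_node; /* assumed: channel list linkage */
    };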
211 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
221 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
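Lines 211 and 221 show __td_start_dma() programming the bus address of the mapped descriptor list, txd.phys, into the channel's registers; iowrite32() takes the value first and the MMIO address second. A hedged sketch, with the channel struct simplified and the register offset invented for illustration:

    /* Simplified stand-in for the driver's channel object; only the
     * fields these sketches touch, and the layout is an assumption. */
    struct timb_dma_chan {
            struct dma_chan chan;         /* generic channel, embedded */
            void __iomem *membase;        /* channel register window */
            struct list_head active_list; /* assumed: descriptors owned by HW */
            struct list_head queue;       /* assumed: submitted, not started */
            struct list_head free_list;   /* assumed: completed, reusable */
    };

    #define TIMBDMA_OFFS_DESC_ADDR 0x08   /* placeholder, not the real offset */

    /* Lines 211/221: iowrite32(value, addr) points the engine at the
     * DMA-mapped descriptor list before starting the channel. */
    static void start_dma_sketch(struct timb_dma_chan *td_chan,
                                 struct timb_dma_desc *td_desc)
    {
            iowrite32(td_desc->txd.phys,
                      td_chan->membase + TIMBDMA_OFFS_DESC_ADDR);
    }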
235 struct dma_async_tx_descriptor *txd; in __td_finish() local
244 txd = &td_desc->txd; in __td_finish()
247 txd->cookie); in __td_finish()
256 dma_cookie_complete(txd); in __td_finish()
259 callback = txd->callback; in __td_finish()
260 param = txd->callback_param; in __td_finish()
264 dma_descriptor_unmap(txd); in __td_finish()
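Lines 235-264 are the completion path in __td_finish(): a local txd pointer into the finished descriptor, dma_cookie_complete() to publish completion, a snapshot of the callback and its argument, and dma_descriptor_unmap() to undo the client's buffer mappings. A sketch of that canonical sequence (locking is omitted here; dmaengine drivers typically drop the channel lock before invoking the callback):

    /* Sketch of the completion ritual at lines 244..264. The cookie
     * helpers live in the drivers/dma-private "dmaengine.h";
     * dma_descriptor_unmap() is in <linux/dmaengine.h>. */
    static void finish_sketch(struct timb_dma_desc *td_desc)
    {
            struct dma_async_tx_descriptor *txd = &td_desc->txd;
            dma_async_tx_callback callback;
            void *param;

            dma_cookie_complete(txd);  /* line 256: mark cookie done */
            callback = txd->callback;  /* lines 259-260: snapshot both */
            param = txd->callback_param;
            dma_descriptor_unmap(txd); /* line 264: undo client mappings */

            if (callback)
                    callback(param);
    }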
303 __func__, td_desc->txd.cookie); in __td_start_next()
309 static dma_cookie_t td_tx_submit(struct dma_async_tx_descriptor *txd) in td_tx_submit() argument
311 struct timb_dma_desc *td_desc = container_of(txd, struct timb_dma_desc, in td_tx_submit()
312 txd); in td_tx_submit()
313 struct timb_dma_chan *td_chan = container_of(txd->chan, in td_tx_submit()
318 cookie = dma_cookie_assign(txd); in td_tx_submit()
321 dev_dbg(chan2dev(txd->chan), "%s: started %u\n", __func__, in td_tx_submit()
322 txd->cookie); in td_tx_submit()
326 dev_dbg(chan2dev(txd->chan), "tx_submit: queued %u\n", in td_tx_submit()
327 txd->cookie); in td_tx_submit()
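Lines 309-327 are tx_submit, the hook the core invokes when a client submits a prepared descriptor: container_of() recovers the driver descriptor from txd and the driver channel from txd->chan, dma_cookie_assign() hands out the cookie, and the two dev_dbg() messages suggest the transfer is either started at once or queued. A sketch under those assumptions (the idle test, the list handling, and the omitted locking are guesses):

    /* Sketch of the tx_submit hook (lines 309..327); locking omitted. */
    static dma_cookie_t tx_submit_sketch(struct dma_async_tx_descriptor *txd)
    {
            struct timb_dma_desc *td_desc =
                    container_of(txd, struct timb_dma_desc, txd);      /* 311-312 */
            struct timb_dma_chan *td_chan =
                    container_of(txd->chan, struct timb_dma_chan, chan); /* 313 */
            dma_cookie_t cookie = dma_cookie_assign(txd);              /* 318 */

            if (list_empty(&td_chan->active_list)) {
                    /* "started %u": channel idle, kick it immediately */
                    list_add_tail(&td_desc->desc_node, &td_chan->active_list);
                    start_dma_sketch(td_chan, td_desc);
            } else {
                    /* "tx_submit: queued %u": busy, park on the queue */
                    list_add_tail(&td_desc->desc_node, &td_chan->queue);
            }

            return cookie;
    }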
357 dma_async_tx_descriptor_init(&td_desc->txd, chan); in td_alloc_init_desc()
358 td_desc->txd.tx_submit = td_tx_submit; in td_alloc_init_desc()
359 td_desc->txd.flags = DMA_CTRL_ACK; in td_alloc_init_desc()
361 td_desc->txd.phys = dma_map_single(chan2dmadev(chan), in td_alloc_init_desc()
364 err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys); in td_alloc_init_desc()
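Lines 357-364 construct a descriptor: dma_async_tx_descriptor_init() binds the generic part to the channel, the driver installs its tx_submit hook, pre-sets DMA_CTRL_ACK so the descriptor can be recycled without an explicit client ack, and maps its hardware descriptor list with dma_map_single(), checking the result via dma_mapping_error(). The chan2dmadev() helper in the hits presumably resolves to something like chan->device->dev, which the sketch uses directly; the buffer size handling is an assumption:

    /* Sketch of descriptor construction (lines 357..364). */
    static struct timb_dma_desc *alloc_desc_sketch(struct dma_chan *chan,
                                                   unsigned int desc_bytes)
    {
            struct device *dev = chan->device->dev; /* assumed chan2dmadev() */
            struct timb_dma_desc *td_desc;

            td_desc = kzalloc(sizeof(*td_desc), GFP_KERNEL);
            if (!td_desc)
                    return NULL;

            td_desc->desc_list = kzalloc(desc_bytes, GFP_KERNEL);
            if (!td_desc->desc_list)
                    goto err_free;
            td_desc->desc_list_len = desc_bytes;

            dma_async_tx_descriptor_init(&td_desc->txd, chan); /* line 357 */
            td_desc->txd.tx_submit = tx_submit_sketch;         /* line 358 */
            td_desc->txd.flags = DMA_CTRL_ACK;                 /* line 359 */

            /* lines 361-364: map the HW descriptor list for the device */
            td_desc->txd.phys = dma_map_single(dev, td_desc->desc_list,
                                               desc_bytes, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, td_desc->txd.phys))
                    goto err_free_list;

            return td_desc;

    err_free_list:
            kfree(td_desc->desc_list);
    err_free:
            kfree(td_desc);
            return NULL;
    }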
381 dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc); in td_free_desc()
382 dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys, in td_free_desc()
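Lines 381-382 are the teardown mirror of that map: td_free_desc() unmaps txd.phys with the same size and direction before freeing. A short sketch:

    /* Sketch of td_free_desc() (lines 381-382): the unmap must mirror
     * the dma_map_single() in the alloc path exactly. */
    static void free_desc_sketch(struct timb_dma_desc *td_desc)
    {
            struct device *dev = td_desc->txd.chan->device->dev;

            dma_unmap_single(dev, td_desc->txd.phys,
                             td_desc->desc_list_len, DMA_TO_DEVICE);
            kfree(td_desc->desc_list);
            kfree(td_desc);
    }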
407 if (async_tx_test_ack(&td_desc->txd)) { in td_desc_get()
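Line 407 is where the DMA_CTRL_ACK pre-set at line 359 pays off: td_desc_get() apparently recycles only descriptors the client has acked, which async_tx_test_ack() reports. A hedged sketch (the free_list and its discipline are assumptions):

    /* Sketch of td_desc_get() (line 407): only descriptors the client
     * has acked may be recycled; free_list is an assumed field. */
    static struct timb_dma_desc *desc_get_sketch(struct timb_dma_chan *td_chan)
    {
            struct timb_dma_desc *td_desc, *tmp;

            list_for_each_entry_safe(td_desc, tmp, &td_chan->free_list,
                                     desc_node) {
                    if (async_tx_test_ack(&td_desc->txd)) { /* line 407 */
                            list_del(&td_desc->desc_node);
                            return td_desc;
                    }
            }
            return NULL;
    }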
558 dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys, in td_prep_slave_sg()
561 return &td_desc->txd; in td_prep_slave_sg()
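Lines 558-561 end the slave-sg prep path: once the CPU has written the scatterlist into the descriptor list, dma_sync_single_for_device() pushes those bytes through to the device-visible side of the mapping, and the generic &td_desc->txd is what the client gets back. A sketch of just that tail (how the scatterlist is encoded beforehand is elided):

    /* Sketch of the tail of td_prep_slave_sg() (lines 558..561). */
    static struct dma_async_tx_descriptor *prep_tail_sketch(
            struct dma_chan *chan, struct timb_dma_desc *td_desc,
            size_t bytes_written)
    {
            /* lines 558-560: the CPU wrote desc_list; flush those bytes
             * to the device copy before handing the descriptor out */
            dma_sync_single_for_device(chan->device->dev, td_desc->txd.phys,
                                       bytes_written, DMA_TO_DEVICE);

            return &td_desc->txd; /* line 561: generic handle to the client */
    }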