Lines matching refs: async_tx (Marvell XOR engine driver, drivers/dma/mv_xor.c)

45 	container_of(tx, struct mv_xor_desc_slot, async_tx)
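
Line 45 is the body of the slot-lookup helper: every software descriptor embeds its generic struct dma_async_tx_descriptor as the async_tx member, so container_of() recovers the enclosing slot from a pointer to the generic descriptor. A minimal sketch of the macro this line likely belongs to (the name to_mv_xor_slot is an assumption based on the driver's naming pattern):

/* recover the driver-private slot from its embedded async_tx member;
 * to_mv_xor_slot is an assumed name following the driver's conventions
 */
#define to_mv_xor_slot(tx)		\
	container_of(tx, struct mv_xor_desc_slot, async_tx)
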
223 mv_chan_set_next_descriptor(mv_chan, sw_desc->async_tx.phys); in mv_chan_start_new_chain()
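
Line 223 points the engine at a new descriptor chain via the descriptor's bus address. A sketch of how the surrounding function plausibly reads, assuming mv_chan_set_next_descriptor() writes the hardware next-descriptor register; the pending counter and the issue-pending call are assumptions:

static void mv_chan_start_new_chain(struct mv_xor_chan *mv_chan,
				    struct mv_xor_desc_slot *sw_desc)
{
	/* async_tx.phys holds the bus address of the hardware
	 * descriptor, so it can be handed straight to the engine
	 */
	mv_chan_set_next_descriptor(mv_chan, sw_desc->async_tx.phys);

	mv_chan->pending++;
	mv_xor_issue_pending(&mv_chan->dmachan);
}
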
234 BUG_ON(desc->async_tx.cookie < 0); in mv_desc_run_tx_complete_actions()
236 if (desc->async_tx.cookie > 0) { in mv_desc_run_tx_complete_actions()
237 cookie = desc->async_tx.cookie; in mv_desc_run_tx_complete_actions()
242 if (desc->async_tx.callback) in mv_desc_run_tx_complete_actions()
243 desc->async_tx.callback( in mv_desc_run_tx_complete_actions()
244 desc->async_tx.callback_param); in mv_desc_run_tx_complete_actions()
246 dma_descriptor_unmap(&desc->async_tx); in mv_desc_run_tx_complete_actions()
250 dma_run_dependencies(&desc->async_tx); in mv_desc_run_tx_complete_actions()
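
Lines 234-250 are the standard dmaengine completion sequence: assert the cookie is sane, record it, run the client callback, unmap the transfer, then release any dependent async_tx operations. Reassembled from the fragments above into one plausible function body (the local names and comment text are assumptions):

static dma_cookie_t
mv_desc_run_tx_complete_actions(struct mv_xor_desc_slot *desc,
				struct mv_xor_chan *mv_chan,
				dma_cookie_t cookie)
{
	BUG_ON(desc->async_tx.cookie < 0);

	if (desc->async_tx.cookie > 0) {
		cookie = desc->async_tx.cookie;

		/* call the client callback; it must not sleep or
		 * submit new operations to this channel
		 */
		if (desc->async_tx.callback)
			desc->async_tx.callback(
				desc->async_tx.callback_param);

		dma_descriptor_unmap(&desc->async_tx);
	}

	/* run dependent operations queued behind this descriptor */
	dma_run_dependencies(&desc->async_tx);

	return cookie;
}
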
264 if (async_tx_test_ack(&iter->async_tx)) in mv_chan_clean_completed_slots()
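
Line 264 is the recycling test: a completed slot may only be reused once the client has acknowledged it with async_tx_ack(). A sketch of the loop it likely sits in; the completed_slots/free_slots list names and the node member are assumptions:

static void mv_chan_clean_completed_slots(struct mv_xor_chan *mv_chan)
{
	struct mv_xor_desc_slot *iter, *_iter;

	list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots,
				 node) {
		/* only ACKed descriptors may be handed out again */
		if (async_tx_test_ack(&iter->async_tx))
			list_move_tail(&iter->node, &mv_chan->free_slots);
	}
}
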
275 __func__, __LINE__, desc, desc->async_tx.flags); in mv_desc_clean_slot()
280 if (!async_tx_test_ack(&desc->async_tx)) in mv_desc_clean_slot()
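
Lines 275-280 are the other side of the ACK protocol: a finished descriptor whose ACK bit is still clear may still have dependent operations attached, so it is parked on the completed list rather than freed. A sketch, with the list names again assumed:

static void mv_desc_clean_slot(struct mv_xor_desc_slot *desc,
			       struct mv_xor_chan *mv_chan)
{
	dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: desc %p flags %d\n",
		__func__, __LINE__, desc, desc->async_tx.flags);

	/* the client may attach dependent operations until "ack"
	 * is set, so an un-ACKed descriptor cannot be recycled yet
	 */
	if (!async_tx_test_ack(&desc->async_tx))
		list_move_tail(&desc->node, &mv_chan->completed_slots);
	else
		list_move_tail(&desc->node, &mv_chan->free_slots);
}
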
320 if (iter->async_tx.phys == current_desc) { in mv_chan_slot_cleanup()
325 if (iter->async_tx.phys == current_desc) { in mv_chan_slot_cleanup()
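
Lines 320 and 325 compare each software descriptor's bus address (async_tx.phys) against the engine's current-descriptor register to find how far the hardware has progressed. A sketch of the cleanup scan, assuming a per-channel chain list and a per-descriptor hardware status bit (XOR_DESC_SUCCESS and the hw_desc field are assumptions):

	u32 current_desc = mv_chan_get_current_desc(mv_chan);
	int current_cleaned = 0;

	list_for_each_entry_safe(iter, _iter, &mv_chan->chain, node) {
		struct mv_xor_desc *hw_desc = iter->hw_desc;

		if (hw_desc->status & XOR_DESC_SUCCESS) {
			/* finished: run callbacks, then retire the slot */
			cookie = mv_desc_run_tx_complete_actions(iter,
								 mv_chan,
								 cookie);
			mv_desc_clean_slot(iter, mv_chan);

			/* stop once the engine's current descriptor
			 * has been cleaned
			 */
			if (iter->async_tx.phys == current_desc) {
				current_cleaned = 1;
				break;
			}
		} else {
			/* reached the descriptor the engine is still
			 * working on: nothing further is complete
			 */
			if (iter->async_tx.phys == current_desc) {
				current_cleaned = 0;
				break;
			}
		}
	}
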
392 async_tx_ack(&iter->async_tx); in mv_chan_alloc_slot()
393 iter->async_tx.cookie = -EBUSY; in mv_chan_alloc_slot()
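
Lines 392-393 pre-acknowledge a freshly allocated slot and set its cookie to -EBUSY, the dmaengine convention for "allocated but not yet submitted". A sketch of the allocator, assuming a free_slots/allocated_slots list pair protected by the channel lock:

static struct mv_xor_desc_slot *
mv_chan_alloc_slot(struct mv_xor_chan *mv_chan)
{
	struct mv_xor_desc_slot *iter = NULL;

	spin_lock_bh(&mv_chan->lock);
	if (!list_empty(&mv_chan->free_slots)) {
		iter = list_first_entry(&mv_chan->free_slots,
					struct mv_xor_desc_slot, node);
		list_move_tail(&iter->node, &mv_chan->allocated_slots);
	}
	spin_unlock_bh(&mv_chan->lock);

	if (iter) {
		/* pre-ack: the driver owns this descriptor for now */
		async_tx_ack(&iter->async_tx);
		/* "allocated but not submitted" sentinel cookie */
		iter->async_tx.cookie = -EBUSY;
	}
	return iter;
}
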
419 __func__, sw_desc, &sw_desc->async_tx); in mv_xor_tx_submit()
435 &old_chain_tail->async_tx.phys); in mv_xor_tx_submit()
438 mv_desc_set_next_desc(old_chain_tail, sw_desc->async_tx.phys); in mv_xor_tx_submit()
447 if (current_desc == old_chain_tail->async_tx.phys) in mv_xor_tx_submit()
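
Lines 419-447 are the submit path: the new descriptor is appended to the software chain, linked behind the old hardware tail via its phys address, and the engine is restarted if it had already run off the end of the old chain (line 447's comparison of the current-descriptor register against the old tail). A plausible reconstruction, with mv_chan_is_busy() and the chain list name assumed:

static dma_cookie_t mv_xor_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct mv_xor_desc_slot *sw_desc = to_mv_xor_slot(tx);
	struct mv_xor_chan *mv_chan = to_mv_xor_chan(tx->chan);
	struct mv_xor_desc_slot *old_chain_tail;
	dma_cookie_t cookie;
	int new_hw_chain = 1;

	dev_dbg(mv_chan_to_devp(mv_chan), "%s sw_desc %p: async_tx %p\n",
		__func__, sw_desc, &sw_desc->async_tx);

	spin_lock_bh(&mv_chan->lock);
	cookie = dma_cookie_assign(tx);

	if (!list_empty(&mv_chan->chain)) {
		new_hw_chain = 0;
		old_chain_tail = list_last_entry(&mv_chan->chain,
						 struct mv_xor_desc_slot,
						 node);
		/* link the old hardware tail to the new descriptor */
		mv_desc_set_next_desc(old_chain_tail,
				      sw_desc->async_tx.phys);

		/* if the engine already stopped on the old tail, the
		 * link above came too late: restart it explicitly
		 */
		if (!mv_chan_is_busy(mv_chan) &&
		    mv_chan_get_current_desc(mv_chan) ==
					old_chain_tail->async_tx.phys)
			new_hw_chain = 1;
	}
	list_move_tail(&sw_desc->node, &mv_chan->chain);

	if (new_hw_chain)
		mv_chan_start_new_chain(mv_chan, sw_desc);

	spin_unlock_bh(&mv_chan->lock);
	return cookie;
}
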
483 dma_async_tx_descriptor_init(&slot->async_tx, chan); in mv_xor_alloc_chan_resources()
484 slot->async_tx.tx_submit = mv_xor_tx_submit; in mv_xor_alloc_chan_resources()
487 slot->async_tx.phys = dma_desc + idx * MV_XOR_SLOT_SIZE; in mv_xor_alloc_chan_resources()
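
Lines 483-487 run once per slot when channel resources are allocated: each slot's generic descriptor is initialized, its tx_submit hook is pointed at the driver, and its phys is set to the slot's fixed offset within the coherent descriptor pool (MV_XOR_SLOT_SIZE apart). A sketch of the loop; the pool field names and slot count are assumptions:

	for (idx = 0; idx < num_descs_in_pool; idx++) {
		slot = kzalloc(sizeof(*slot), GFP_KERNEL);
		if (!slot)
			break;
		/* CPU view of the hardware descriptor in the pool */
		slot->hw_desc = mv_chan->dma_desc_pool_virt +
				idx * MV_XOR_SLOT_SIZE;

		dma_async_tx_descriptor_init(&slot->async_tx, chan);
		slot->async_tx.tx_submit = mv_xor_tx_submit;
		/* device view: bus address of the same descriptor */
		slot->async_tx.phys = mv_chan->dma_desc_pool +
				      idx * MV_XOR_SLOT_SIZE;

		list_add_tail(&slot->node, &mv_chan->free_slots);
	}
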
522 sw_desc->async_tx.flags = flags; in mv_xor_prep_dma_xor()
532 __func__, sw_desc, &sw_desc->async_tx); in mv_xor_prep_dma_xor()
533 return sw_desc ? &sw_desc->async_tx : NULL; in mv_xor_prep_dma_xor()
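
Lines 522-533 close the loop: the prep hook records the caller's flags on the embedded descriptor and returns &sw_desc->async_tx, the handle the client later submits via tx_submit. A sketch, with mv_desc_init() and mv_desc_set_src_addr() assumed as the driver-internal hardware-descriptor setters:

static struct dma_async_tx_descriptor *
mv_xor_prep_dma_xor(struct dma_chan *chan, dma_addr_t dest,
		    dma_addr_t *src, unsigned int src_cnt,
		    size_t len, unsigned long flags)
{
	struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
	struct mv_xor_desc_slot *sw_desc;

	sw_desc = mv_chan_alloc_slot(mv_chan);
	if (sw_desc) {
		/* stash the async_tx flags for the completion path */
		sw_desc->async_tx.flags = flags;
		mv_desc_init(sw_desc, dest, len, flags);
		while (src_cnt--)
			mv_desc_set_src_addr(sw_desc, src_cnt,
					     src[src_cnt]);
	}

	/* NULL on allocation failure, per the dmaengine contract */
	return sw_desc ? &sw_desc->async_tx : NULL;
}
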