Lines Matching refs:tx
395 struct dma_async_tx_descriptor *tx; in __cleanup() local
406 tx = &desc->txd; in __cleanup()
407 if (tx->cookie) { in __cleanup()
408 dma_cookie_complete(tx); in __cleanup()
409 dma_descriptor_unmap(tx); in __cleanup()
410 if (tx->callback) { in __cleanup()
411 tx->callback(tx->callback_param); in __cleanup()
412 tx->callback = NULL; in __cleanup()
416 if (tx->phys == phys_complete) in __cleanup()
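The __cleanup() hits above trace the descriptor retirement sequence: a descriptor that was actually submitted (its cookie is non-zero) is completed, unmapped, and has its callback fired exactly once, and line 416 then stops the ring walk once the descriptor whose bus address matches the hardware-reported completion address has been handled. A minimal sketch of that sequence, assuming the standard dmaengine helpers; the function name below is illustrative, not a symbol in the driver:

#include <linux/dmaengine.h>
#include "../dmaengine.h"       /* dma_cookie_complete(), driver-internal header */

/* Hedged sketch of the pattern at lines 406-412; retire_descriptor()
 * is an illustrative name, not a function in the driver.
 */
static void retire_descriptor(struct dma_async_tx_descriptor *tx)
{
        if (tx->cookie) {                       /* only submitted descriptors */
                dma_cookie_complete(tx);        /* publish the completed cookie */
                dma_descriptor_unmap(tx);       /* drop its DMA mappings */
                if (tx->callback) {
                        tx->callback(tx->callback_param);
                        tx->callback = NULL;    /* never fire it twice */
                }
        }
}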
497 struct dma_async_tx_descriptor *tx; in ioat3_eh() local
544 tx = &desc->txd; in ioat3_eh()
545 if (tx->cookie) { in ioat3_eh()
546 dma_cookie_complete(tx); in ioat3_eh()
547 dma_descriptor_unmap(tx); in ioat3_eh()
548 if (tx->callback) { in ioat3_eh()
549 tx->callback(tx->callback_param); in ioat3_eh()
550 tx->callback = NULL; in ioat3_eh()
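ioat3_eh(), the channel error handler, repeats the identical sequence at lines 545-550: even when the channel has faulted, the descriptor being handled is still completed, unmapped, and has its callback run, so a submitted transaction does not silently disappear during error recovery.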
1224 struct dma_async_tx_descriptor *tx; in ioat_xor_val_self_test() local
1295 tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs, in ioat_xor_val_self_test()
1299 if (!tx) { in ioat_xor_val_self_test()
1305 async_tx_ack(tx); in ioat_xor_val_self_test()
1307 tx->callback = ioat3_dma_test_callback; in ioat_xor_val_self_test()
1308 tx->callback_param = &cmp; in ioat_xor_val_self_test()
1309 cookie = tx->tx_submit(tx); in ioat_xor_val_self_test()
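The first self-test group shows the canonical async_tx submit sequence: prepare a descriptor, ack it (the self-test consumes it directly rather than chaining dependent operations onto it), attach a completion callback, and submit. The listing truncates the multi-line device_prep_dma_xor() call at line 1295, so in the sketch below the source count, length, flags, and error handling are illustrative assumptions; cmp is presumably a struct completion that ioat3_dma_test_callback() signals.

/* Hedged sketch of the submit pattern at lines 1295-1309.  Everything
 * not visible in the listing (nsrcs, len, the flags, the error paths)
 * is an assumption for illustration.
 */
struct dma_async_tx_descriptor *tx;
struct completion cmp;
dma_cookie_t cookie;

init_completion(&cmp);
tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
                              nsrcs, len, DMA_PREP_INTERRUPT);
if (!tx)
        return -ENODEV;                         /* prep failed */

async_tx_ack(tx);                               /* no dependency chain: claim it now */
tx->callback = ioat3_dma_test_callback;         /* assumed to call complete(&cmp) */
tx->callback_param = &cmp;
cookie = tx->tx_submit(tx);                     /* queue it on the channel */
if (dma_submit_error(cookie))
        return -ENODEV;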
1363 tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs, in ioat_xor_val_self_test()
1366 if (!tx) { in ioat_xor_val_self_test()
1372 async_tx_ack(tx); in ioat_xor_val_self_test()
1374 tx->callback = ioat3_dma_test_callback; in ioat_xor_val_self_test()
1375 tx->callback_param = &cmp; in ioat_xor_val_self_test()
1376 cookie = tx->tx_submit(tx); in ioat_xor_val_self_test()
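Lines 1363-1376 (and again 1416-1429) repeat the same ack/callback/submit steps, but against device_prep_dma_xor_val(): the validate operation reads the sources and, instead of writing a destination, has the hardware report through a result pointer whether they XOR to zero. The arguments past the truncation at line 1363 are assumptions:

/* Assumed shape of the truncated call at line 1363; nsrcs, len and the
 * flags are illustrative.  xor_val_result is an enum sum_check_flags
 * the hardware fills in (a set bit means the parity check failed).
 */
enum sum_check_flags xor_val_result = 0;

tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs, nsrcs, len,
                                  &xor_val_result, DMA_PREP_INTERRUPT);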
1416 tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs, in ioat_xor_val_self_test()
1419 if (!tx) { in ioat_xor_val_self_test()
1425 async_tx_ack(tx); in ioat_xor_val_self_test()
1427 tx->callback = ioat3_dma_test_callback; in ioat_xor_val_self_test()
1428 tx->callback_param = &cmp; in ioat_xor_val_self_test()
1429 cookie = tx->tx_submit(tx); in ioat_xor_val_self_test()
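None of the three groups shows what follows tx_submit(); in a dmaengine self-test the usual continuation is to kick the channel and then block on the completion the callback signals, double-checking the cookie status. A hedged sketch of that continuation; the timeout value and error handling are illustrative, not taken from the listing:

/* Assumed follow-up to the submissions above: start the queued work,
 * wait for the interrupt callback, then confirm the cookie completed.
 */
dma->device_issue_pending(dma_chan);

if (!wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000)) ||
    dma->device_tx_status(dma_chan, cookie, NULL) != DMA_COMPLETE) {
        err = -ENODEV;          /* timed out or never completed */
        goto out;               /* 'out' is an illustrative label */
}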