Lines matching refs:tx (include/linux/dmaengine.h)
443 dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx);
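The tx_submit hook above is the provider's per-descriptor submission callback; clients do not call it directly but go through dmaengine_submit(), which invokes it and returns the descriptor's cookie. A minimal client-side sketch, assuming a channel has already been acquired and dst/src are already DMA-mapped; queue_memcpy is an invented helper name:

#include <linux/dmaengine.h>
#include <linux/errno.h>

/*
 * Queue a memcpy on an already-acquired channel.  dmaengine_submit()
 * ends up calling the provider's ->tx_submit() hook, which assigns and
 * returns the cookie for this descriptor.
 */
static dma_cookie_t queue_memcpy(struct dma_chan *chan, dma_addr_t dst,
                                 dma_addr_t src, size_t len)
{
        struct dma_async_tx_descriptor *tx;
        dma_cookie_t cookie;

        tx = dmaengine_prep_dma_memcpy(chan, dst, src, len,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tx)
                return -EBUSY;

        cookie = dmaengine_submit(tx);          /* calls tx->tx_submit(tx) */
        if (dma_submit_error(cookie))
                return cookie;

        /* Submission only queues the work; this starts the engine. */
        dma_async_issue_pending(chan);
        return cookie;
}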
455 static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx,
459         tx->unmap = unmap;
466 static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx,
480 static inline void dma_descriptor_unmap(struct dma_async_tx_descriptor *tx)
482         if (tx->unmap) {
483                 dmaengine_unmap_put(tx->unmap);
484                 tx->unmap = NULL;
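dma_set_unmap() at line 455 attaches a reference-counted struct dmaengine_unmap_data to a descriptor (the second definition at line 466 is the empty stub used when CONFIG_DMA_ENGINE is not set), and dma_descriptor_unmap() at lines 480-484 is what a provider calls on completion to drop that reference and clear the pointer. A sketch of the usual prep-side pattern for a memcpy between two pages; my_prep_copy and the page arguments are placeholders, and dma_map_page() error checking is omitted:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

/*
 * Map one source and one destination page, hand the dmaengine_unmap_data
 * to the descriptor with dma_set_unmap(), then drop the local reference.
 */
static struct dma_async_tx_descriptor *
my_prep_copy(struct dma_chan *chan, struct page *dst_page,
             struct page *src_page, size_t len)
{
        struct device *dev = chan->device->dev;
        struct dmaengine_unmap_data *unmap;
        struct dma_async_tx_descriptor *tx;

        unmap = dmaengine_get_unmap_data(dev, 2, GFP_NOWAIT);
        if (!unmap)
                return NULL;

        unmap->len = len;
        unmap->addr[0] = dma_map_page(dev, src_page, 0, len, DMA_TO_DEVICE);
        unmap->to_cnt = 1;
        unmap->addr[1] = dma_map_page(dev, dst_page, 0, len, DMA_FROM_DEVICE);
        unmap->from_cnt = 1;

        tx = dmaengine_prep_dma_memcpy(chan, unmap->addr[1], unmap->addr[0],
                                       len, DMA_PREP_INTERRUPT);
        if (tx)
                dma_set_unmap(tx, unmap);       /* descriptor takes its own kref */

        dmaengine_unmap_put(unmap);             /* drop the prep-path reference */
        return tx;
}

The descriptor keeps its own reference via dma_set_unmap(), so the prep path drops its reference immediately; the final put happens in dma_descriptor_unmap() when the provider completes the descriptor.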
912 void dma_async_tx_descriptor_init(struct dma_async_tx_descriptor *tx,
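dma_async_tx_descriptor_init() is the provider-side initializer for the generic descriptor embedded in a driver's own descriptor structure; the driver then installs its tx_submit hook. A rough sketch of that pattern; struct my_desc, my_tx_submit and my_desc_init are invented names, and dma_cookie_assign() lives in the provider-private header drivers/dma/dmaengine.h, not in this one:

#include <linux/dmaengine.h>

/* Hypothetical provider-side descriptor wrapping the generic one. */
struct my_desc {
        struct dma_async_tx_descriptor txd;
        /* ... hardware-specific fields (linked-list items, etc.) ... */
};

/* dma_cookie_assign() comes from drivers/dma/dmaengine.h; a real driver
 * also queues the descriptor on its submitted list under the channel
 * lock here. */
static dma_cookie_t my_tx_submit(struct dma_async_tx_descriptor *txd)
{
        return dma_cookie_assign(txd);
}

static void my_desc_init(struct my_desc *d, struct dma_chan *chan,
                         unsigned long flags)
{
        /* Sets txd->chan (and, when channel switching is enabled, its lock). */
        dma_async_tx_descriptor_init(&d->txd, chan);
        d->txd.flags = flags;
        d->txd.tx_submit = my_tx_submit;
}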
915 static inline void async_tx_ack(struct dma_async_tx_descriptor *tx)
917         tx->flags |= DMA_CTRL_ACK;
920 static inline void async_tx_clear_ack(struct dma_async_tx_descriptor *tx)
922         tx->flags &= ~DMA_CTRL_ACK;
925 static inline bool async_tx_test_ack(struct dma_async_tx_descriptor *tx)
927         return (tx->flags & DMA_CTRL_ACK) == DMA_CTRL_ACK;
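DMA_CTRL_ACK marks a descriptor as acknowledged: the client is finished with it and will not chain further dependencies onto it, so the provider may recycle it once the hardware is done. async_tx_ack() and async_tx_clear_ack() flip the flag from the client side; async_tx_test_ack() is what a provider checks before reuse. An illustrative provider-side reclaim pass, with struct my_desc and both list arguments as placeholders:

#include <linux/dmaengine.h>
#include <linux/list.h>

/* Placeholder provider descriptor: generic descriptor plus a list node. */
struct my_desc {
        struct dma_async_tx_descriptor txd;
        struct list_head node;
};

/*
 * Reclaim pass over descriptors whose hardware work has finished.  A
 * descriptor may only be recycled once the client has acknowledged it,
 * either by passing DMA_CTRL_ACK at prep time or by calling
 * async_tx_ack() later.
 */
static void my_reclaim(struct list_head *completed, struct list_head *free_pool)
{
        struct my_desc *d, *tmp;

        list_for_each_entry_safe(d, tmp, completed, node) {
                if (!async_tx_test_ack(&d->txd))
                        continue;       /* client still holds this descriptor */
                list_move(&d->node, free_pool);
        }
}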
930 #define dma_cap_set(tx, mask) __dma_cap_set((tx), &(mask))
937 #define dma_cap_clear(tx, mask) __dma_cap_clear((tx), &(mask))
950 #define dma_has_cap(tx, mask) __dma_has_cap((tx), &(mask))
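Note that the first macro parameter is named tx in the header only by convention (which is why these three lines match the search); it is an enum dma_transaction_type capability, not a descriptor. The macros manipulate a dma_cap_mask_t bitmap, typically when requesting a channel. A short sketch; my_get_memcpy_chan is an invented name and no filter function is used:

#include <linux/dmaengine.h>
#include <linux/printk.h>

/* Build a capability mask and request any channel that can do memcpy. */
static struct dma_chan *my_get_memcpy_chan(void)
{
        dma_cap_mask_t mask;
        struct dma_chan *chan;

        dma_cap_zero(mask);
        dma_cap_set(DMA_MEMCPY, mask);

        chan = dma_request_channel(mask, NULL, NULL);
        if (chan && dma_has_cap(DMA_INTERRUPT, chan->device->cap_mask))
                pr_debug("channel also supports interrupt descriptors\n");

        return chan;
}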
1032 enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx);
1050 static inline enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx)
1086 void dma_run_dependencies(struct dma_async_tx_descriptor *tx);
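dma_wait_for_async_tx() lets a client poll synchronously until a descriptor (and its parents) has completed, and dma_run_dependencies() is called by a provider after finishing a descriptor so that any descriptors chained behind it get submitted. Two small sketches; my_wait, my_desc and my_complete_one are invented names, and cookie completion plus client-callback invocation are omitted from the completion path:

#include <linux/dmaengine.h>
#include <linux/errno.h>

/* Client side: block (polling) until a submitted descriptor completes. */
static int my_wait(struct dma_async_tx_descriptor *tx)
{
        return dma_wait_for_async_tx(tx) == DMA_COMPLETE ? 0 : -ETIMEDOUT;
}

/* Provider side: per-descriptor completion step once the hardware is done.
 * struct my_desc is a placeholder wrapper around the generic descriptor. */
struct my_desc {
        struct dma_async_tx_descriptor txd;
};

static void my_complete_one(struct my_desc *d)
{
        struct dma_async_tx_descriptor *txd = &d->txd;

        /* Cookie completion and client-callback invocation omitted. */
        dma_descriptor_unmap(txd);      /* drop attached unmap data, if any */
        dma_run_dependencies(txd);      /* submit descriptors that depended on this one */
}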