Lines Matching refs:dma_async_tx_descriptor
471 struct dma_async_tx_descriptor { struct
476 dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx); argument
477 int (*desc_free)(struct dma_async_tx_descriptor *tx); argument
482 struct dma_async_tx_descriptor *next; argument
483 struct dma_async_tx_descriptor *parent; argument
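The hits at 471-483 are the definition of struct dma_async_tx_descriptor itself. As a rough sketch (the exact field set varies between kernel versions), the descriptor looks like this; the next/parent/lock fields only exist when CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH is set:

struct dma_async_tx_descriptor {
        dma_cookie_t cookie;
        enum dma_ctrl_flags flags;      /* e.g. DMA_CTRL_ACK, DMA_CTRL_REUSE */
        dma_addr_t phys;
        struct dma_chan *chan;
        dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx);
        int (*desc_free)(struct dma_async_tx_descriptor *tx);
        dma_async_tx_callback callback;
        void *callback_param;
        struct dmaengine_unmap_data *unmap;
#ifdef CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH
        struct dma_async_tx_descriptor *next;
        struct dma_async_tx_descriptor *parent;
        spinlock_t lock;
#endif
};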
489 static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx, in dma_set_unmap() argument
500 static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx, in dma_set_unmap()
514 static inline void dma_descriptor_unmap(struct dma_async_tx_descriptor *tx) in dma_descriptor_unmap()
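dma_set_unmap() (two hits: one real implementation, one empty stub for configurations without unmap support) attaches a struct dmaengine_unmap_data to the descriptor so that dma_descriptor_unmap() can release the DMA mappings once the transfer completes. A minimal client-side sketch, assuming src_dma/dst_dma were already DMA-mapped by the caller and foo_set_unmap() is a made-up helper:

static void foo_set_unmap(struct dma_chan *chan,
                          struct dma_async_tx_descriptor *tx,
                          dma_addr_t src_dma, dma_addr_t dst_dma, size_t len)
{
        struct dmaengine_unmap_data *unmap;

        unmap = dmaengine_get_unmap_data(chan->device->dev, 2, GFP_NOWAIT);
        if (!unmap)
                return;

        unmap->addr[0] = src_dma;       /* released as DMA_TO_DEVICE */
        unmap->addr[1] = dst_dma;       /* released as DMA_FROM_DEVICE */
        unmap->len = len;
        unmap->to_cnt = 1;
        unmap->from_cnt = 1;

        dma_set_unmap(tx, unmap);       /* takes its own reference on @unmap */
        dmaengine_unmap_put(unmap);     /* drop ours; freed after completion */
}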
523 static inline void txd_lock(struct dma_async_tx_descriptor *txd) in txd_lock()
526 static inline void txd_unlock(struct dma_async_tx_descriptor *txd) in txd_unlock()
529 static inline void txd_chain(struct dma_async_tx_descriptor *txd, struct dma_async_tx_descriptor *n… in txd_chain()
533 static inline void txd_clear_parent(struct dma_async_tx_descriptor *txd) in txd_clear_parent()
536 static inline void txd_clear_next(struct dma_async_tx_descriptor *txd) in txd_clear_next()
539 static inline struct dma_async_tx_descriptor *txd_next(struct dma_async_tx_descriptor *txd) in txd_next()
543 static inline struct dma_async_tx_descriptor *txd_parent(struct dma_async_tx_descriptor *txd) in txd_parent()
549 static inline void txd_lock(struct dma_async_tx_descriptor *txd) in txd_lock()
553 static inline void txd_unlock(struct dma_async_tx_descriptor *txd) in txd_unlock()
557 static inline void txd_chain(struct dma_async_tx_descriptor *txd, struct dma_async_tx_descriptor *n… in txd_chain()
562 static inline void txd_clear_parent(struct dma_async_tx_descriptor *txd) in txd_clear_parent()
566 static inline void txd_clear_next(struct dma_async_tx_descriptor *txd) in txd_clear_next()
570 static inline struct dma_async_tx_descriptor *txd_parent(struct dma_async_tx_descriptor *txd) in txd_parent()
574 static inline struct dma_async_tx_descriptor *txd_next(struct dma_async_tx_descriptor *txd) in txd_next()
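The two runs of txd_* hits (523-543 and 549-574) are the two branches of the CONFIG_ASYNC_TX_ENABLE_CHANNEL_SWITCH #ifdef: one branch is a set of no-op stubs, the other manipulates the descriptor's next/parent/lock fields so dependent descriptors can be chained across channels. A sketch of the enabled variant, not verbatim source:

static inline void txd_lock(struct dma_async_tx_descriptor *txd)
{
        spin_lock_bh(&txd->lock);
}

static inline void txd_chain(struct dma_async_tx_descriptor *txd,
                             struct dma_async_tx_descriptor *next)
{
        txd->next = next;       /* append @next as the dependent descriptor */
        next->parent = txd;
}

static inline struct dma_async_tx_descriptor *
txd_next(struct dma_async_tx_descriptor *txd)
{
        return txd->next;
}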
689 struct dma_async_tx_descriptor *(*device_prep_dma_memcpy)(
692 struct dma_async_tx_descriptor *(*device_prep_dma_xor)(
695 struct dma_async_tx_descriptor *(*device_prep_dma_xor_val)(
698 struct dma_async_tx_descriptor *(*device_prep_dma_pq)(
702 struct dma_async_tx_descriptor *(*device_prep_dma_pq_val)(
706 struct dma_async_tx_descriptor *(*device_prep_dma_memset)(
709 struct dma_async_tx_descriptor *(*device_prep_dma_memset_sg)(
712 struct dma_async_tx_descriptor *(*device_prep_dma_interrupt)(
714 struct dma_async_tx_descriptor *(*device_prep_dma_sg)(
720 struct dma_async_tx_descriptor *(*device_prep_slave_sg)(
724 struct dma_async_tx_descriptor *(*device_prep_dma_cyclic)(
728 struct dma_async_tx_descriptor *(*device_prep_interleaved_dma)(
731 struct dma_async_tx_descriptor *(*device_prep_dma_imm_data)(
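The hits at 689-731 are the device_prep_* hooks in struct dma_device; a DMA provider fills in the ones its hardware supports, and each hook returns a freshly prepared struct dma_async_tx_descriptor. A hedged provider-side sketch, with all foo_* names hypothetical:

static struct dma_async_tx_descriptor *
foo_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
                    size_t len, unsigned long flags)
{
        /* a real driver allocates and fills a hardware descriptor here */
        return NULL;
}

static void foo_register_prep_ops(struct dma_device *dd)
{
        dma_cap_set(DMA_MEMCPY, dd->cap_mask);
        dd->device_prep_dma_memcpy = foo_prep_dma_memcpy;
}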
761 static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_single( in dmaengine_prep_slave_single()
774 static inline struct dma_async_tx_descriptor *dmaengine_prep_slave_sg( in dmaengine_prep_slave_sg()
784 static inline struct dma_async_tx_descriptor *dmaengine_prep_rio_sg( in dmaengine_prep_rio_sg()
794 static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_cyclic( in dmaengine_prep_dma_cyclic()
803 static inline struct dma_async_tx_descriptor *dmaengine_prep_interleaved_dma( in dmaengine_prep_interleaved_dma()
810 static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_memset( in dmaengine_prep_dma_memset()
821 static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_sg( in dmaengine_prep_dma_sg()
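The dmaengine_prep_* inlines at 761-821 are the client-facing wrappers around those hooks. A minimal client-side sketch for a slave transfer, assuming the channel has already been requested and configured with dmaengine_slave_config(); foo_prep_tx() is a made-up helper:

static struct dma_async_tx_descriptor *
foo_prep_tx(struct dma_chan *chan, dma_addr_t buf, size_t len)
{
        return dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}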
861 static inline dma_cookie_t dmaengine_submit(struct dma_async_tx_descriptor *desc) in dmaengine_submit()
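dmaengine_submit() pushes the prepared descriptor onto the channel's queue by calling tx->tx_submit(); the transfer only starts once dma_async_issue_pending() is called. A sketch continuing the example above, with foo_issue()/foo_dma_done() made up:

static void foo_dma_done(void *ctx)
{
        /* completion callback; ctx is whatever was put in callback_param */
}

static int foo_issue(struct dma_chan *chan,
                     struct dma_async_tx_descriptor *desc, void *ctx)
{
        dma_cookie_t cookie;

        desc->callback = foo_dma_done;
        desc->callback_param = ctx;

        cookie = dmaengine_submit(desc);        /* calls desc->tx_submit(desc) */
        if (dma_submit_error(cookie))
                return -EIO;

        dma_async_issue_pending(chan);          /* kick the hardware */
        return 0;
}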
1019 void dma_async_tx_descriptor_init(struct dma_async_tx_descriptor *tx,
1022 static inline void async_tx_ack(struct dma_async_tx_descriptor *tx) in async_tx_ack()
1027 static inline void async_tx_clear_ack(struct dma_async_tx_descriptor *tx) in async_tx_clear_ack()
1032 static inline bool async_tx_test_ack(struct dma_async_tx_descriptor *tx) in async_tx_test_ack()
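dma_async_tx_descriptor_init() is called by providers to set up a freshly allocated descriptor, and the async_tx_ack()/async_tx_clear_ack()/async_tx_test_ack() trio manipulates DMA_CTRL_ACK in tx->flags, which tells the provider the client no longer needs the descriptor. A small provider-side sketch (struct foo_desc and the foo_* helpers are hypothetical):

struct foo_desc {
        struct dma_async_tx_descriptor txd;
};

static void foo_init_desc(struct foo_desc *d, struct dma_chan *chan)
{
        dma_async_tx_descriptor_init(&d->txd, chan);
}

static bool foo_desc_is_reclaimable(struct foo_desc *d)
{
        return async_tx_test_ack(&d->txd);      /* true once DMA_CTRL_ACK is set */
}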
1139 enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx);
1157 static inline enum dma_status dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx) in dma_wait_for_async_tx()
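dma_wait_for_async_tx() (declared at 1139, with an inline stub at 1157) is the synchronous wait used by async_tx offload clients such as the raid code. A minimal sketch, with foo_wait_tx() made up:

static int foo_wait_tx(struct dma_async_tx_descriptor *tx)
{
        enum dma_status status = dma_wait_for_async_tx(tx);

        return status == DMA_COMPLETE ? 0 : -EIO;
}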
1189 static inline int dmaengine_desc_set_reuse(struct dma_async_tx_descriptor *tx) in dmaengine_desc_set_reuse()
1203 static inline void dmaengine_desc_clear_reuse(struct dma_async_tx_descriptor *tx) in dmaengine_desc_clear_reuse()
1208 static inline bool dmaengine_desc_test_reuse(struct dma_async_tx_descriptor *tx) in dmaengine_desc_test_reuse()
1213 static inline int dmaengine_desc_free(struct dma_async_tx_descriptor *desc) in dmaengine_desc_free()
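The dmaengine_desc_*_reuse() helpers at 1189-1213 let a client mark a descriptor as reusable (DMA_CTRL_REUSE) so it can be resubmitted after completion instead of being recycled by the driver, and dmaengine_desc_free() hands it back via desc->desc_free(). A hedged sketch, assuming the channel advertises descriptor reuse in its slave caps; the foo_* names are made up:

static int foo_submit_reusable(struct dma_async_tx_descriptor *desc)
{
        int ret = dmaengine_desc_set_reuse(desc);       /* -EPERM if unsupported */

        if (ret)
                return ret;

        dmaengine_submit(desc);         /* may be resubmitted after it completes */
        return 0;
}

static void foo_drop_reusable(struct dma_async_tx_descriptor *desc)
{
        /* dmaengine_desc_free() only calls desc->desc_free() while the
         * reuse flag is still set, so free it before clearing anything */
        dmaengine_desc_free(desc);
}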
1226 void dma_run_dependencies(struct dma_async_tx_descriptor *tx);
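dma_run_dependencies() is called by a provider from its completion path to submit any descriptors that were chained (via txd_chain()) behind the one that just finished. A rough sketch of where it sits, assuming the older callback/callback_param completion style and a made-up foo_complete_txd():

static void foo_complete_txd(struct dma_async_tx_descriptor *txd)
{
        if (txd->callback)
                txd->callback(txd->callback_param);

        dma_run_dependencies(txd);      /* kick descriptors that depend on txd */
}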