Lines matching refs:desc (references to "desc" in the txx9dmac DMA engine driver)

148 				 const struct txx9dmac_desc *desc)  in desc_read_CHAR()  argument
150 return is_dmac64(dc) ? desc->hwdesc.CHAR : desc->hwdesc32.CHAR; in desc_read_CHAR()
154 struct txx9dmac_desc *desc, dma_addr_t val) in desc_write_CHAR() argument
157 desc->hwdesc.CHAR = val; in desc_write_CHAR()
159 desc->hwdesc32.CHAR = val; in desc_write_CHAR()
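
The accessor pair on lines 148-159 hides the 64-bit vs 32-bit hardware descriptor layout behind a runtime check. A minimal sketch of that pattern, reconstructed from the lines shown (the first parameter of desc_read_CHAR and the if/else shape of desc_write_CHAR are filled in from context):

	static dma_addr_t desc_read_CHAR(const struct txx9dmac_chan *dc,
					 const struct txx9dmac_desc *desc)
	{
		/* 64-bit controllers use hwdesc, 32-bit ones hwdesc32 */
		return is_dmac64(dc) ? desc->hwdesc.CHAR : desc->hwdesc32.CHAR;
	}

	static void desc_write_CHAR(const struct txx9dmac_chan *dc,
				    struct txx9dmac_desc *desc, dma_addr_t val)
	{
		if (is_dmac64(dc))
			desc->hwdesc.CHAR = val;
		else
			desc->hwdesc32.CHAR = val;
	}
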
183 static struct txx9dmac_desc *txx9dmac_last_child(struct txx9dmac_desc *desc) in txx9dmac_last_child() argument
185 if (!list_empty(&desc->tx_list)) in txx9dmac_last_child()
186 desc = list_entry(desc->tx_list.prev, typeof(*desc), desc_node); in txx9dmac_last_child()
187 return desc; in txx9dmac_last_child()
196 struct txx9dmac_desc *desc; in txx9dmac_desc_alloc() local
198 desc = kzalloc(sizeof(*desc), flags); in txx9dmac_desc_alloc()
199 if (!desc) in txx9dmac_desc_alloc()
201 INIT_LIST_HEAD(&desc->tx_list); in txx9dmac_desc_alloc()
202 dma_async_tx_descriptor_init(&desc->txd, &dc->chan); in txx9dmac_desc_alloc()
203 desc->txd.tx_submit = txx9dmac_tx_submit; in txx9dmac_desc_alloc()
205 desc->txd.flags = DMA_CTRL_ACK; in txx9dmac_desc_alloc()
206 desc->txd.phys = dma_map_single(chan2parent(&dc->chan), &desc->hwdesc, in txx9dmac_desc_alloc()
208 return desc; in txx9dmac_desc_alloc()
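
Lines 196-208 show how a descriptor is created: zero-allocated, initialized as a dmaengine tx descriptor with txx9dmac_tx_submit as its submit hook, pre-ACKed, and then mapped so the controller can fetch the in-memory hardware descriptor. A hedged reconstruction; the early-return error path, dc->ddev->descsize as the mapping size, and the DMA_TO_DEVICE direction are assumptions not visible above:

	static struct txx9dmac_desc *txx9dmac_desc_alloc(struct txx9dmac_chan *dc,
							 gfp_t flags)
	{
		struct txx9dmac_desc *desc;

		desc = kzalloc(sizeof(*desc), flags);
		if (!desc)
			return NULL;
		INIT_LIST_HEAD(&desc->tx_list);
		dma_async_tx_descriptor_init(&desc->txd, &dc->chan);
		desc->txd.tx_submit = txx9dmac_tx_submit;
		/* start out ACKed so the descriptor can be reused immediately */
		desc->txd.flags = DMA_CTRL_ACK;
		/* map the embedded hardware descriptor for device access;
		 * size and direction are assumptions (see lead-in) */
		desc->txd.phys = dma_map_single(chan2parent(&dc->chan),
						&desc->hwdesc,
						dc->ddev->descsize,
						DMA_TO_DEVICE);
		return desc;
	}
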
213 struct txx9dmac_desc *desc, *_desc; in txx9dmac_desc_get() local
218 list_for_each_entry_safe(desc, _desc, &dc->free_list, desc_node) { in txx9dmac_desc_get()
219 if (async_tx_test_ack(&desc->txd)) { in txx9dmac_desc_get()
220 list_del(&desc->desc_node); in txx9dmac_desc_get()
221 ret = desc; in txx9dmac_desc_get()
224 dev_dbg(chan2dev(&dc->chan), "desc %p not ACKed\n", desc); in txx9dmac_desc_get()
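
Lines 213-224 are the free-list scan: a descriptor may only be recycled once its previous transaction has been ACKed by the client. A condensed sketch, with the channel locking and the statistics kept by the real function left out:

	static struct txx9dmac_desc *txx9dmac_desc_get(struct txx9dmac_chan *dc)
	{
		struct txx9dmac_desc *desc, *_desc;
		struct txx9dmac_desc *ret = NULL;

		list_for_each_entry_safe(desc, _desc, &dc->free_list, desc_node) {
			if (async_tx_test_ack(&desc->txd)) {
				/* first ACKed descriptor wins */
				list_del(&desc->desc_node);
				ret = desc;
				break;
			}
			dev_dbg(chan2dev(&dc->chan), "desc %p not ACKed\n", desc);
		}
		return ret;
	}
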
245 struct txx9dmac_desc *desc) in txx9dmac_sync_desc_for_cpu() argument
250 list_for_each_entry(child, &desc->tx_list, desc_node) in txx9dmac_sync_desc_for_cpu()
255 desc->txd.phys, ddev->descsize, in txx9dmac_sync_desc_for_cpu()
264 struct txx9dmac_desc *desc) in txx9dmac_desc_put() argument
266 if (desc) { in txx9dmac_desc_put()
269 txx9dmac_sync_desc_for_cpu(dc, desc); in txx9dmac_desc_put()
272 list_for_each_entry(child, &desc->tx_list, desc_node) in txx9dmac_desc_put()
276 list_splice_init(&desc->tx_list, &dc->free_list); in txx9dmac_desc_put()
278 desc); in txx9dmac_desc_put()
279 list_add(&desc->desc_node, &dc->free_list); in txx9dmac_desc_put()
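
Lines 245-279 pair a CPU-side sync of the whole descriptor chain with the put path that returns the parent and its chained children to the channel's free list. A sketch of the put side, with the debug prints dropped; txx9dmac_sync_desc_for_cpu() is assumed to resolve to dma_sync_single_for_cpu() on each descriptor's mapping, which lines 250-255 hint at but do not show in full:

	static void txx9dmac_desc_put(struct txx9dmac_chan *dc,
				      struct txx9dmac_desc *desc)
	{
		if (!desc)
			return;
		/* hand the descriptor memory back to the CPU before reuse */
		txx9dmac_sync_desc_for_cpu(dc, desc);
		/* children first, then the parent, all onto the free list */
		list_splice_init(&desc->tx_list, &dc->free_list);
		list_add(&desc->desc_node, &dc->free_list);
	}
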
404 struct txx9dmac_desc *desc) in txx9dmac_descriptor_complete() argument
408 struct dma_async_tx_descriptor *txd = &desc->txd; in txx9dmac_descriptor_complete()
411 txd->cookie, desc); in txx9dmac_descriptor_complete()
417 txx9dmac_sync_desc_for_cpu(dc, desc); in txx9dmac_descriptor_complete()
418 list_splice_init(&desc->tx_list, &dc->free_list); in txx9dmac_descriptor_complete()
419 list_move(&desc->desc_node, &dc->free_list); in txx9dmac_descriptor_complete()
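
Lines 404-419 show the completion path: the chain is synced back to the CPU, then the children on tx_list and the descriptor itself are returned to the free list. A sketch; the cookie update and the client-callback invocation that follow in the driver are not visible above and are only noted in a comment:

	static void txx9dmac_descriptor_complete(struct txx9dmac_chan *dc,
						 struct txx9dmac_desc *desc)
	{
		struct dma_async_tx_descriptor *txd = &desc->txd;

		dev_vdbg(chan2dev(&dc->chan), "descriptor %d %p complete\n",
			 txd->cookie, desc);

		txx9dmac_sync_desc_for_cpu(dc, desc);
		list_splice_init(&desc->tx_list, &dc->free_list);
		list_move(&desc->desc_node, &dc->free_list);

		/* cookie bookkeeping and the dmaengine completion callback
		 * happen after this point in the real function */
	}
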
434 struct txx9dmac_desc *desc; in txx9dmac_dequeue() local
439 desc = txx9dmac_first_queued(dc); in txx9dmac_dequeue()
441 desc_write_CHAR(dc, prev, desc->txd.phys); in txx9dmac_dequeue()
446 prev = txx9dmac_last_child(desc); in txx9dmac_dequeue()
447 list_move_tail(&desc->desc_node, list); in txx9dmac_dequeue()
449 if ((desc->txd.flags & DMA_PREP_INTERRUPT) && in txx9dmac_dequeue()
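
Lines 434-449 are the queue-to-active handoff: each dequeued descriptor is linked into the running hardware chain by writing its bus address into the previous tail's CHAR field. A simplified sketch; the real loop also syncs the rewritten descriptor for the device and checks the channel's interrupt-enable state before honoring DMA_PREP_INTERRUPT:

	static void txx9dmac_dequeue(struct txx9dmac_chan *dc, struct list_head *list)
	{
		struct txx9dmac_desc *desc;
		struct txx9dmac_desc *prev = NULL;

		/* caller guarantees dc->queue is not empty */
		do {
			desc = txx9dmac_first_queued(dc);
			if (prev)
				/* hardware follows CHAR to the next descriptor */
				desc_write_CHAR(dc, prev, desc->txd.phys);
			prev = txx9dmac_last_child(desc);
			list_move_tail(&desc->desc_node, list);
		} while (!list_empty(&dc->queue) &&
			 !(desc->txd.flags & DMA_PREP_INTERRUPT));
	}
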
457 struct txx9dmac_desc *desc, *_desc; in txx9dmac_complete_all() local
470 list_for_each_entry_safe(desc, _desc, &list, desc_node) in txx9dmac_complete_all()
471 txx9dmac_descriptor_complete(dc, desc); in txx9dmac_complete_all()
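
txx9dmac_complete_all() here (lines 470-471) and txx9dmac_terminate_all() further down (lines 922-923) finish the same way: descriptors are first spliced onto a private list under the channel lock, then completed one by one. The splice itself is not visible in the listing; a sketch of the shared tail, wrapped in a hypothetical helper name:

	static void txx9dmac_complete_list(struct txx9dmac_chan *dc,	/* hypothetical helper */
					   struct list_head *list)
	{
		struct txx9dmac_desc *desc, *_desc;

		list_for_each_entry_safe(desc, _desc, list, desc_node)
			txx9dmac_descriptor_complete(dc, desc);
	}
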
475 struct txx9dmac_hwdesc *desc) in txx9dmac_dump_desc() argument
481 (u64)desc->CHAR, desc->SAR, desc->DAR, desc->CNTR); in txx9dmac_dump_desc()
486 (u64)desc->CHAR, desc->SAR, desc->DAR, desc->CNTR, in txx9dmac_dump_desc()
487 desc->SAIR, desc->DAIR, desc->CCR, desc->CSR); in txx9dmac_dump_desc()
490 struct txx9dmac_hwdesc32 *d = (struct txx9dmac_hwdesc32 *)desc; in txx9dmac_dump_desc()
546 struct txx9dmac_desc *desc, *_desc; in txx9dmac_scan_descriptors() local
571 list_for_each_entry_safe(desc, _desc, &dc->active_list, desc_node) { in txx9dmac_scan_descriptors()
572 if (desc_read_CHAR(dc, desc) == chain) { in txx9dmac_scan_descriptors()
579 list_for_each_entry(child, &desc->tx_list, desc_node) in txx9dmac_scan_descriptors()
591 txx9dmac_descriptor_complete(dc, desc); in txx9dmac_scan_descriptors()
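
Lines 546-591 show how completion is detected: the channel's current CHAR (chain pointer) is compared against each active descriptor and its children; any chain the hardware has moved past must be finished. A sketch of the scan; reading "chain" back from the channel register differs between the 64-bit and 32-bit controllers and is stubbed out with a hypothetical helper:

	static void txx9dmac_scan_descriptors(struct txx9dmac_chan *dc)
	{
		struct txx9dmac_desc *desc, *_desc;
		struct txx9dmac_desc *child;
		dma_addr_t chain;

		/* hypothetical stand-in for the CHAR register read */
		chain = txx9dmac_read_chain(dc);

		list_for_each_entry_safe(desc, _desc, &dc->active_list, desc_node) {
			if (desc_read_CHAR(dc, desc) == chain)
				return;		/* hardware is still on this element */
			list_for_each_entry(child, &desc->tx_list, desc_node)
				if (desc_read_CHAR(dc, child) == chain)
					return;	/* still inside this chain */
			/* nothing in this chain is in progress: it is done */
			txx9dmac_descriptor_complete(dc, desc);
		}
	}
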
699 struct txx9dmac_desc *desc = txd_to_txx9dmac_desc(tx); in txx9dmac_tx_submit() local
707 desc->txd.cookie, desc); in txx9dmac_tx_submit()
709 list_add_tail(&desc->desc_node, &dc->queue); in txx9dmac_tx_submit()
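
Lines 699-709 show that submit only queues: the descriptor is appended to dc->queue and the hardware is kicked later (from issue_pending or the completion path). A sketch of that flow; the lock field, the to_txx9dmac_chan() cast helper, and dma_cookie_assign() as the cookie mechanism are assumptions:

	static dma_cookie_t txx9dmac_tx_submit(struct dma_async_tx_descriptor *tx)
	{
		struct txx9dmac_desc *desc = txd_to_txx9dmac_desc(tx);
		struct txx9dmac_chan *dc = to_txx9dmac_chan(tx->chan);	/* assumed cast helper */
		dma_cookie_t cookie;

		spin_lock_bh(&dc->lock);		/* lock field name assumed */
		cookie = dma_cookie_assign(tx);		/* cookie helper assumed */

		dev_vdbg(chan2dev(tx->chan), "tx_submit: queued %d %p\n",
			 desc->txd.cookie, desc);

		/* only queue here; the chain is started later */
		list_add_tail(&desc->desc_node, &dc->queue);
		spin_unlock_bh(&dc->lock);

		return cookie;
	}
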
721 struct txx9dmac_desc *desc; in txx9dmac_prep_dma_memcpy() local
755 desc = txx9dmac_desc_get(dc); in txx9dmac_prep_dma_memcpy()
756 if (!desc) { in txx9dmac_prep_dma_memcpy()
762 desc->hwdesc.SAR = src + offset; in txx9dmac_prep_dma_memcpy()
763 desc->hwdesc.DAR = dest + offset; in txx9dmac_prep_dma_memcpy()
764 desc->hwdesc.CNTR = xfer_count; in txx9dmac_prep_dma_memcpy()
765 txx9dmac_desc_set_nosimple(ddev, desc, 8, 8, in txx9dmac_prep_dma_memcpy()
768 desc->hwdesc32.SAR = src + offset; in txx9dmac_prep_dma_memcpy()
769 desc->hwdesc32.DAR = dest + offset; in txx9dmac_prep_dma_memcpy()
770 desc->hwdesc32.CNTR = xfer_count; in txx9dmac_prep_dma_memcpy()
771 txx9dmac_desc_set_nosimple(ddev, desc, 4, 4, in txx9dmac_prep_dma_memcpy()
783 first = desc; in txx9dmac_prep_dma_memcpy()
785 desc_write_CHAR(dc, prev, desc->txd.phys); in txx9dmac_prep_dma_memcpy()
789 list_add_tail(&desc->desc_node, &first->tx_list); in txx9dmac_prep_dma_memcpy()
791 prev = desc; in txx9dmac_prep_dma_memcpy()
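
Lines 721-791 build a memcpy chain chunk by chunk: the width-appropriate hardware descriptor gets SAR/DAR/CNTR for its slice, the first descriptor heads the chain, and every later one is linked to its predecessor through CHAR and parked on first->tx_list. A sketch of the per-chunk fill; the helper name is hypothetical, __is_dmac64() is assumed as the device-level width test, and the trailing arguments of txx9dmac_desc_set_nosimple() are not visible above:

	/* hypothetical helper distilled from lines 762-775 */
	static void txx9dmac_fill_memcpy_desc(struct txx9dmac_dev *ddev,
					      struct txx9dmac_desc *desc,
					      dma_addr_t dest, dma_addr_t src,
					      size_t offset, size_t xfer_count)
	{
		if (__is_dmac64(ddev)) {
			desc->hwdesc.SAR = src + offset;
			desc->hwdesc.DAR = dest + offset;
			desc->hwdesc.CNTR = xfer_count;
			/* txx9dmac_desc_set_nosimple(ddev, desc, 8, 8, ...) follows */
		} else {
			desc->hwdesc32.SAR = src + offset;
			desc->hwdesc32.DAR = dest + offset;
			desc->hwdesc32.CNTR = xfer_count;
			/* txx9dmac_desc_set_nosimple(ddev, desc, 4, 4, ...) follows */
		}
	}

The chaining that follows (lines 783-791) is the usual first/prev pattern: if there is no first descriptor yet this one becomes it; otherwise desc_write_CHAR(dc, prev, desc->txd.phys) links it after the previous chunk and list_add_tail() files it on first->tx_list.
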
835 struct txx9dmac_desc *desc; in txx9dmac_prep_slave_sg() local
839 desc = txx9dmac_desc_get(dc); in txx9dmac_prep_slave_sg()
840 if (!desc) { in txx9dmac_prep_slave_sg()
849 desc->hwdesc.SAR = mem; in txx9dmac_prep_slave_sg()
850 desc->hwdesc.DAR = ds->tx_reg; in txx9dmac_prep_slave_sg()
852 desc->hwdesc.SAR = ds->rx_reg; in txx9dmac_prep_slave_sg()
853 desc->hwdesc.DAR = mem; in txx9dmac_prep_slave_sg()
855 desc->hwdesc.CNTR = sg_dma_len(sg); in txx9dmac_prep_slave_sg()
858 desc->hwdesc32.SAR = mem; in txx9dmac_prep_slave_sg()
859 desc->hwdesc32.DAR = ds->tx_reg; in txx9dmac_prep_slave_sg()
861 desc->hwdesc32.SAR = ds->rx_reg; in txx9dmac_prep_slave_sg()
862 desc->hwdesc32.DAR = mem; in txx9dmac_prep_slave_sg()
864 desc->hwdesc32.CNTR = sg_dma_len(sg); in txx9dmac_prep_slave_sg()
873 txx9dmac_desc_set_nosimple(ddev, desc, sai, dai, in txx9dmac_prep_slave_sg()
877 first = desc; in txx9dmac_prep_slave_sg()
879 desc_write_CHAR(dc, prev, desc->txd.phys); in txx9dmac_prep_slave_sg()
884 list_add_tail(&desc->desc_node, &first->tx_list); in txx9dmac_prep_slave_sg()
886 prev = desc; in txx9dmac_prep_slave_sg()
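
Lines 835-886 do the same chain building for slave transfers, but source and destination depend on direction: memory feeds the device's tx_reg for mem-to-dev, and rx_reg feeds memory for dev-to-mem, with CNTR taken from the scatterlist entry length. A sketch of that direction split; the helper name, the dma_transfer_direction enum, and struct txx9dmac_slave as the type of ds are assumptions:

	/* hypothetical helper distilled from lines 849-864 */
	static void txx9dmac_fill_slave_desc(struct txx9dmac_dev *ddev,
					     struct txx9dmac_desc *desc,
					     struct txx9dmac_slave *ds,
					     struct scatterlist *sg,
					     enum dma_transfer_direction dir)
	{
		dma_addr_t mem = sg_dma_address(sg);

		if (__is_dmac64(ddev)) {
			if (dir == DMA_MEM_TO_DEV) {
				desc->hwdesc.SAR = mem;
				desc->hwdesc.DAR = ds->tx_reg;
			} else {
				desc->hwdesc.SAR = ds->rx_reg;
				desc->hwdesc.DAR = mem;
			}
			desc->hwdesc.CNTR = sg_dma_len(sg);
		} else {
			if (dir == DMA_MEM_TO_DEV) {
				desc->hwdesc32.SAR = mem;
				desc->hwdesc32.DAR = ds->tx_reg;
			} else {
				desc->hwdesc32.SAR = ds->rx_reg;
				desc->hwdesc32.DAR = mem;
			}
			desc->hwdesc32.CNTR = sg_dma_len(sg);
		}
	}
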
907 struct txx9dmac_desc *desc, *_desc; in txx9dmac_terminate_all() local
922 list_for_each_entry_safe(desc, _desc, &list, desc_node) in txx9dmac_terminate_all()
923 txx9dmac_descriptor_complete(dc, desc); in txx9dmac_terminate_all()
950 struct txx9dmac_desc *desc; in txx9dmac_chain_dynamic() local
955 desc = list_entry(list.next, struct txx9dmac_desc, desc_node); in txx9dmac_chain_dynamic()
956 desc_write_CHAR(dc, prev, desc->txd.phys); in txx9dmac_chain_dynamic()
964 channel_write_CHAR(dc, desc->txd.phys); in txx9dmac_chain_dynamic()
996 struct txx9dmac_desc *desc; in txx9dmac_alloc_chan_resources() local
1031 desc = txx9dmac_desc_alloc(dc, GFP_KERNEL); in txx9dmac_alloc_chan_resources()
1032 if (!desc) { in txx9dmac_alloc_chan_resources()
1038 txx9dmac_desc_put(dc, desc); in txx9dmac_alloc_chan_resources()
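
Lines 996-1038 preallocate descriptors when a channel is requested: each one is allocated with GFP_KERNEL and immediately "put", which files it on the free list. The gist of that loop, with hypothetical names for the target count and the running tally:

	/* sketch of the preallocation loop around lines 1031-1038 */
	while (dc->descs_allocated < NR_PREALLOC_DESCS) {	/* hypothetical names */
		desc = txx9dmac_desc_alloc(dc, GFP_KERNEL);
		if (!desc)
			break;		/* keep whatever was allocated so far */
		/* a fresh descriptor goes straight onto the free list */
		txx9dmac_desc_put(dc, desc);
		dc->descs_allocated++;
	}
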
1055 struct txx9dmac_desc *desc, *_desc; in txx9dmac_free_chan_resources() local
1071 list_for_each_entry_safe(desc, _desc, &list, desc_node) { in txx9dmac_free_chan_resources()
1072 dev_vdbg(chan2dev(chan), " freeing descriptor %p\n", desc); in txx9dmac_free_chan_resources()
1073 dma_unmap_single(chan2parent(chan), desc->txd.phys, in txx9dmac_free_chan_resources()
1075 kfree(desc); in txx9dmac_free_chan_resources()
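
Lines 1055-1075 undo the allocation when the channel is released: every descriptor on the (spliced-off) free list has its hardware-descriptor mapping removed and is then freed. A sketch; the to_txx9dmac_chan() cast helper, the splice under the lock, and the DMA_TO_DEVICE direction (matching the mapping made at alloc time) are assumptions:

	static void txx9dmac_free_chan_resources(struct dma_chan *chan)
	{
		struct txx9dmac_chan *dc = to_txx9dmac_chan(chan);	/* assumed cast helper */
		struct txx9dmac_dev *ddev = dc->ddev;
		struct txx9dmac_desc *desc, *_desc;
		LIST_HEAD(list);

		/* done under the channel lock in the real driver */
		list_splice_init(&dc->free_list, &list);

		list_for_each_entry_safe(desc, _desc, &list, desc_node) {
			dev_vdbg(chan2dev(chan), " freeing descriptor %p\n", desc);
			/* undo the dma_map_single() from txx9dmac_desc_alloc() */
			dma_unmap_single(chan2parent(chan), desc->txd.phys,
					 ddev->descsize, DMA_TO_DEVICE);
			kfree(desc);
		}
	}
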