Lines Matching refs:desc (drivers/dma/ep93xx_dma.c; a sketch of the descriptor free-list handling follows the listing)
237 struct ep93xx_dma_desc *desc) in ep93xx_dma_set_active() argument
241 list_add_tail(&desc->node, &edmac->active); in ep93xx_dma_set_active()
244 while (!list_empty(&desc->tx_list)) { in ep93xx_dma_set_active()
245 struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list, in ep93xx_dma_set_active()
254 d->txd.callback = desc->txd.callback; in ep93xx_dma_set_active()
255 d->txd.callback_param = desc->txd.callback_param; in ep93xx_dma_set_active()
285 struct ep93xx_dma_desc *desc; in ep93xx_dma_advance_active() local
292 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_advance_active()
293 if (!desc) in ep93xx_dma_advance_active()
300 return !desc->txd.cookie; in ep93xx_dma_advance_active()
355 struct ep93xx_dma_desc *desc; in m2p_fill_desc() local
358 desc = ep93xx_dma_get_active(edmac); in m2p_fill_desc()
359 if (!desc) { in m2p_fill_desc()
365 bus_addr = desc->src_addr; in m2p_fill_desc()
367 bus_addr = desc->dst_addr; in m2p_fill_desc()
370 writel(desc->size, edmac->regs + M2P_MAXCNT0); in m2p_fill_desc()
373 writel(desc->size, edmac->regs + M2P_MAXCNT1); in m2p_fill_desc()
401 struct ep93xx_dma_desc *desc = ep93xx_dma_get_active(edmac); in m2p_hw_interrupt() local
420 desc->txd.cookie, desc->src_addr, desc->dst_addr, in m2p_hw_interrupt()
421 desc->size); in m2p_hw_interrupt()
516 struct ep93xx_dma_desc *desc; in m2m_fill_desc() local
518 desc = ep93xx_dma_get_active(edmac); in m2m_fill_desc()
519 if (!desc) { in m2m_fill_desc()
525 writel(desc->src_addr, edmac->regs + M2M_SAR_BASE0); in m2m_fill_desc()
526 writel(desc->dst_addr, edmac->regs + M2M_DAR_BASE0); in m2m_fill_desc()
527 writel(desc->size, edmac->regs + M2M_BCR0); in m2m_fill_desc()
529 writel(desc->src_addr, edmac->regs + M2M_SAR_BASE1); in m2m_fill_desc()
530 writel(desc->dst_addr, edmac->regs + M2M_DAR_BASE1); in m2m_fill_desc()
531 writel(desc->size, edmac->regs + M2M_BCR1); in m2m_fill_desc()
593 struct ep93xx_dma_desc *desc; in m2m_hw_interrupt() local
608 desc = ep93xx_dma_get_active(edmac); in m2m_hw_interrupt()
609 last_done = !desc || desc->txd.cookie; in m2m_hw_interrupt()
667 struct ep93xx_dma_desc *desc, *_desc; in ep93xx_dma_desc_get() local
672 list_for_each_entry_safe(desc, _desc, &edmac->free_list, node) { in ep93xx_dma_desc_get()
673 if (async_tx_test_ack(&desc->txd)) { in ep93xx_dma_desc_get()
674 list_del_init(&desc->node); in ep93xx_dma_desc_get()
677 desc->src_addr = 0; in ep93xx_dma_desc_get()
678 desc->dst_addr = 0; in ep93xx_dma_desc_get()
679 desc->size = 0; in ep93xx_dma_desc_get()
680 desc->complete = false; in ep93xx_dma_desc_get()
681 desc->txd.cookie = 0; in ep93xx_dma_desc_get()
682 desc->txd.callback = NULL; in ep93xx_dma_desc_get()
683 desc->txd.callback_param = NULL; in ep93xx_dma_desc_get()
685 ret = desc; in ep93xx_dma_desc_get()
694 struct ep93xx_dma_desc *desc) in ep93xx_dma_desc_put() argument
696 if (desc) { in ep93xx_dma_desc_put()
700 list_splice_init(&desc->tx_list, &edmac->free_list); in ep93xx_dma_desc_put()
701 list_add(&desc->node, &edmac->free_list); in ep93xx_dma_desc_put()
739 struct ep93xx_dma_desc *desc, *d; in ep93xx_dma_tasklet() local
750 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_tasklet()
751 if (desc) { in ep93xx_dma_tasklet()
752 if (desc->complete) { in ep93xx_dma_tasklet()
755 dma_cookie_complete(&desc->txd); in ep93xx_dma_tasklet()
758 callback = desc->txd.callback; in ep93xx_dma_tasklet()
759 callback_param = desc->txd.callback_param; in ep93xx_dma_tasklet()
767 list_for_each_entry_safe(desc, d, &list, node) { in ep93xx_dma_tasklet()
768 dma_descriptor_unmap(&desc->txd); in ep93xx_dma_tasklet()
769 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_tasklet()
779 struct ep93xx_dma_desc *desc; in ep93xx_dma_interrupt() local
784 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_interrupt()
785 if (!desc) { in ep93xx_dma_interrupt()
794 desc->complete = true; in ep93xx_dma_interrupt()
824 struct ep93xx_dma_desc *desc; in ep93xx_dma_tx_submit() local
831 desc = container_of(tx, struct ep93xx_dma_desc, txd); in ep93xx_dma_tx_submit()
839 ep93xx_dma_set_active(edmac, desc); in ep93xx_dma_tx_submit()
842 list_add_tail(&desc->node, &edmac->queue); in ep93xx_dma_tx_submit()
907 struct ep93xx_dma_desc *desc; in ep93xx_dma_alloc_chan_resources() local
909 desc = kzalloc(sizeof(*desc), GFP_KERNEL); in ep93xx_dma_alloc_chan_resources()
910 if (!desc) { in ep93xx_dma_alloc_chan_resources()
915 INIT_LIST_HEAD(&desc->tx_list); in ep93xx_dma_alloc_chan_resources()
917 dma_async_tx_descriptor_init(&desc->txd, chan); in ep93xx_dma_alloc_chan_resources()
918 desc->txd.flags = DMA_CTRL_ACK; in ep93xx_dma_alloc_chan_resources()
919 desc->txd.tx_submit = ep93xx_dma_tx_submit; in ep93xx_dma_alloc_chan_resources()
921 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_alloc_chan_resources()
944 struct ep93xx_dma_desc *desc, *d; in ep93xx_dma_free_chan_resources() local
959 list_for_each_entry_safe(desc, d, &list, node) in ep93xx_dma_free_chan_resources()
960 kfree(desc); in ep93xx_dma_free_chan_resources()
981 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_dma_memcpy() local
986 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_dma_memcpy()
987 if (!desc) { in ep93xx_dma_prep_dma_memcpy()
994 desc->src_addr = src + offset; in ep93xx_dma_prep_dma_memcpy()
995 desc->dst_addr = dest + offset; in ep93xx_dma_prep_dma_memcpy()
996 desc->size = bytes; in ep93xx_dma_prep_dma_memcpy()
999 first = desc; in ep93xx_dma_prep_dma_memcpy()
1001 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_dma_memcpy()
1030 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_slave_sg() local
1056 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_slave_sg()
1057 if (!desc) { in ep93xx_dma_prep_slave_sg()
1063 desc->src_addr = sg_dma_address(sg); in ep93xx_dma_prep_slave_sg()
1064 desc->dst_addr = edmac->runtime_addr; in ep93xx_dma_prep_slave_sg()
1066 desc->src_addr = edmac->runtime_addr; in ep93xx_dma_prep_slave_sg()
1067 desc->dst_addr = sg_dma_address(sg); in ep93xx_dma_prep_slave_sg()
1069 desc->size = sg_len; in ep93xx_dma_prep_slave_sg()
1072 first = desc; in ep93xx_dma_prep_slave_sg()
1074 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_slave_sg()
1110 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_dma_cyclic() local
1134 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_dma_cyclic()
1135 if (!desc) { in ep93xx_dma_prep_dma_cyclic()
1141 desc->src_addr = dma_addr + offset; in ep93xx_dma_prep_dma_cyclic()
1142 desc->dst_addr = edmac->runtime_addr; in ep93xx_dma_prep_dma_cyclic()
1144 desc->src_addr = edmac->runtime_addr; in ep93xx_dma_prep_dma_cyclic()
1145 desc->dst_addr = dma_addr + offset; in ep93xx_dma_prep_dma_cyclic()
1148 desc->size = period_len; in ep93xx_dma_prep_dma_cyclic()
1151 first = desc; in ep93xx_dma_prep_dma_cyclic()
1153 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_dma_cyclic()
1175 struct ep93xx_dma_desc *desc, *_d; in ep93xx_dma_terminate_all() local
1192 list_for_each_entry_safe(desc, _d, &list, node) in ep93xx_dma_terminate_all()
1193 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_terminate_all()
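
Taken together, the matched lines show the driver's descriptor lifecycle: ep93xx_dma_desc_get() recycles an already-acked descriptor from the channel's free_list and clears its fields, ep93xx_dma_desc_put() returns a descriptor (together with its tx_list chain) to the free_list, and ep93xx_dma_tx_submit() either makes the descriptor active or appends it to the queue. The fragment below is a minimal sketch of the get/put pair reconstructed only from the matched lines above; the real driver also holds edmac->lock around the list walk and contains context this listing omits, so treat it as illustrative rather than the verbatim source.

/*
 * Sketch of the descriptor free-list handling visible in the hits above.
 * Reconstructed from the matched lines; locking (edmac->lock) and the
 * surrounding driver context are intentionally elided.
 */
static struct ep93xx_dma_desc *
ep93xx_dma_desc_get(struct ep93xx_dma_chan *edmac)
{
	struct ep93xx_dma_desc *desc, *_desc, *ret = NULL;

	list_for_each_entry_safe(desc, _desc, &edmac->free_list, node) {
		/* Only reuse descriptors the client has already acked */
		if (async_tx_test_ack(&desc->txd)) {
			list_del_init(&desc->node);

			/* Re-initialize the descriptor for the next transfer */
			desc->src_addr = 0;
			desc->dst_addr = 0;
			desc->size = 0;
			desc->complete = false;
			desc->txd.cookie = 0;
			desc->txd.callback = NULL;
			desc->txd.callback_param = NULL;

			ret = desc;
			break;
		}
	}
	return ret;
}

static void ep93xx_dma_desc_put(struct ep93xx_dma_chan *edmac,
				struct ep93xx_dma_desc *desc)
{
	if (desc) {
		/* Return any chained descriptors and the head to the free list */
		list_splice_init(&desc->tx_list, &edmac->free_list);
		list_add(&desc->node, &edmac->free_list);
	}
}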