Lines matching refs:desc (cross-references to the descriptor variable desc in the Linux shdma-base DMA driver, drivers/dma/sh/shdma-base.c; the leading number on each entry is the line in that file)
75 struct shdma_desc *chunk, *c, *desc = in shdma_tx_submit() local
89 list_for_each_entry_safe(chunk, c, desc->node.prev, node) { in shdma_tx_submit()
94 if (chunk != desc && (chunk->mark == DESC_IDLE || in shdma_tx_submit()
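The three matches above sit in shdma_tx_submit(), where a prepared transfer is given a cookie and its chunks are moved onto the channel's submit queue. A condensed sketch of that function, reconstructed from the matches and the mainline shdma-base.c (callback plumbing and runtime-PM handling elided; details may vary between kernel versions):

    static dma_cookie_t shdma_tx_submit(struct dma_async_tx_descriptor *tx)
    {
        struct shdma_desc *chunk, *c, *desc =
            container_of(tx, struct shdma_desc, async_tx);
        struct shdma_chan *schan = to_shdma_chan(tx->chan);
        dma_cookie_t cookie;

        spin_lock_irq(&schan->chan_lock);
        cookie = dma_cookie_assign(tx);

        /* All chunks of all transfers share one list, so the walk
         * starts at desc (via desc->node.prev, line 89) and stops at
         * the first chunk that cannot belong to this transfer (the
         * condition on line 94). */
        list_for_each_entry_safe(chunk, c, desc->node.prev, node) {
            if (chunk != desc && (chunk->mark == DESC_IDLE ||
                          chunk->async_tx.cookie > 0 ||
                          chunk->async_tx.cookie == -EBUSY ||
                          &chunk->node == &schan->ld_free))
                break;
            chunk->mark = DESC_SUBMITTED;
            chunk->cookie = cookie;
            list_move_tail(&chunk->node, &schan->ld_queue);
        }

        spin_unlock_irq(&schan->chan_lock);
        return cookie;
    }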
211 struct shdma_desc *desc; in shdma_alloc_chan_resources() local
230 schan->desc = kcalloc(NR_DESCS_PER_CHANNEL, in shdma_alloc_chan_resources()
232 if (!schan->desc) { in shdma_alloc_chan_resources()
239 desc = ops->embedded_desc(schan->desc, i); in shdma_alloc_chan_resources()
240 dma_async_tx_descriptor_init(&desc->async_tx, in shdma_alloc_chan_resources()
242 desc->async_tx.tx_submit = shdma_tx_submit; in shdma_alloc_chan_resources()
243 desc->mark = DESC_IDLE; in shdma_alloc_chan_resources()
245 list_add(&desc->node, &schan->ld_free); in shdma_alloc_chan_resources()
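Lines 211–245 are shdma_alloc_chan_resources(): the per-channel descriptor pool is one kcalloc() block of NR_DESCS_PER_CHANNEL driver-specific descriptors, each initialized as a dmaengine tx descriptor, marked DESC_IDLE and parked on ld_free. A sketch of that loop (the kcalloc() size argument and the second argument to dma_async_tx_descriptor_init() are truncated out of the listing and taken from mainline, so treat them as assumptions):

    schan->desc = kcalloc(NR_DESCS_PER_CHANNEL,
                  sdev->desc_size, GFP_KERNEL);
    if (!schan->desc)
        return -ENOMEM;
    schan->desc_num = NR_DESCS_PER_CHANNEL;

    for (i = 0; i < NR_DESCS_PER_CHANNEL; i++) {
        /* ops->embedded_desc() maps pool slot i to the driver's own
         * descriptor type, which embeds struct shdma_desc */
        desc = ops->embedded_desc(schan->desc, i);
        dma_async_tx_descriptor_init(&desc->async_tx,
                         &schan->dma_chan);
        desc->async_tx.tx_submit = shdma_tx_submit;
        desc->mark = DESC_IDLE;
        list_add(&desc->node, &schan->ld_free);
    }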
328 struct shdma_desc *desc, *_desc; in __ld_cleanup() local
338 list_for_each_entry_safe(desc, _desc, &schan->ld_queue, node) { in __ld_cleanup()
339 struct dma_async_tx_descriptor *tx = &desc->async_tx; in __ld_cleanup()
341 BUG_ON(tx->cookie > 0 && tx->cookie != desc->cookie); in __ld_cleanup()
342 BUG_ON(desc->mark != DESC_SUBMITTED && in __ld_cleanup()
343 desc->mark != DESC_COMPLETED && in __ld_cleanup()
344 desc->mark != DESC_WAITING); in __ld_cleanup()
351 if (!all && desc->mark == DESC_SUBMITTED && in __ld_cleanup()
352 desc->cookie != cookie) in __ld_cleanup()
358 if (desc->mark == DESC_COMPLETED && desc->chunks == 1) { in __ld_cleanup()
359 if (schan->dma_chan.completed_cookie != desc->cookie - 1) in __ld_cleanup()
362 desc->cookie, in __ld_cleanup()
364 schan->dma_chan.completed_cookie = desc->cookie; in __ld_cleanup()
368 if (desc->mark == DESC_COMPLETED && tx->callback) { in __ld_cleanup()
369 desc->mark = DESC_WAITING; in __ld_cleanup()
374 BUG_ON(desc->chunks != 1); in __ld_cleanup()
379 if (desc->mark == DESC_COMPLETED) { in __ld_cleanup()
381 desc->mark = DESC_WAITING; in __ld_cleanup()
385 switch (desc->mark) { in __ld_cleanup()
387 desc->mark = DESC_WAITING; in __ld_cleanup()
391 async_tx_ack(&desc->async_tx); in __ld_cleanup()
398 if (((desc->mark == DESC_COMPLETED || in __ld_cleanup()
399 desc->mark == DESC_WAITING) && in __ld_cleanup()
400 async_tx_test_ack(&desc->async_tx)) || all) { in __ld_cleanup()
402 if (all || !desc->cyclic) { in __ld_cleanup()
404 desc->mark = DESC_IDLE; in __ld_cleanup()
405 list_move(&desc->node, &schan->ld_free); in __ld_cleanup()
408 desc->mark = DESC_SUBMITTED; in __ld_cleanup()
409 list_move_tail(&desc->node, &cyclic_list); in __ld_cleanup()
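Lines 328–409 all fall in __ld_cleanup(), the heart of the descriptor life cycle: each descriptor on ld_queue moves DESC_SUBMITTED -> DESC_COMPLETED -> DESC_WAITING (callback still owed) -> DESC_IDLE (back on ld_free), except that cyclic descriptors are requeued as DESC_SUBMITTED instead of being freed. A condensed sketch of the loop, built from the matches above but with locking, dev_dbg() output and callback dispatch simplified:

    list_for_each_entry_safe(desc, _desc, &schan->ld_queue, node) {
        struct dma_async_tx_descriptor *tx = &desc->async_tx;

        BUG_ON(tx->cookie > 0 && tx->cookie != desc->cookie);
        BUG_ON(desc->mark != DESC_SUBMITTED &&
               desc->mark != DESC_COMPLETED &&
               desc->mark != DESC_WAITING);

        /* Stop at the first descriptor the hardware has not
         * finished yet, unless flushing everything ("all") */
        if (!all && desc->mark == DESC_SUBMITTED &&
            desc->cookie != cookie)
            break;

        /* Last chunk of a transfer: retire its cookie */
        if (desc->mark == DESC_COMPLETED && desc->chunks == 1)
            schan->dma_chan.completed_cookie = desc->cookie;

        /* A completed descriptor with a callback is only marked
         * DESC_WAITING here; the real function breaks out of the
         * loop and runs the callback with the channel lock
         * dropped (hence BUG_ON(desc->chunks != 1), line 374) */
        if (desc->mark == DESC_COMPLETED && tx->callback)
            desc->mark = DESC_WAITING;

        /* Once completed/waiting descriptors are acked, recycle
         * them: cyclic descriptors go back to DESC_SUBMITTED on a
         * temporary cyclic_list, everything else to ld_free */
        if (((desc->mark == DESC_COMPLETED ||
              desc->mark == DESC_WAITING) &&
             async_tx_test_ack(&desc->async_tx)) || all) {
            if (all || !desc->cyclic) {
                desc->mark = DESC_IDLE;
                list_move(&desc->node, &schan->ld_free);
            } else {
                desc->mark = DESC_SUBMITTED;
                list_move_tail(&desc->node, &cyclic_list);
            }
        }
    }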
486 kfree(schan->desc); in shdma_free_chan_resources()
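Line 486 is the counterpart in shdma_free_chan_resources(): because the pool was a single kcalloc() block, tearing the channel down is one kfree() once the free list has been spliced off under the lock. Roughly (the surrounding lines are mainline detail, not in this listing):

    spin_lock_irq(&schan->chan_lock);
    list_splice_init(&schan->ld_free, &list);   /* detach the pool */
    schan->desc_num = 0;
    spin_unlock_irq(&schan->chan_lock);

    kfree(schan->desc);                         /* line 486 */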
701 struct dma_async_tx_descriptor *desc; in shdma_prep_dma_cyclic() local
749 desc = shdma_prep_sg(schan, sgl, sg_len, &slave_addr, in shdma_prep_dma_cyclic()
753 return desc; in shdma_prep_dma_cyclic()
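Lines 701–753 are shdma_prep_dma_cyclic(): the cyclic buffer is described as a heap-allocated scatterlist of buf_len / period_len equal periods, then handed to the common shdma_prep_sg() with its cyclic flag set, which is what later makes __ld_cleanup() requeue rather than free the descriptors. A condensed sketch (the sg setup is reconstructed from mainline and not visible in the listing, so treat those helpers as assumptions):

    unsigned int sg_len = buf_len / period_len;
    dma_addr_t slave_addr = ops->slave_addr(schan);
    struct scatterlist *sgl;
    int i;

    /* The sg list is heap-allocated: one entry per period */
    sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_KERNEL);
    if (!sgl)
        return NULL;
    sg_init_table(sgl, sg_len);

    for (i = 0; i < sg_len; i++) {
        dma_addr_t src = buf_addr + (period_len * i);

        sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(src)),
                period_len, offset_in_page(src));
        sg_dma_address(&sgl[i]) = src;
        sg_dma_len(&sgl[i]) = period_len;
    }

    /* The last argument marks the descriptors as cyclic */
    desc = shdma_prep_sg(schan, sgl, sg_len, &slave_addr,
                 direction, flags, true);

    kfree(sgl);
    return desc;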
768 struct shdma_desc *desc = list_first_entry(&schan->ld_queue, in shdma_terminate_all() local
770 desc->partial = ops->get_partial(schan, desc); in shdma_terminate_all()
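The last two matches are in shdma_terminate_all(): after the channel is halted, the head of ld_queue (the transfer that was in flight) gets its residue recorded through the driver's get_partial() hook before the queue is flushed, so a client can tell how much of the terminated transfer completed. In context (the halt and the ops->get_partial guard are taken from mainline, not from the listing):

    ops->halt_channel(schan);

    if (ops->get_partial && !list_empty(&schan->ld_queue)) {
        /* Record how much of the in-flight transfer remained */
        struct shdma_desc *desc = list_first_entry(&schan->ld_queue,
                        struct shdma_desc, node);
        desc->partial = ops->get_partial(schan, desc);
    }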