Lines matching refs:vc (uses of the struct virt_dma_chan member vc in the k3 DMA engine driver):

78 struct virt_dma_chan vc; (member declaration in struct k3_dma_chan)
111 return container_of(chan, struct k3_dma_chan, vc.chan); in to_k3_chan()
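
These first two hits are the core virt-dma pattern: the driver embeds struct virt_dma_chan in its per-channel state, so the struct dma_chan the framework hands around can be converted back with container_of(). A minimal sketch of that shape, with the driver-private fields elided (virt-dma.h is the private header in drivers/dma/):

	#include "virt-dma.h"		/* struct virt_dma_chan, vchan_* helpers */

	struct k3_dma_chan {
		/* driver-private fields elided */
		struct virt_dma_chan	vc;	/* embeds struct dma_chan as vc.chan */
	};

	static struct k3_dma_chan *to_k3_chan(struct dma_chan *chan)
	{
		/* chan is really &k3_chan->vc.chan, so step back out */
		return container_of(chan, struct k3_dma_chan, vc.chan);
	}
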
208 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_int_handler()
211 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_int_handler()
232 struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device); in k3_dma_start_txd()
233 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in k3_dma_start_txd()
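
k3_dma_start_txd() is the standard "take the next issued descriptor" step: vchan_next_desc() returns the head of the vchan's desc_issued list and must be called with vc.lock held. A hedged sketch of that shape, in the same kernel context as the sketch above; the hardware programming is driver-specific and elided:

	/* call with c->vc.lock held */
	static int k3_dma_start_txd(struct k3_dma_chan *c)
	{
		struct virt_dma_desc *vd = vchan_next_desc(&c->vc);

		if (!vd)
			return -EAGAIN;		/* nothing issued */

		list_del(&vd->node);		/* claim it off desc_issued */
		/* program the physical channel from vd (elided) */
		return 0;
	}
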
268 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_tasklet()
269 spin_lock_irq(&c->vc.lock); in k3_dma_tasklet()
280 spin_unlock_irq(&c->vc.lock); in k3_dma_tasklet()
297 dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n", pch, &c->vc); in k3_dma_tasklet()
307 spin_lock_irq(&c->vc.lock); in k3_dma_tasklet()
309 spin_unlock_irq(&c->vc.lock); in k3_dma_tasklet()
325 vchan_free_chan_resources(&c->vc); in k3_dma_free_chan_resources()
340 ret = dma_cookie_status(&c->vc.chan, cookie, state); in k3_dma_tx_status()
344 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_tx_status()
352 vd = vchan_find_desc(&c->vc, cookie); in k3_dma_tx_status()
371 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_tx_status()
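
The tx_status path layers a residue lookup on top of the generic cookie bookkeeping: dma_cookie_status() settles the completed case, then vchan_find_desc(), under vc.lock, tells whether the cookie is still queued on a vchan list. A sketch; the descriptor size lookup depends on the driver's software-descriptor layout, so it is left as a comment:

	static enum dma_status k3_dma_tx_status(struct dma_chan *chan,
						dma_cookie_t cookie,
						struct dma_tx_state *state)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct virt_dma_desc *vd;
		unsigned long flags;
		enum dma_status ret;
		size_t bytes = 0;

		ret = dma_cookie_status(&c->vc.chan, cookie, state);
		if (ret == DMA_COMPLETE)
			return ret;

		spin_lock_irqsave(&c->vc.lock, flags);
		vd = vchan_find_desc(&c->vc, cookie);
		if (vd) {
			/* still queued: report the descriptor's full size
			 * as residue (driver-specific lookup elided) */
		}
		spin_unlock_irqrestore(&c->vc.lock, flags);

		dma_set_residue(state, bytes);
		return ret;
	}
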
382 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_issue_pending()
384 if (vchan_issue_pending(&c->vc)) { in k3_dma_issue_pending()
392 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in k3_dma_issue_pending()
397 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in k3_dma_issue_pending()
398 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_issue_pending()
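
issue_pending follows the canonical virt-dma shape: under vc.lock, vchan_issue_pending() splices desc_submitted onto desc_issued and returns true if there is now work, and only then does the driver go looking for hardware to run it on. Sketch; the kick itself is elided (in this driver it queues the vchan for the tasklet to pair with a physical channel):

	static void k3_dma_issue_pending(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		unsigned long flags;

		spin_lock_irqsave(&c->vc.lock, flags);
		if (vchan_issue_pending(&c->vc)) {
			/* queue c for a physical channel / kick hw (elided) */
		}
		spin_unlock_irqrestore(&c->vc.lock, flags);
	}
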
429 dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc); in k3_dma_prep_memcpy()
460 return vchan_tx_prep(&c->vc, &ds->vd, flags); in k3_dma_prep_memcpy()
485 dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc); in k3_dma_prep_slave_sg()
517 return vchan_tx_prep(&c->vc, &ds->vd, flags); in k3_dma_prep_slave_sg()
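
Both prep callbacks finish the same way: the freshly built software descriptor embeds a struct virt_dma_desc, and vchan_tx_prep() initialises its dma_async_tx_descriptor, ties it to the vchan, and parks it on desc_allocated until the client submits it. The descriptor layout and the k3_dma_prep_tail() helper below are hypothetical, inferred from the &ds->vd usage above rather than quoted from the driver:

	struct k3_dma_desc_sw {
		struct virt_dma_desc	vd;	/* what the virt-dma core tracks */
		/* hardware link-list pointers, sizes, etc. elided */
	};

	/* hypothetical helper: the shared tail of both prep callbacks */
	static struct dma_async_tx_descriptor *
	k3_dma_prep_tail(struct k3_dma_chan *c, struct k3_dma_desc_sw *ds,
			 unsigned long flags)
	{
		/* hands ownership of ds->vd to the virt-dma core */
		return vchan_tx_prep(&c->vc, &ds->vd, flags);
	}
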
562 c->ccfg |= c->vc.chan.chan_id << 4; in k3_dma_config()
575 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in k3_dma_terminate_all()
583 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_terminate_all()
584 vchan_get_all_descriptors(&c->vc, &head); in k3_dma_terminate_all()
592 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_terminate_all()
593 vchan_dma_desc_free_list(&c->vc, &head); in k3_dma_terminate_all()
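
terminate_all is the classic two-phase teardown: collect everything the vchan still owns while holding vc.lock, then free the collected list only after dropping the lock, since the desc_free callbacks should not run under a spinlock. Sketch, with the hardware shutdown elided:

	static int k3_dma_terminate_all(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		unsigned long flags;
		LIST_HEAD(head);

		/* stop the physical channel first (elided) */

		spin_lock_irqsave(&c->vc.lock, flags);
		vchan_get_all_descriptors(&c->vc, &head);  /* detach them all */
		spin_unlock_irqrestore(&c->vc.lock, flags);

		vchan_dma_desc_free_list(&c->vc, &head);   /* free unlocked */
		return 0;
	}
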
604 dev_dbg(d->slave.dev, "vchan %p: pause\n", &c->vc); in k3_dma_transfer_pause()
626 dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc); in k3_dma_transfer_resume()
627 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_transfer_resume()
632 } else if (!list_empty(&c->vc.desc_issued)) { in k3_dma_transfer_resume()
638 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_transfer_resume()
666 return dma_get_slave_channel(&(d->chans[request].vc.chan)); in k3_of_dma_simple_xlate()
747 c->vc.desc_free = k3_dma_free_desc; in k3_dma_probe()
748 vchan_init(&c->vc, &d->slave); in k3_dma_probe()
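
Probe wires each channel into the virt-dma core: desc_free is the mandatory recycling callback, and vchan_init() sets up vc.lock, the descriptor lists, the completion tasklet, and adds vc.chan to the dma_device's channel list, which is exactly the list the remove path below walks. Sketch of the per-channel loop (the loop bound's field name is illustrative):

	for (i = 0; i < d->dma_channels; i++) {
		struct k3_dma_chan *c = &d->chans[i];

		c->vc.desc_free = k3_dma_free_desc;	/* recycles a finished vd */
		vchan_init(&c->vc, &d->slave);		/* lock, lists, tasklet, and
							   list_add to d->slave.channels */
	}
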
790 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_remove()
791 list_del(&c->vc.chan.device_node); in k3_dma_remove()
792 tasklet_kill(&c->vc.task); in k3_dma_remove()