Lines Matching refs:vc

Every match below is the k3dma driver touching the struct virt_dma_chan it embeds as "vc": the glue between the driver's own channel state and the shared drivers/dma virt-dma helpers. The leading number on each entry is the source line in the driver; "in f()" names the enclosing function.

79 	struct virt_dma_chan	vc;  member of struct k3_dma_chan
112 return container_of(chan, struct k3_dma_chan, vc.chan); in to_k3_chan()
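
These first two matches are the whole trick of the virt-dma layer: the driver's channel struct embeds a struct virt_dma_chan, and the generic struct dma_chan that dmaengine hands around is recovered by walking back out with container_of(). A minimal sketch of that embedding; every field except vc and ccfg is illustrative:

	#include <linux/dmaengine.h>
	#include "virt-dma.h"			/* drivers/dma/virt-dma.h */

	struct k3_dma_chan {
		u32			ccfg;	/* hw channel config (see line 563) */
		struct virt_dma_chan	vc;	/* embedded, never a pointer */
		/* ... other driver-private state ... */
	};

	static struct k3_dma_chan *to_k3_chan(struct dma_chan *chan)
	{
		/* chan is really &k3_dma_chan.vc.chan, so step back out */
		return container_of(chan, struct k3_dma_chan, vc.chan);
	}
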
209 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_int_handler()
212 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_int_handler()
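
The interrupt handler only touches completion state under vc.lock. A hedged sketch of what happens between lines 209 and 212; the vchan_cookie_complete() call is the usual virt-dma idiom for "this descriptor finished", assumed here rather than visible in the matches:

	unsigned long flags;

	spin_lock_irqsave(&c->vc.lock, flags);
	/* mark the running descriptor done; virt-dma defers the client
	 * callback to its per-channel tasklet */
	vchan_cookie_complete(&ds->vd);		/* ds: hypothetical finished desc */
	spin_unlock_irqrestore(&c->vc.lock, flags);
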
233 struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device); in k3_dma_start_txd()
234 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in k3_dma_start_txd()
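
k3_dma_start_txd() is where a virtual channel's work moves toward hardware: vchan_next_desc() returns the head of vc.desc_issued (NULL if empty) and must be called with vc.lock held; taking the descriptor off the list is the caller's job. A sketch, with the hardware programming stubbed out:

	static int k3_dma_start_txd(struct k3_dma_chan *c)
	{
		struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device);
		struct virt_dma_desc *vd = vchan_next_desc(&c->vc);

		if (!vd)
			return -EAGAIN;		/* nothing issued */

		/* off desc_issued; from here the descriptor is hw-owned */
		list_del(&vd->node);
		dev_dbg(d->slave.dev, "starting txd on vchan %p\n", &c->vc);
		/* ... load vd's transfer into the physical channel ... */
		return 0;
	}
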
269 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_tasklet()
270 spin_lock_irq(&c->vc.lock); in k3_dma_tasklet()
281 spin_unlock_irq(&c->vc.lock); in k3_dma_tasklet()
298 dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n", pch, &c->vc); in k3_dma_tasklet()
308 spin_lock_irq(&c->vc.lock); in k3_dma_tasklet()
310 spin_unlock_irq(&c->vc.lock); in k3_dma_tasklet()
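
The tasklet is the scheduler: it walks every virtual channel hanging off the dma_device, and vc.chan.device_node shows that the list dmaengine already keeps for its channels doubles as the iteration list here. Each channel is examined under its own vc.lock (plain spin_lock_irq is fine in a tasklet). The matches show two locked regions with the "alloc vchan" debug print between them; roughly:

	struct k3_dma_chan *c, *cn;

	/* pass 1: advance or complete each vchan's current work */
	list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) {
		spin_lock_irq(&c->vc.lock);
		/* ... check for finished descriptors, recycle pchans ... */
		spin_unlock_irq(&c->vc.lock);
	}
	/* pass 2: hand free physical channels to waiting vchans,
	 * cf. the "pchan %u: alloc vchan %p" message above */
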
326 vchan_free_chan_resources(&c->vc); in k3_dma_free_chan_resources()
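
Freeing channel resources is one call because virt-dma owns all the descriptor lists; vchan_free_chan_resources() drains them and releases each descriptor through the vc.desc_free callback installed at probe time. Sketch:

	static void k3_dma_free_chan_resources(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);

		/* drops everything still on the allocated/submitted/
		 * issued/completed lists */
		vchan_free_chan_resources(&c->vc);
	}
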
341 ret = dma_cookie_status(&c->vc.chan, cookie, state); in k3_dma_tx_status()
345 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_tx_status()
353 vd = vchan_find_desc(&c->vc, cookie); in k3_dma_tx_status()
372 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_tx_status()
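
k3_dma_tx_status() has the canonical shape: ask the cookie bookkeeping first, and only for an in-flight transfer take vc.lock and call vchan_find_desc() to work out a residue. vchan_find_desc() only sees descriptors the virtual channel still owns, so NULL means the hardware has it and the residue comes from registers instead. A sketch with the residue arithmetic elided:

	static enum dma_status k3_dma_tx_status(struct dma_chan *chan,
						dma_cookie_t cookie,
						struct dma_tx_state *state)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct virt_dma_desc *vd;
		enum dma_status ret;
		unsigned long flags;

		ret = dma_cookie_status(&c->vc.chan, cookie, state);
		if (ret == DMA_COMPLETE)
			return ret;

		spin_lock_irqsave(&c->vc.lock, flags);
		vd = vchan_find_desc(&c->vc, cookie);
		if (vd) {
			/* not started: residue is the descriptor's full size */
		} else {
			/* running: read the remaining count from the pchan */
		}
		spin_unlock_irqrestore(&c->vc.lock, flags);
		return ret;
	}
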
383 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_issue_pending()
385 if (vchan_issue_pending(&c->vc)) { in k3_dma_issue_pending()
393 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in k3_dma_issue_pending()
398 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in k3_dma_issue_pending()
399 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_issue_pending()
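
device_issue_pending is where vchan_issue_pending() moves everything from vc.desc_submitted to vc.desc_issued; a true return is the driver's cue that there is now work to place on a physical channel. The two debug lines above are the two sides of that test:

	static void k3_dma_issue_pending(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct k3_dma_dev *d = to_k3_dma(chan->device);
		unsigned long flags;

		spin_lock_irqsave(&c->vc.lock, flags);
		if (vchan_issue_pending(&c->vc)) {
			/* ... add this vchan to the device's pending list and
			 * kick the tasklet that matches it to a free pchan ... */
			dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc);
		} else {
			dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc);
		}
		spin_unlock_irqrestore(&c->vc.lock, flags);
	}
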
430 dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc); in k3_dma_prep_memcpy()
461 return vchan_tx_prep(&c->vc, &ds->vd, flags); in k3_dma_prep_memcpy()
486 dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc); in k3_dma_prep_slave_sg()
518 return vchan_tx_prep(&c->vc, &ds->vd, flags); in k3_dma_prep_slave_sg()
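
Both prep callbacks converge on vchan_tx_prep(), which initialises the dma_async_tx_descriptor inside ds->vd and parks the descriptor on vc.desc_allocated until the client submits it. The wrapper layout below is an assumption; only the embedded struct virt_dma_desc is required. A sketch of the memcpy flavour:

	struct k3_dma_desc_sw {			/* name and fields assumed */
		struct virt_dma_desc	vd;	/* must be embedded */
		size_t			size;
		/* ... hw link-list items ... */
	};

	static struct dma_async_tx_descriptor *k3_dma_prep_memcpy(
		struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		size_t len, unsigned long flags)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct k3_dma_desc_sw *ds = kzalloc(sizeof(*ds), GFP_NOWAIT);

		if (!ds) {
			dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n",
				&c->vc);
			return NULL;
		}
		ds->size = len;
		/* ... record dst/src, build the hw descriptor chain ... */
		return vchan_tx_prep(&c->vc, &ds->vd, flags);
	}
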
563 c->ccfg |= c->vc.chan.chan_id << 4; in k3_dma_config()
576 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in k3_dma_terminate_all()
584 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_terminate_all()
585 vchan_get_all_descriptors(&c->vc, &head); in k3_dma_terminate_all()
593 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_terminate_all()
594 vchan_dma_desc_free_list(&c->vc, &head); in k3_dma_terminate_all()
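
terminate_all has to reclaim every descriptor, and the free happens after the unlock on purpose: vchan_dma_desc_free_list() runs the desc_free callback for each entry, which should not happen under vc.lock. Hence the local list that vchan_get_all_descriptors() splices onto:

	static int k3_dma_terminate_all(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		unsigned long flags;
		LIST_HEAD(head);

		spin_lock_irqsave(&c->vc.lock, flags);
		vchan_get_all_descriptors(&c->vc, &head);
		/* ... also stop and release the physical channel here ... */
		spin_unlock_irqrestore(&c->vc.lock, flags);
		vchan_dma_desc_free_list(&c->vc, &head);
		return 0;
	}
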
605 dev_dbg(d->slave.dev, "vchan %p: pause\n", &c->vc); in k3_dma_transfer_pause()
627 dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc); in k3_dma_transfer_resume()
628 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_transfer_resume()
633 } else if (!list_empty(&c->vc.desc_issued)) { in k3_dma_transfer_resume()
639 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_transfer_resume()
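
Resume is the interesting half: if descriptors were issued while the channel sat paused without a physical channel, vc.desc_issued is non-empty and the channel has to get back in line for one. A sketch of that branch; c->phy as the bound physical channel is an assumption:

	spin_lock_irqsave(&c->vc.lock, flags);
	if (c->phy) {				/* assumed: pchan still bound */
		/* just un-pause the hardware */
	} else if (!list_empty(&c->vc.desc_issued)) {
		/* paused before a pchan was found: requeue this vchan on
		 * the device's pending list so the tasklet services it */
	}
	spin_unlock_irqrestore(&c->vc.lock, flags);
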
667 return dma_get_slave_channel(&(d->chans[request].vc.chan)); in k3_of_dma_simple_xlate()
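
The OF translate callback shows once more why the embedding matters: what dmaengine clients get back is the dma_chan inside the vchan, claimed via dma_get_slave_channel(). Sketch, with the bounds field assumed:

	static struct dma_chan *k3_of_dma_simple_xlate(
		struct of_phandle_args *dma_spec, struct of_dma *ofdma)
	{
		struct k3_dma_dev *d = ofdma->of_dma_data;
		unsigned int request = dma_spec->args[0];

		if (request >= d->dma_requests)	/* d->dma_requests: assumed */
			return NULL;

		return dma_get_slave_channel(&(d->chans[request].vc.chan));
	}
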
748 c->vc.desc_free = k3_dma_free_desc; in k3_dma_probe()
749 vchan_init(&c->vc, &d->slave); in k3_dma_probe()
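
Per channel, probe only owes virt-dma two things: a desc_free callback (how virt-dma hands the driver's wrapper back when a descriptor is finally released) and vchan_init(), which sets up the lists, the lock, and the completion tasklet, and links vc.chan onto d->slave.channels. Sketch of the loop body:

	for (i = 0; i < d->dma_channels; i++) {	/* channel count: assumed name */
		struct k3_dma_chan *c = &d->chans[i];

		c->vc.desc_free = k3_dma_free_desc;
		vchan_init(&c->vc, &d->slave);
	}
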
791 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_remove()
792 list_del(&c->vc.chan.device_node); in k3_dma_remove()
793 tasklet_kill(&c->vc.task); in k3_dma_remove()
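
Remove mirrors that: every channel vchan_init() linked in must be unlinked, and each per-channel tasklet killed, before the device goes away. The _safe iterator matters because the loop deletes as it walks:

	list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) {
		list_del(&c->vc.chan.device_node);	/* undo vchan_init's link */
		tasklet_kill(&c->vc.task);		/* vc.task: completion tasklet */
	}
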