Lines matching references to "c" (struct sa11x0_dma_chan *) in drivers/dma/sa11x0-dma.c

140 static struct sa11x0_dma_desc *sa11x0_dma_next_desc(struct sa11x0_dma_chan *c)  in sa11x0_dma_next_desc()  argument
142 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in sa11x0_dma_next_desc()
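
Lines 140-142 are all the index shows of this helper. Under the virt-dma framework the usual body is a one-line container_of() conversion; a minimal sketch, assuming the driver embeds its struct virt_dma_desc as the field vd (which the vchan_tx_prep() calls at lines 605 and 664 confirm):

        static struct sa11x0_dma_desc *sa11x0_dma_next_desc(struct sa11x0_dma_chan *c)
        {
                /* Peek the head of c->vc.desc_issued; caller holds c->vc.lock. */
                struct virt_dma_desc *vd = vchan_next_desc(&c->vc);

                return vd ? container_of(vd, struct sa11x0_dma_desc, vd) : NULL;
        }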
163 struct sa11x0_dma_chan *c) in sa11x0_dma_start_sg() argument
182 struct sa11x0_dma_desc *txn = sa11x0_dma_next_desc(c); in sa11x0_dma_start_sg()
227 struct sa11x0_dma_chan *c) in sa11x0_dma_complete() argument
249 sa11x0_dma_start_sg(p, c); in sa11x0_dma_complete()
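
The hits at lines 227-249 outline the completion path: once the last scatterlist entry of a transaction finishes, its cookie goes back to the virt-dma core and the next entry is loaded. A sketch under that reading; the bookkeeping fields p->sg_done and p->txd_done are assumptions, and the real function also hands over to the next queued transaction:

        static void sa11x0_dma_complete(struct sa11x0_dma_phy *p,
                struct sa11x0_dma_chan *c)
        {
                struct sa11x0_dma_desc *txd = p->txd_done;      /* assumed field */

                if (++p->sg_done == txd->sglen) {
                        /* All sg entries done: complete the cookie (vc.lock held). */
                        vchan_cookie_complete(&txd->vd);
                        p->sg_done = 0;
                        p->txd_done = NULL;
                }

                /* Feed the next scatterlist entry to the hardware (line 249). */
                sa11x0_dma_start_sg(p, c);
        }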
256 struct sa11x0_dma_chan *c; in sa11x0_dma_irq() local
279 c = p->vchan; in sa11x0_dma_irq()
280 if (c) { in sa11x0_dma_irq()
283 spin_lock_irqsave(&c->vc.lock, flags); in sa11x0_dma_irq()
291 if (c->phy == p) { in sa11x0_dma_irq()
293 sa11x0_dma_complete(p, c); in sa11x0_dma_irq()
295 sa11x0_dma_complete(p, c); in sa11x0_dma_irq()
297 spin_unlock_irqrestore(&c->vc.lock, flags); in sa11x0_dma_irq()
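
The interrupt handler (lines 256-297) maps the physical channel back to whichever virtual channel currently owns it and completes work under the vchan lock. The two sa11x0_dma_complete() calls at lines 293 and 295 correspond to the controller's two buffer register sets, each of which signals done independently. A condensed sketch; the status register and bit names (DMA_DCSR_R/C, DCSR_DONEA/B) follow the SA-11x0 register layout but are assumptions here, and error handling is elided:

        static irqreturn_t sa11x0_dma_irq(int irq, void *dev_id)
        {
                struct sa11x0_dma_phy *p = dev_id;
                struct sa11x0_dma_chan *c;
                unsigned long flags;
                u32 dcsr;

                dcsr = readl_relaxed(p->base + DMA_DCSR_R);
                if (!(dcsr & (DCSR_DONEA | DCSR_DONEB)))
                        return IRQ_NONE;

                /* Acknowledge the done bits before completing. */
                writel_relaxed(dcsr & (DCSR_DONEA | DCSR_DONEB), p->base + DMA_DCSR_C);

                c = p->vchan;
                if (c) {
                        spin_lock_irqsave(&c->vc.lock, flags);
                        /* Complete only if this phy still belongs to c (line 291). */
                        if (c->phy == p) {
                                if (dcsr & DCSR_DONEA)
                                        sa11x0_dma_complete(p, c);
                                if (dcsr & DCSR_DONEB)
                                        sa11x0_dma_complete(p, c);
                        }
                        spin_unlock_irqrestore(&c->vc.lock, flags);
                }

                return IRQ_HANDLED;
        }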
303 static void sa11x0_dma_start_txd(struct sa11x0_dma_chan *c) in sa11x0_dma_start_txd() argument
305 struct sa11x0_dma_desc *txd = sa11x0_dma_next_desc(c); in sa11x0_dma_start_txd()
309 struct sa11x0_dma_phy *p = c->phy; in sa11x0_dma_start_txd()
325 sa11x0_dma_start_sg(p, c); in sa11x0_dma_start_txd()
326 sa11x0_dma_start_sg(p, c); in sa11x0_dma_start_txd()
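
Lines 303-326 show the handoff of a queued descriptor to hardware. The back-to-back sa11x0_dma_start_sg() calls at lines 325-326 prime both halves of the controller's double-buffered descriptor registers. A reconstruction, with the p->txd_load and p->sg_load fields assumed:

        static void sa11x0_dma_start_txd(struct sa11x0_dma_chan *c)
        {
                struct sa11x0_dma_desc *txd = sa11x0_dma_next_desc(c);

                if (txd) {
                        struct sa11x0_dma_phy *p = c->phy;

                        /* Take it off the issued list and make it the
                         * transaction being loaded. */
                        list_del(&txd->vd.node);
                        p->txd_load = txd;
                        p->sg_load = 0;

                        /* Prime both double-buffered register sets (A and B). */
                        sa11x0_dma_start_sg(p, c);
                        sa11x0_dma_start_sg(p, c);
                }
        }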
334 struct sa11x0_dma_chan *c; in sa11x0_dma_tasklet() local
339 list_for_each_entry(c, &d->slave.channels, vc.chan.device_node) { in sa11x0_dma_tasklet()
340 spin_lock_irq(&c->vc.lock); in sa11x0_dma_tasklet()
341 p = c->phy; in sa11x0_dma_tasklet()
343 sa11x0_dma_start_txd(c); in sa11x0_dma_tasklet()
349 c->phy = NULL; in sa11x0_dma_tasklet()
353 spin_unlock_irq(&c->vc.lock); in sa11x0_dma_tasklet()
361 c = list_first_entry(&d->chan_pending, in sa11x0_dma_tasklet()
363 list_del_init(&c->node); in sa11x0_dma_tasklet()
368 p->vchan = c; in sa11x0_dma_tasklet()
370 dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n", pch, &c->vc); in sa11x0_dma_tasklet()
378 c = p->vchan; in sa11x0_dma_tasklet()
380 spin_lock_irq(&c->vc.lock); in sa11x0_dma_tasklet()
381 c->phy = p; in sa11x0_dma_tasklet()
383 sa11x0_dma_start_txd(c); in sa11x0_dma_tasklet()
384 spin_unlock_irq(&c->vc.lock); in sa11x0_dma_tasklet()
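
The tasklet (lines 334-384) is the driver's scheduler: the first loop restarts or releases each vchan's physical channel, then pending vchans are popped off d->chan_pending and bound to free physical channels (lines 361-370), and finally each newly bound vchan gets c->phy set and its first descriptor started under its own lock (lines 378-384). A hypothetical helper factoring out the allocation pass; NR_PHY_CHAN, d->lock, and d->phy[] are assumptions:

        static void sa11x0_dma_alloc_phys(struct sa11x0_dma_dev *d)
        {
                unsigned int pch;

                spin_lock_irq(&d->lock);
                for (pch = 0; pch < NR_PHY_CHAN; pch++) {
                        struct sa11x0_dma_phy *p = &d->phy[pch];

                        if (!p->vchan && !list_empty(&d->chan_pending)) {
                                struct sa11x0_dma_chan *c;

                                c = list_first_entry(&d->chan_pending,
                                        struct sa11x0_dma_chan, node);
                                list_del_init(&c->node);
                                p->vchan = c;
                                dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n",
                                        pch, &c->vc);
                        }
                }
                spin_unlock_irq(&d->lock);
        }

The c->phy assignment and sa11x0_dma_start_txd() call then happen under vc.lock, after d->lock is dropped, since vc.lock nests outside d->lock elsewhere in the driver.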
394 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_free_chan_resources() local
399 list_del_init(&c->node); in sa11x0_dma_free_chan_resources()
402 vchan_free_chan_resources(&c->vc); in sa11x0_dma_free_chan_resources()
424 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_tx_status() local
431 ret = dma_cookie_status(&c->vc.chan, cookie, state); in sa11x0_dma_tx_status()
436 return c->status; in sa11x0_dma_tx_status()
438 spin_lock_irqsave(&c->vc.lock, flags); in sa11x0_dma_tx_status()
439 p = c->phy; in sa11x0_dma_tx_status()
445 vd = vchan_find_desc(&c->vc, cookie); in sa11x0_dma_tx_status()
461 ret = c->status; in sa11x0_dma_tx_status()
492 spin_unlock_irqrestore(&c->vc.lock, flags); in sa11x0_dma_tx_status()
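
tx_status (lines 424-492) follows the standard dmaengine residue pattern: dma_cookie_status() answers immediately for completed cookies (line 431), vchan_find_desc() (line 445) covers descriptors still queued in software, and only a descriptor already on hardware requires reading the channel registers. A simplified sketch that omits that hardware-readback branch:

        static enum dma_status sa11x0_dma_tx_status(struct dma_chan *chan,
                dma_cookie_t cookie, struct dma_tx_state *state)
        {
                struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan);
                struct virt_dma_desc *vd;
                unsigned long flags;
                enum dma_status ret;
                size_t bytes = 0;

                ret = dma_cookie_status(&c->vc.chan, cookie, state);
                if (ret == DMA_COMPLETE)
                        return ret;

                if (!state)
                        return c->status;       /* matches line 436 */

                spin_lock_irqsave(&c->vc.lock, flags);
                vd = vchan_find_desc(&c->vc, cookie);
                if (vd)
                        /* Still queued in software: the full size remains. */
                        bytes = container_of(vd, struct sa11x0_dma_desc, vd)->size;
                spin_unlock_irqrestore(&c->vc.lock, flags);

                dma_set_residue(state, bytes);
                return c->status;
        }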
506 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_issue_pending() local
510 spin_lock_irqsave(&c->vc.lock, flags); in sa11x0_dma_issue_pending()
511 if (vchan_issue_pending(&c->vc)) { in sa11x0_dma_issue_pending()
512 if (!c->phy) { in sa11x0_dma_issue_pending()
514 if (list_empty(&c->node)) { in sa11x0_dma_issue_pending()
515 list_add_tail(&c->node, &d->chan_pending); in sa11x0_dma_issue_pending()
517 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in sa11x0_dma_issue_pending()
522 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in sa11x0_dma_issue_pending()
523 spin_unlock_irqrestore(&c->vc.lock, flags); in sa11x0_dma_issue_pending()
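
The listing is nearly the whole of issue_pending(); the missing pieces are the device-level lock around the pending list and the tasklet kick. A reconstruction, with to_sa11x0_dma(), d->lock, and d->task assumed:

        static void sa11x0_dma_issue_pending(struct dma_chan *chan)
        {
                struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan);
                struct sa11x0_dma_dev *d = to_sa11x0_dma(chan->device);
                unsigned long flags;

                spin_lock_irqsave(&c->vc.lock, flags);
                if (vchan_issue_pending(&c->vc)) {
                        /* Queue for a physical channel only if none is bound. */
                        if (!c->phy) {
                                spin_lock(&d->lock);
                                if (list_empty(&c->node)) {
                                        list_add_tail(&c->node, &d->chan_pending);
                                        tasklet_schedule(&d->task);
                                        dev_dbg(d->slave.dev, "vchan %p: issued\n",
                                                &c->vc);
                                }
                                spin_unlock(&d->lock);
                        }
                } else
                        dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc);
                spin_unlock_irqrestore(&c->vc.lock, flags);
        }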
530 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_prep_slave_sg() local
537 if (dir != (c->ddar & DDAR_RW ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV)) { in sa11x0_dma_prep_slave_sg()
539 &c->vc, c->ddar, dir); in sa11x0_dma_prep_slave_sg()
555 &c->vc, addr); in sa11x0_dma_prep_slave_sg()
562 dev_dbg(chan->device->dev, "vchan %p: kzalloc failed\n", &c->vc); in sa11x0_dma_prep_slave_sg()
598 txd->ddar = c->ddar; in sa11x0_dma_prep_slave_sg()
603 &c->vc, &txd->vd, txd->size, txd->sglen); in sa11x0_dma_prep_slave_sg()
605 return vchan_tx_prep(&c->vc, &txd->vd, flags); in sa11x0_dma_prep_slave_sg()
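
None of this is visible to a client: a peripheral driver reaches sa11x0_dma_prep_slave_sg() through the generic dmaengine wrappers. A hypothetical submission sequence; submit_tx, buf, and len are illustrative, and chan is a channel previously obtained via dma_request_channel():

        #include <linux/dmaengine.h>
        #include <linux/scatterlist.h>

        static int submit_tx(struct dma_chan *chan, void *buf, size_t len)
        {
                struct dma_async_tx_descriptor *desc;
                struct scatterlist sg;
                dma_cookie_t cookie;

                sg_init_one(&sg, buf, len);
                if (!dma_map_sg(chan->device->dev, &sg, 1, DMA_TO_DEVICE))
                        return -ENOMEM;

                /* Lands in sa11x0_dma_prep_slave_sg() via the dma_device ops. */
                desc = dmaengine_prep_slave_sg(chan, &sg, 1, DMA_MEM_TO_DEV,
                                               DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
                if (!desc)
                        return -EINVAL;

                cookie = dmaengine_submit(desc);
                dma_async_issue_pending(chan);  /* -> sa11x0_dma_issue_pending() */
                return dma_submit_error(cookie);
        }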
612 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_prep_dma_cyclic() local
617 if (dir != (c->ddar & DDAR_RW ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV)) { in sa11x0_dma_prep_dma_cyclic()
619 &c->vc, c->ddar, dir); in sa11x0_dma_prep_dma_cyclic()
632 dev_dbg(chan->device->dev, "vchan %p: kzalloc failed\n", &c->vc); in sa11x0_dma_prep_dma_cyclic()
658 txd->ddar = c->ddar; in sa11x0_dma_prep_dma_cyclic()
664 return vchan_tx_prep(&c->vc, &txd->vd, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); in sa11x0_dma_prep_dma_cyclic()
670 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_device_config() local
671 u32 ddar = c->ddar & ((0xf << 4) | DDAR_RW); in sa11x0_dma_device_config()
696 dev_dbg(c->vc.chan.device->dev, "vchan %p: dma_slave_config addr %x width %u burst %u\n", in sa11x0_dma_device_config()
697 &c->vc, addr, width, maxburst); in sa11x0_dma_device_config()
699 c->ddar = ddar | (addr & 0xf0000000) | (addr & 0x003ffffc) << 6; in sa11x0_dma_device_config()
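
Line 699 is the interesting part of device_config(): the peripheral address is packed into the DDAR with its top nibble kept in place and its word-aligned low bits shifted up by 6. A standalone worked example of that packing; 0x80050000 is an illustrative address, not one taken from the driver:

        #include <stdint.h>
        #include <stdio.h>

        /* Same expression as line 699 of the driver. */
        static uint32_t pack_ddar_addr(uint32_t ddar, uint32_t addr)
        {
                return ddar | (addr & 0xf0000000) | (addr & 0x003ffffc) << 6;
        }

        int main(void)
        {
                /* 0x80000000 | (0x00050000 << 6) == 0x81400000 */
                printf("%08x\n", pack_ddar_addr(0, 0x80050000));
                return 0;
        }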
706 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_device_pause() local
712 dev_dbg(d->slave.dev, "vchan %p: pause\n", &c->vc); in sa11x0_dma_device_pause()
713 spin_lock_irqsave(&c->vc.lock, flags); in sa11x0_dma_device_pause()
714 if (c->status == DMA_IN_PROGRESS) { in sa11x0_dma_device_pause()
715 c->status = DMA_PAUSED; in sa11x0_dma_device_pause()
717 p = c->phy; in sa11x0_dma_device_pause()
722 list_del_init(&c->node); in sa11x0_dma_device_pause()
726 spin_unlock_irqrestore(&c->vc.lock, flags); in sa11x0_dma_device_pause()
733 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_device_resume() local
739 dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc); in sa11x0_dma_device_resume()
740 spin_lock_irqsave(&c->vc.lock, flags); in sa11x0_dma_device_resume()
741 if (c->status == DMA_PAUSED) { in sa11x0_dma_device_resume()
742 c->status = DMA_IN_PROGRESS; in sa11x0_dma_device_resume()
744 p = c->phy; in sa11x0_dma_device_resume()
747 } else if (!list_empty(&c->vc.desc_issued)) { in sa11x0_dma_device_resume()
749 list_add_tail(&c->node, &d->chan_pending); in sa11x0_dma_device_resume()
753 spin_unlock_irqrestore(&c->vc.lock, flags); in sa11x0_dma_device_resume()
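
Pause and resume mirror each other: pause (lines 714-722) either stops the bound physical channel or pulls the vchan off the pending list, while resume restarts the channel or requeues the vchan for the tasklet. A reconstruction of resume; the register write on the restart path (DCSR_RUN/DCSR_IE via DMA_DCSR_S) is an assumption:

        static int sa11x0_dma_device_resume(struct dma_chan *chan)
        {
                struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan);
                struct sa11x0_dma_dev *d = to_sa11x0_dma(chan->device);
                struct sa11x0_dma_phy *p;
                unsigned long flags;

                dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc);
                spin_lock_irqsave(&c->vc.lock, flags);
                if (c->status == DMA_PAUSED) {
                        c->status = DMA_IN_PROGRESS;

                        p = c->phy;
                        if (p) {
                                /* Restart the hardware (names assumed). */
                                writel(DCSR_RUN | DCSR_IE, p->base + DMA_DCSR_S);
                        } else if (!list_empty(&c->vc.desc_issued)) {
                                /* No phy bound: let the tasklet pick it up. */
                                spin_lock(&d->lock);
                                list_add_tail(&c->node, &d->chan_pending);
                                spin_unlock(&d->lock);
                        }
                }
                spin_unlock_irqrestore(&c->vc.lock, flags);
                return 0;
        }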
760 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_device_terminate_all() local
766 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in sa11x0_dma_device_terminate_all()
768 spin_lock_irqsave(&c->vc.lock, flags); in sa11x0_dma_device_terminate_all()
769 vchan_get_all_descriptors(&c->vc, &head); in sa11x0_dma_device_terminate_all()
771 p = c->phy; in sa11x0_dma_device_terminate_all()
789 c->phy = NULL; in sa11x0_dma_device_terminate_all()
795 spin_unlock_irqrestore(&c->vc.lock, flags); in sa11x0_dma_device_terminate_all()
796 vchan_dma_desc_free_list(&c->vc, &head); in sa11x0_dma_device_terminate_all()
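
terminate_all shows the canonical virt-dma teardown ordering: every descriptor is collected while vc.lock is held (line 769), but vchan_dma_desc_free_list() runs only after the unlock (lines 795-796), because the free callbacks must not run under the channel lock. A skeleton with the driver-specific hardware shutdown elided:

        static int sa11x0_dma_device_terminate_all(struct dma_chan *chan)
        {
                struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan);
                unsigned long flags;
                LIST_HEAD(head);

                spin_lock_irqsave(&c->vc.lock, flags);
                vchan_get_all_descriptors(&c->vc, &head);

                /* Elided (lines 771-789): stop the physical channel, move
                 * in-flight descriptors onto head, clear c->phy and p->vchan. */

                spin_unlock_irqrestore(&c->vc.lock, flags);

                /* Free everything outside the lock. */
                vchan_dma_desc_free_list(&c->vc, &head);
                return 0;
        }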
842 struct sa11x0_dma_chan *c; in sa11x0_dma_init_dmadev() local
844 c = kzalloc(sizeof(*c), GFP_KERNEL); in sa11x0_dma_init_dmadev()
845 if (!c) { in sa11x0_dma_init_dmadev()
850 c->status = DMA_IN_PROGRESS; in sa11x0_dma_init_dmadev()
851 c->ddar = chan_desc[i].ddar; in sa11x0_dma_init_dmadev()
852 c->name = chan_desc[i].name; in sa11x0_dma_init_dmadev()
853 INIT_LIST_HEAD(&c->node); in sa11x0_dma_init_dmadev()
855 c->vc.desc_free = sa11x0_dma_free_desc; in sa11x0_dma_init_dmadev()
856 vchan_init(&c->vc, dmadev); in sa11x0_dma_init_dmadev()
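
Channel creation (lines 842-856) loops over a static chan_desc[] table that supplies each channel's DDAR template and name; c->status starts as DMA_IN_PROGRESS because the driver reuses it as the pause state (lines 714-715). A reconstruction of the loop body, with the error-path message wording assumed:

        for (i = 0; i < ARRAY_SIZE(chan_desc); i++) {
                struct sa11x0_dma_chan *c;

                c = kzalloc(sizeof(*c), GFP_KERNEL);
                if (!c) {
                        dev_err(dev, "no memory for channel %u\n", i);
                        return -ENOMEM;
                }

                c->status = DMA_IN_PROGRESS;    /* reused as the pause state */
                c->ddar = chan_desc[i].ddar;
                c->name = chan_desc[i].name;
                INIT_LIST_HEAD(&c->node);       /* link for d->chan_pending */

                c->vc.desc_free = sa11x0_dma_free_desc;
                vchan_init(&c->vc, dmadev);     /* adds c to dmadev->channels */
        }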
883 struct sa11x0_dma_chan *c, *cn; in sa11x0_dma_free_channels() local
885 list_for_each_entry_safe(c, cn, &dmadev->channels, vc.chan.device_node) { in sa11x0_dma_free_channels()
886 list_del(&c->vc.chan.device_node); in sa11x0_dma_free_channels()
887 tasklet_kill(&c->vc.task); in sa11x0_dma_free_channels()
888 kfree(c); in sa11x0_dma_free_channels()
1082 struct sa11x0_dma_chan *c = to_sa11x0_dma_chan(chan); in sa11x0_dma_filter_fn() local
1085 return !strcmp(c->name, p); in sa11x0_dma_filter_fn()
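
The filter simply compares the name stored at line 852 against the caller's parameter, so a client selects a channel by name. Hypothetical usage; "sa11x0-ssp" stands in for whatever name chan_desc[] actually defines:

        static struct dma_chan *request_sa11x0_chan(const char *name)
        {
                dma_cap_mask_t mask;

                dma_cap_zero(mask);
                dma_cap_set(DMA_SLAVE, mask);
                /* name must match one of the chan_desc[].name strings. */
                return dma_request_channel(mask, sa11x0_dma_filter_fn, (void *)name);
        }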