Lines matching references to c (the struct k3_dma_chan pointer) in the k3 DMA engine driver
193 struct k3_dma_chan *c; in k3_dma_int_handler() local
205 c = p->vchan; in k3_dma_int_handler()
206 if (c) { in k3_dma_int_handler()
209 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_int_handler()
212 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_int_handler()
231 static int k3_dma_start_txd(struct k3_dma_chan *c) in k3_dma_start_txd() argument
233 struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device); in k3_dma_start_txd()
234 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in k3_dma_start_txd()
236 if (!c->phy) in k3_dma_start_txd()
239 if (BIT(c->phy->idx) & k3_dma_get_chan_stat(d)) in k3_dma_start_txd()
250 c->phy->ds_run = ds; in k3_dma_start_txd()
251 c->phy->ds_done = NULL; in k3_dma_start_txd()
253 k3_dma_set_desc(c->phy, &ds->desc_hw[0]); in k3_dma_start_txd()
256 c->phy->ds_done = NULL; in k3_dma_start_txd()
257 c->phy->ds_run = NULL; in k3_dma_start_txd()
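
The k3_dma_start_txd() fragments above show the usual virt-dma pattern: pop the next queued descriptor off the virtual channel and program it onto the bound physical channel, backing off while the hardware channel is still busy. A minimal sketch of that pattern follows; the software-descriptor type name (k3_dma_desc_sw) and the list handling between the quoted lines are assumptions, only the calls visible in the listing are confirmed.

/* Hedged sketch of the start-next-descriptor pattern shown above.
 * The descriptor type name and exact list handling are assumptions. */
static int k3_dma_start_txd_sketch(struct k3_dma_chan *c)
{
	struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device);
	struct virt_dma_desc *vd = vchan_next_desc(&c->vc);

	if (!c->phy)
		return -EAGAIN;			/* no physical channel bound yet */

	if (BIT(c->phy->idx) & k3_dma_get_chan_stat(d))
		return -EAGAIN;			/* hardware channel still busy */

	if (vd) {
		struct k3_dma_desc_sw *ds =
			container_of(vd, struct k3_dma_desc_sw, vd);

		list_del(&ds->vd.node);		/* take it off the issued list */
		c->phy->ds_run = ds;		/* remember what is in flight */
		c->phy->ds_done = NULL;
		k3_dma_set_desc(c->phy, &ds->desc_hw[0]);	/* program the hardware */
		return 0;
	}

	c->phy->ds_run = NULL;
	c->phy->ds_done = NULL;
	return -EAGAIN;
}
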
265 struct k3_dma_chan *c, *cn; in k3_dma_tasklet() local
269 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_tasklet()
270 spin_lock_irq(&c->vc.lock); in k3_dma_tasklet()
271 p = c->phy; in k3_dma_tasklet()
273 if (k3_dma_start_txd(c)) { in k3_dma_tasklet()
277 c->phy = NULL; in k3_dma_tasklet()
281 spin_unlock_irq(&c->vc.lock); in k3_dma_tasklet()
290 c = list_first_entry(&d->chan_pending, in k3_dma_tasklet()
293 list_del_init(&c->node); in k3_dma_tasklet()
296 p->vchan = c; in k3_dma_tasklet()
297 c->phy = p; in k3_dma_tasklet()
298 dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n", pch, &c->vc); in k3_dma_tasklet()
306 c = p->vchan; in k3_dma_tasklet()
307 if (c) { in k3_dma_tasklet()
308 spin_lock_irq(&c->vc.lock); in k3_dma_tasklet()
309 k3_dma_start_txd(c); in k3_dma_tasklet()
310 spin_unlock_irq(&c->vc.lock); in k3_dma_tasklet()
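
The k3_dma_tasklet() fragments show the scheduler half of the driver: finished virtual channels hand their physical channel back, and a free physical channel is then given to the first virtual channel waiting on d->chan_pending. A condensed sketch of that pairing step follows; the device-level lock name and the surrounding loop over free physical channels are assumptions.

/* Sketch of the physical-channel allocation step seen above: the first
 * virtual channel waiting on d->chan_pending is paired with a free
 * physical channel p. Lock name and outer loop are assumptions. */
static void bind_pending_vchan(struct k3_dma_dev *d, struct k3_dma_phy *p)
{
	struct k3_dma_chan *c;

	spin_lock_irq(&d->lock);
	if (!list_empty(&d->chan_pending)) {
		c = list_first_entry(&d->chan_pending,
				     struct k3_dma_chan, node);
		list_del_init(&c->node);	/* no longer waiting */
		p->vchan = c;			/* pair both directions */
		c->phy = p;
	}
	spin_unlock_irq(&d->lock);
}
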
318 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_free_chan_resources() local
323 list_del_init(&c->node); in k3_dma_free_chan_resources()
326 vchan_free_chan_resources(&c->vc); in k3_dma_free_chan_resources()
327 c->ccfg = 0; in k3_dma_free_chan_resources()
333 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_tx_status() local
341 ret = dma_cookie_status(&c->vc.chan, cookie, state); in k3_dma_tx_status()
345 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_tx_status()
346 p = c->phy; in k3_dma_tx_status()
347 ret = c->status; in k3_dma_tx_status()
353 vd = vchan_find_desc(&c->vc, cookie); in k3_dma_tx_status()
372 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_tx_status()
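
The state and residue that k3_dma_tx_status() computes are read on the consumer side through the generic dmaengine helper. A minimal, hypothetical client-side poll (the channel and cookie are assumed to come from an earlier dmaengine_submit() on this channel) might look like:

#include <linux/dmaengine.h>

/* Hypothetical consumer-side status poll; 'chan' and 'cookie' are assumed
 * to belong to a transfer submitted earlier on this channel. */
static bool transfer_done(struct dma_chan *chan, dma_cookie_t cookie)
{
	struct dma_tx_state state;
	enum dma_status status;

	status = dmaengine_tx_status(chan, cookie, &state);
	pr_debug("dma status %d, residue %u bytes\n", status, state.residue);

	return status == DMA_COMPLETE;
}
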
379 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_issue_pending() local
383 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_issue_pending()
385 if (vchan_issue_pending(&c->vc)) { in k3_dma_issue_pending()
387 if (!c->phy) { in k3_dma_issue_pending()
388 if (list_empty(&c->node)) { in k3_dma_issue_pending()
390 list_add_tail(&c->node, &d->chan_pending); in k3_dma_issue_pending()
393 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in k3_dma_issue_pending()
398 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in k3_dma_issue_pending()
399 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_issue_pending()
419 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_prep_memcpy() local
430 dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc); in k3_dma_prep_memcpy()
438 if (!c->ccfg) { in k3_dma_prep_memcpy()
440 c->ccfg = CX_CFG_SRCINCR | CX_CFG_DSTINCR | CX_CFG_EN; in k3_dma_prep_memcpy()
441 c->ccfg |= (0xf << 20) | (0xf << 24); /* burst = 16 */ in k3_dma_prep_memcpy()
442 c->ccfg |= (0x3 << 12) | (0x3 << 16); /* width = 64 bit */ in k3_dma_prep_memcpy()
447 k3_dma_fill_desc(ds, dst, src, copy, num++, c->ccfg); in k3_dma_prep_memcpy()
449 if (c->dir == DMA_MEM_TO_DEV) { in k3_dma_prep_memcpy()
451 } else if (c->dir == DMA_DEV_TO_MEM) { in k3_dma_prep_memcpy()
461 return vchan_tx_prep(&c->vc, &ds->vd, flags); in k3_dma_prep_memcpy()
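
k3_dma_prep_memcpy() and k3_dma_issue_pending() above are the provider side of the standard dmaengine memcpy flow. A hedged consumer-side sketch follows; error handling is trimmed, channel release on the success path is omitted, and src/dst are assumed to be already DMA-mapped addresses.

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

/* Hypothetical client: request any memcpy-capable channel, queue one copy,
 * and kick the engine. */
static int demo_memcpy(dma_addr_t dst, dma_addr_t src, size_t len)
{
	dma_cap_mask_t mask;
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);
	chan = dma_request_channel(mask, NULL, NULL);
	if (!chan)
		return -ENODEV;

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
	if (!tx) {
		dma_release_channel(chan);
		return -ENOMEM;
	}

	cookie = dmaengine_submit(tx);		/* queues onto the vchan's issued list */
	dma_async_issue_pending(chan);		/* dispatches to the provider's issue_pending */
	return dma_submit_error(cookie) ? -EIO : 0;
}
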
468 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_prep_slave_sg() local
486 dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc); in k3_dma_prep_slave_sg()
503 dst = c->dev_addr; in k3_dma_prep_slave_sg()
505 src = c->dev_addr; in k3_dma_prep_slave_sg()
509 k3_dma_fill_desc(ds, dst, src, len, num++, c->ccfg); in k3_dma_prep_slave_sg()
518 return vchan_tx_prep(&c->vc, &ds->vd, flags); in k3_dma_prep_slave_sg()
524 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_config() local
530 c->dir = cfg->direction; in k3_dma_config()
531 if (c->dir == DMA_DEV_TO_MEM) { in k3_dma_config()
532 c->ccfg = CX_CFG_DSTINCR; in k3_dma_config()
533 c->dev_addr = cfg->src_addr; in k3_dma_config()
536 } else if (c->dir == DMA_MEM_TO_DEV) { in k3_dma_config()
537 c->ccfg = CX_CFG_SRCINCR; in k3_dma_config()
538 c->dev_addr = cfg->dst_addr; in k3_dma_config()
553 c->ccfg |= (val << 12) | (val << 16); in k3_dma_config()
559 c->ccfg |= (val << 20) | (val << 24); in k3_dma_config()
560 c->ccfg |= CX_CFG_MEM2PER | CX_CFG_EN; in k3_dma_config()
563 c->ccfg |= c->vc.chan.chan_id << 4; in k3_dma_config()
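
k3_dma_config() translates a generic struct dma_slave_config into the channel's ccfg word (address-increment, width, and burst fields) plus the peripheral address in c->dev_addr, which k3_dma_prep_slave_sg() then reuses for every segment. A hypothetical peripheral client would feed it as below; the FIFO address, scatterlist, and burst/width values are placeholders, not taken from the driver.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Hypothetical client setup for a mem-to-device transfer. */
static int demo_slave_tx(struct dma_chan *chan, dma_addr_t fifo_addr,
			 struct scatterlist *sgl, unsigned int nents)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_addr,			/* becomes c->dev_addr */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,	/* width bits in ccfg */
		.dst_maxburst	= 8,				/* burst bits in ccfg */
	};
	struct dma_async_tx_descriptor *tx;
	int ret;

	ret = dmaengine_slave_config(chan, &cfg);	/* handled by the provider's config op */
	if (ret)
		return ret;

	tx = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
				     DMA_PREP_INTERRUPT);
	if (!tx)
		return -ENOMEM;

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);
	return 0;
}
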
570 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_terminate_all() local
572 struct k3_dma_phy *p = c->phy; in k3_dma_terminate_all()
576 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in k3_dma_terminate_all()
580 list_del_init(&c->node); in k3_dma_terminate_all()
584 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_terminate_all()
585 vchan_get_all_descriptors(&c->vc, &head); in k3_dma_terminate_all()
589 c->phy = NULL; in k3_dma_terminate_all()
593 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_terminate_all()
594 vchan_dma_desc_free_list(&c->vc, &head); in k3_dma_terminate_all()
601 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_transfer_pause() local
603 struct k3_dma_phy *p = c->phy; in k3_dma_transfer_pause()
605 dev_dbg(d->slave.dev, "vchan %p: pause\n", &c->vc); in k3_dma_transfer_pause()
606 if (c->status == DMA_IN_PROGRESS) { in k3_dma_transfer_pause()
607 c->status = DMA_PAUSED; in k3_dma_transfer_pause()
612 list_del_init(&c->node); in k3_dma_transfer_pause()
622 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_transfer_resume() local
624 struct k3_dma_phy *p = c->phy; in k3_dma_transfer_resume()
627 dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc); in k3_dma_transfer_resume()
628 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_transfer_resume()
629 if (c->status == DMA_PAUSED) { in k3_dma_transfer_resume()
630 c->status = DMA_IN_PROGRESS; in k3_dma_transfer_resume()
633 } else if (!list_empty(&c->vc.desc_issued)) { in k3_dma_transfer_resume()
635 list_add_tail(&c->node, &d->chan_pending); in k3_dma_transfer_resume()
639 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_transfer_resume()
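
The terminate, pause, and resume callbacks above are reached through the corresponding generic dmaengine wrappers; a hypothetical consumer controlling an in-flight channel would simply call:

#include <linux/dmaengine.h>

/* Hypothetical flow control on an active channel. Each wrapper dispatches
 * to the provider callbacks listed above. */
static void demo_channel_control(struct dma_chan *chan)
{
	dmaengine_pause(chan);		/* c->status becomes DMA_PAUSED */
	dmaengine_resume(chan);		/* back to DMA_IN_PROGRESS */
	dmaengine_terminate_all(chan);	/* drop all queued descriptors */
}
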
744 struct k3_dma_chan *c = &d->chans[i]; in k3_dma_probe() local
746 c->status = DMA_IN_PROGRESS; in k3_dma_probe()
747 INIT_LIST_HEAD(&c->node); in k3_dma_probe()
748 c->vc.desc_free = k3_dma_free_desc; in k3_dma_probe()
749 vchan_init(&c->vc, &d->slave); in k3_dma_probe()
785 struct k3_dma_chan *c, *cn; in k3_dma_remove() local
791 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_remove()
792 list_del(&c->vc.chan.device_node); in k3_dma_remove()
793 tasklet_kill(&c->vc.task); in k3_dma_remove()