Lines matching refs:c

Cross-reference hits for the per-channel pointer c (struct k3_dma_chan *) in the k3-dma driver. Each entry gives the driver's own source line number, the matching line, and the enclosing function; declaration sites are tagged "local" or "argument".

192 struct k3_dma_chan *c; in k3_dma_int_handler() local
204 c = p->vchan; in k3_dma_int_handler()
205 if (c) { in k3_dma_int_handler()
208 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_int_handler()
211 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_int_handler()
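These k3_dma_int_handler() hits show the completion path: the handler maps each signalling physical channel back to its owning virtual channel (p->vchan) and completes the running descriptor under the vchan lock. A condensed sketch of the surrounding logic, assuming the standard virt-dma helpers; the register names (INT_STAT, INT_TC1_RAW) and the ds_run/ds_done bookkeeping are reconstructed, not shown in the listing:

	static irqreturn_t k3_dma_int_handler(int irq, void *dev_id)
	{
		struct k3_dma_dev *d = (struct k3_dma_dev *)dev_id;
		struct k3_dma_phy *p;
		struct k3_dma_chan *c;
		unsigned long flags;
		u32 stat = readl_relaxed(d->base + INT_STAT);	/* reg name assumed */
		u32 i, irq_chan = 0;

		for (i = 0; i < d->dma_channels; i++) {
			if (!(stat & BIT(i)))
				continue;
			p = &d->phy[i];
			c = p->vchan;		/* pchan -> owning vchan */
			if (c) {
				/* complete the finished descriptor under the vchan lock */
				spin_lock_irqsave(&c->vc.lock, flags);
				vchan_cookie_complete(&p->ds_run->vd);
				p->ds_done = p->ds_run;
				spin_unlock_irqrestore(&c->vc.lock, flags);
			}
			irq_chan |= BIT(i);
		}

		writel_relaxed(irq_chan, d->base + INT_TC1_RAW);	/* ack, assumed */
		tasklet_schedule(&d->task);	/* let the tasklet reschedule pchans */
		return irq_chan ? IRQ_HANDLED : IRQ_NONE;
	}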
230 static int k3_dma_start_txd(struct k3_dma_chan *c) in k3_dma_start_txd() argument
232 struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device); in k3_dma_start_txd()
233 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in k3_dma_start_txd()
235 if (!c->phy) in k3_dma_start_txd()
238 if (BIT(c->phy->idx) & k3_dma_get_chan_stat(d)) in k3_dma_start_txd()
249 c->phy->ds_run = ds; in k3_dma_start_txd()
250 c->phy->ds_done = NULL; in k3_dma_start_txd()
252 k3_dma_set_desc(c->phy, &ds->desc_hw[0]); in k3_dma_start_txd()
255 c->phy->ds_done = NULL; in k3_dma_start_txd()
256 c->phy->ds_run = NULL; in k3_dma_start_txd()
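Read together, these lines cover almost the whole of k3_dma_start_txd(): give up if the vchan has no physical channel (line 235) or the hardware still reports the pchan busy (line 238); otherwise pop the next issued descriptor and program it (lines 249-252); with nothing queued, clear the run state (lines 255-256). A reconstruction under those assumptions; the container_of()/list_del() step between lines 238 and 249 and the -EAGAIN returns are inferred from the usual virt-dma convention:

	static int k3_dma_start_txd(struct k3_dma_chan *c)
	{
		struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device);
		struct virt_dma_desc *vd = vchan_next_desc(&c->vc);

		if (!c->phy)
			return -EAGAIN;

		/* pchan still busy in hardware: don't reprogram it */
		if (BIT(c->phy->idx) & k3_dma_get_chan_stat(d))
			return -EAGAIN;

		if (vd) {
			struct k3_dma_desc_sw *ds =
				container_of(vd, struct k3_dma_desc_sw, vd);
			/* remove from desc_issued so it only holds pending work */
			list_del(&ds->vd.node);
			c->phy->ds_run = ds;
			c->phy->ds_done = NULL;
			/* start dma */
			k3_dma_set_desc(c->phy, &ds->desc_hw[0]);
			return 0;
		}
		c->phy->ds_done = NULL;
		c->phy->ds_run = NULL;
		return -EAGAIN;
	}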
264 struct k3_dma_chan *c, *cn; in k3_dma_tasklet() local
268 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_tasklet()
269 spin_lock_irq(&c->vc.lock); in k3_dma_tasklet()
270 p = c->phy; in k3_dma_tasklet()
272 if (k3_dma_start_txd(c)) { in k3_dma_tasklet()
276 c->phy = NULL; in k3_dma_tasklet()
280 spin_unlock_irq(&c->vc.lock); in k3_dma_tasklet()
289 c = list_first_entry(&d->chan_pending, in k3_dma_tasklet()
292 list_del_init(&c->node); in k3_dma_tasklet()
295 p->vchan = c; in k3_dma_tasklet()
296 c->phy = p; in k3_dma_tasklet()
297 dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n", pch, &c->vc); in k3_dma_tasklet()
305 c = p->vchan; in k3_dma_tasklet()
306 if (c) { in k3_dma_tasklet()
307 spin_lock_irq(&c->vc.lock); in k3_dma_tasklet()
308 k3_dma_start_txd(c); in k3_dma_tasklet()
309 spin_unlock_irq(&c->vc.lock); in k3_dma_tasklet()
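The k3_dma_tasklet() hits trace the driver's pchan scheduler in three passes: sweep every virtual channel and release a physical channel whose descriptor queue has drained (lines 268-280), bind free pchans to vchans waiting on d->chan_pending (lines 289-297), then restart the newly bound pairs (lines 305-309). A condensed sketch; the p->ds_done gate and the pch_alloc bitmask are assumptions, the listing only shows the list and lock handling:

	static void k3_dma_tasklet(unsigned long arg)
	{
		struct k3_dma_dev *d = (struct k3_dma_dev *)arg;
		struct k3_dma_phy *p;
		struct k3_dma_chan *c, *cn;
		unsigned pch, pch_alloc = 0;

		/* pass 1: recycle pchans whose vchan has nothing left issued */
		list_for_each_entry_safe(c, cn, &d->slave.channels,
					 vc.chan.device_node) {
			spin_lock_irq(&c->vc.lock);
			p = c->phy;
			if (p && p->ds_done) {
				if (k3_dma_start_txd(c)) {
					/* vchan drained: break the binding */
					p->vchan = NULL;
					c->phy = NULL;
				}
			}
			spin_unlock_irq(&c->vc.lock);
		}

		/* pass 2: hand free pchans to pending vchans */
		spin_lock_irq(&d->lock);
		for (pch = 0; pch < d->dma_channels; pch++) {
			p = &d->phy[pch];
			if (!p->vchan && !list_empty(&d->chan_pending)) {
				c = list_first_entry(&d->chan_pending,
						     struct k3_dma_chan, node);
				list_del_init(&c->node);
				pch_alloc |= 1 << pch;
				/* bind pchan and vchan both ways */
				p->vchan = c;
				c->phy = p;
				dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n",
					pch, &c->vc);
			}
		}
		spin_unlock_irq(&d->lock);

		/* pass 3: kick off the freshly bound pairs */
		for (pch = 0; pch < d->dma_channels; pch++) {
			if (pch_alloc & (1 << pch)) {
				p = &d->phy[pch];
				c = p->vchan;
				if (c) {
					spin_lock_irq(&c->vc.lock);
					k3_dma_start_txd(c);
					spin_unlock_irq(&c->vc.lock);
				}
			}
		}
	}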
317 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_free_chan_resources() local
322 list_del_init(&c->node); in k3_dma_free_chan_resources()
325 vchan_free_chan_resources(&c->vc); in k3_dma_free_chan_resources()
326 c->ccfg = 0; in k3_dma_free_chan_resources()
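k3_dma_free_chan_resources() unhooks the vchan in a fixed order: off the device's pending list first (under d->lock, assumed from the other hits), then the generic virt-dma teardown, then clearing ccfg so the next user reconfigures from scratch. A short sketch:

	static void k3_dma_free_chan_resources(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct k3_dma_dev *d = to_k3_dma(chan->device);
		unsigned long flags;

		spin_lock_irqsave(&d->lock, flags);
		list_del_init(&c->node);	/* no longer waiting for a pchan */
		spin_unlock_irqrestore(&d->lock, flags);

		vchan_free_chan_resources(&c->vc);
		c->ccfg = 0;			/* force reconfig on next use */
	}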
332 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_tx_status() local
340 ret = dma_cookie_status(&c->vc.chan, cookie, state); in k3_dma_tx_status()
344 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_tx_status()
345 p = c->phy; in k3_dma_tx_status()
346 ret = c->status; in k3_dma_tx_status()
352 vd = vchan_find_desc(&c->vc, cookie); in k3_dma_tx_status()
371 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_tx_status()
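k3_dma_tx_status() is the usual residue calculation: return straight away once dma_cookie_status() reports completion, otherwise take the vchan lock and size up what is left, either the full size of a descriptor still sitting on the issued list (vchan_find_desc(), line 352) or the hardware's remaining count for the running one. A simplified sketch; the full driver also walks the rest of the lli chain, and k3_dma_get_curr_cnt() is its helper for the hardware count (treated as an assumption here):

	static enum dma_status k3_dma_tx_status(struct dma_chan *chan,
		dma_cookie_t cookie, struct dma_tx_state *state)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct k3_dma_phy *p;
		struct virt_dma_desc *vd;
		unsigned long flags;
		enum dma_status ret;
		size_t bytes = 0;

		ret = dma_cookie_status(&c->vc.chan, cookie, state);
		if (ret == DMA_COMPLETE)
			return ret;

		spin_lock_irqsave(&c->vc.lock, flags);
		p = c->phy;
		ret = c->status;

		/* still on the issued list: its whole size remains */
		vd = vchan_find_desc(&c->vc, cookie);
		if (vd)
			bytes = container_of(vd, struct k3_dma_desc_sw, vd)->size;
		else if (p && p->ds_run)
			/* otherwise ask the hardware how far it got */
			bytes = k3_dma_get_curr_cnt(to_k3_dma(chan->device), p);
		spin_unlock_irqrestore(&c->vc.lock, flags);

		dma_set_residue(state, bytes);
		return ret;
	}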
378 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_issue_pending() local
382 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_issue_pending()
384 if (vchan_issue_pending(&c->vc)) { in k3_dma_issue_pending()
386 if (!c->phy) { in k3_dma_issue_pending()
387 if (list_empty(&c->node)) { in k3_dma_issue_pending()
389 list_add_tail(&c->node, &d->chan_pending); in k3_dma_issue_pending()
392 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in k3_dma_issue_pending()
397 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in k3_dma_issue_pending()
398 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_issue_pending()
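k3_dma_issue_pending() shows the hand-off to the scheduler: vchan_issue_pending() moves submitted descriptors onto desc_issued, and a vchan that has no physical channel yet is appended to d->chan_pending exactly once (the list_empty(&c->node) test on line 387) before the tasklet is kicked. A sketch, with the tasklet_schedule() call assumed from that pattern:

	static void k3_dma_issue_pending(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct k3_dma_dev *d = to_k3_dma(chan->device);
		unsigned long flags;

		spin_lock_irqsave(&c->vc.lock, flags);
		/* move desc_submitted over to desc_issued */
		if (vchan_issue_pending(&c->vc)) {
			spin_lock(&d->lock);
			if (!c->phy) {
				if (list_empty(&c->node)) {
					/* queue once; the tasklet binds a pchan */
					list_add_tail(&c->node, &d->chan_pending);
					tasklet_schedule(&d->task);
					dev_dbg(d->slave.dev, "vchan %p: issued\n",
						&c->vc);
				}
			}
			spin_unlock(&d->lock);
		} else
			dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc);
		spin_unlock_irqrestore(&c->vc.lock, flags);
	}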
418 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_prep_memcpy() local
429 dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc); in k3_dma_prep_memcpy()
437 if (!c->ccfg) { in k3_dma_prep_memcpy()
439 c->ccfg = CX_CFG_SRCINCR | CX_CFG_DSTINCR | CX_CFG_EN; in k3_dma_prep_memcpy()
440 c->ccfg |= (0xf << 20) | (0xf << 24); /* burst = 16 */ in k3_dma_prep_memcpy()
441 c->ccfg |= (0x3 << 12) | (0x3 << 16); /* width = 64 bit */ in k3_dma_prep_memcpy()
446 k3_dma_fill_desc(ds, dst, src, copy, num++, c->ccfg); in k3_dma_prep_memcpy()
448 if (c->dir == DMA_MEM_TO_DEV) { in k3_dma_prep_memcpy()
450 } else if (c->dir == DMA_DEV_TO_MEM) { in k3_dma_prep_memcpy()
460 return vchan_tx_prep(&c->vc, &ds->vd, flags); in k3_dma_prep_memcpy()
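The k3_dma_prep_memcpy() hits give away the default channel configuration: if device_config was never called (c->ccfg == 0, line 437), the driver builds a memory-to-memory config with both addresses incrementing, burst length 16 (0xf in bits 20-23 and 24-27) and 64-bit bus width (0x3 in bits 12-15 and 16-19). Lines 448-450 also show that an address on the peripheral side is left fixed. A sketch of the whole function under those fragments; the allocation sizing, the DMA_MAX_SIZE chunking and the lli terminator are assumptions:

	static struct dma_async_tx_descriptor *k3_dma_prep_memcpy(
		struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		size_t len, unsigned long flags)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct k3_dma_desc_sw *ds;
		size_t copy = 0;
		int num = 0;

		if (!len)
			return NULL;

		num = DIV_ROUND_UP(len, DMA_MAX_SIZE);
		ds = kzalloc(sizeof(*ds) + num * sizeof(ds->desc_hw[0]), GFP_ATOMIC);
		if (!ds) {
			dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc);
			return NULL;
		}
		ds->size = len;
		num = 0;

		if (!c->ccfg) {
			/* default is memtomem, without calling device_config */
			c->ccfg = CX_CFG_SRCINCR | CX_CFG_DSTINCR | CX_CFG_EN;
			c->ccfg |= (0xf << 20) | (0xf << 24);	/* burst = 16 */
			c->ccfg |= (0x3 << 12) | (0x3 << 16);	/* width = 64 bit */
		}

		/* split into DMA_MAX_SIZE chunks, one hw descriptor each */
		do {
			copy = min_t(size_t, len, DMA_MAX_SIZE);
			k3_dma_fill_desc(ds, dst, src, copy, num++, c->ccfg);

			/* a peripheral-side address must stay fixed on the FIFO */
			if (c->dir == DMA_MEM_TO_DEV) {
				src += copy;
			} else if (c->dir == DMA_DEV_TO_MEM) {
				dst += copy;
			} else {
				src += copy;
				dst += copy;
			}
			len -= copy;
		} while (len);

		ds->desc_hw[num - 1].lli = 0;	/* end of link, assumed convention */
		return vchan_tx_prep(&c->vc, &ds->vd, flags);
	}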
467 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_prep_slave_sg() local
485 dev_dbg(chan->device->dev, "vchan %p: kzalloc fail\n", &c->vc); in k3_dma_prep_slave_sg()
502 dst = c->dev_addr; in k3_dma_prep_slave_sg()
504 src = c->dev_addr; in k3_dma_prep_slave_sg()
508 k3_dma_fill_desc(ds, dst, src, len, num++, c->ccfg); in k3_dma_prep_slave_sg()
517 return vchan_tx_prep(&c->vc, &ds->vd, flags); in k3_dma_prep_slave_sg()
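In k3_dma_prep_slave_sg() the direction picks which side of each segment is pinned to the peripheral FIFO: DMA_MEM_TO_DEV fixes the destination at c->dev_addr (line 502), DMA_DEV_TO_MEM the source (line 504). A sketch of the inner loop around the listed lines; the per-segment splitting mirrors the memcpy path and is assumed:

	for_each_sg(sgl, sg, sglen, i) {
		addr = sg_dma_address(sg);
		avail = sg_dma_len(sg);

		do {
			len = min_t(size_t, avail, DMA_MAX_SIZE);

			if (dir == DMA_MEM_TO_DEV) {
				src = addr;
				dst = c->dev_addr;	/* FIFO address stays fixed */
			} else if (dir == DMA_DEV_TO_MEM) {
				src = c->dev_addr;
				dst = addr;
			}
			k3_dma_fill_desc(ds, dst, src, len, num++, c->ccfg);

			addr  += len;
			avail -= len;
		} while (avail);
	}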
523 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_config() local
529 c->dir = cfg->direction; in k3_dma_config()
530 if (c->dir == DMA_DEV_TO_MEM) { in k3_dma_config()
531 c->ccfg = CX_CFG_DSTINCR; in k3_dma_config()
532 c->dev_addr = cfg->src_addr; in k3_dma_config()
535 } else if (c->dir == DMA_MEM_TO_DEV) { in k3_dma_config()
536 c->ccfg = CX_CFG_SRCINCR; in k3_dma_config()
537 c->dev_addr = cfg->dst_addr; in k3_dma_config()
552 c->ccfg |= (val << 12) | (val << 16); in k3_dma_config()
558 c->ccfg |= (val << 20) | (val << 24); in k3_dma_config()
559 c->ccfg |= CX_CFG_MEM2PER | CX_CFG_EN; in k3_dma_config()
562 c->ccfg |= c->vc.chan.chan_id << 4; in k3_dma_config()
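The k3_dma_config() hits pin down the ccfg bit layout: the width code goes into bits 12-15 (source) and 16-19 (destination), the burst code into bits 20-23 and 24-27, the MEM2PER/EN flags on top, and the channel's request line number into bits 4 and up (line 562). A reconstruction; the width switch (log2 of the byte width, defaulting to the 64-bit code 3) and the maxburst-1 clamp are inferred from those shifts:

	static int k3_dma_config(struct dma_chan *chan,
				 struct dma_slave_config *cfg)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		u32 maxburst = 0, val = 0;
		enum dma_slave_buswidth width = DMA_SLAVE_BUSWIDTH_UNDEFINED;

		if (cfg == NULL)
			return -EINVAL;
		c->dir = cfg->direction;
		if (c->dir == DMA_DEV_TO_MEM) {
			c->ccfg = CX_CFG_DSTINCR;	/* only memory side increments */
			c->dev_addr = cfg->src_addr;
			maxburst = cfg->src_maxburst;
			width = cfg->src_addr_width;
		} else if (c->dir == DMA_MEM_TO_DEV) {
			c->ccfg = CX_CFG_SRCINCR;
			c->dev_addr = cfg->dst_addr;
			maxburst = cfg->dst_maxburst;
			width = cfg->dst_addr_width;
		}

		/* width code = log2(bytes): 1/2/4/8 bytes -> 0/1/2/3 */
		switch (width) {
		case DMA_SLAVE_BUSWIDTH_1_BYTE:
		case DMA_SLAVE_BUSWIDTH_2_BYTES:
		case DMA_SLAVE_BUSWIDTH_4_BYTES:
		case DMA_SLAVE_BUSWIDTH_8_BYTES:
			val = __ffs(width);
			break;
		default:
			val = 3;	/* fall back to 64 bit */
			break;
		}
		c->ccfg |= (val << 12) | (val << 16);

		/* burst field holds maxburst - 1, capped at 16 beats */
		if ((maxburst == 0) || (maxburst > 16))
			val = 15;
		else
			val = maxburst - 1;
		c->ccfg |= (val << 20) | (val << 24);
		c->ccfg |= CX_CFG_MEM2PER | CX_CFG_EN;

		/* specific request line */
		c->ccfg |= c->vc.chan.chan_id << 4;

		return 0;
	}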
569 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_terminate_all() local
571 struct k3_dma_phy *p = c->phy; in k3_dma_terminate_all()
575 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in k3_dma_terminate_all()
579 list_del_init(&c->node); in k3_dma_terminate_all()
583 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_terminate_all()
584 vchan_get_all_descriptors(&c->vc, &head); in k3_dma_terminate_all()
588 c->phy = NULL; in k3_dma_terminate_all()
592 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_terminate_all()
593 vchan_dma_desc_free_list(&c->vc, &head); in k3_dma_terminate_all()
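k3_dma_terminate_all() tears down in two stages: take the channel out of the scheduler's reach (line 579, under d->lock), then, with the vchan lock held, collect every descriptor and break the pchan/vchan binding before freeing the list outside the lock. A sketch; k3_dma_terminate_chan() is the assumed name of the hardware stop helper:

	static int k3_dma_terminate_all(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct k3_dma_dev *d = to_k3_dma(chan->device);
		struct k3_dma_phy *p = c->phy;
		unsigned long flags;
		LIST_HEAD(head);

		dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc);

		/* prevent this channel being scheduled */
		spin_lock(&d->lock);
		list_del_init(&c->node);
		spin_unlock(&d->lock);

		/* clear the tx descriptor lists */
		spin_lock_irqsave(&c->vc.lock, flags);
		vchan_get_all_descriptors(&c->vc, &head);
		if (p) {
			/* vchan is bound to a pchan: stop the hardware */
			k3_dma_terminate_chan(p, d);
			c->phy = NULL;
			p->vchan = NULL;
			p->ds_run = p->ds_done = NULL;
		}
		spin_unlock_irqrestore(&c->vc.lock, flags);
		vchan_dma_desc_free_list(&c->vc, &head);

		return 0;
	}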
600 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_transfer_pause() local
602 struct k3_dma_phy *p = c->phy; in k3_dma_transfer_pause()
604 dev_dbg(d->slave.dev, "vchan %p: pause\n", &c->vc); in k3_dma_transfer_pause()
605 if (c->status == DMA_IN_PROGRESS) { in k3_dma_transfer_pause()
606 c->status = DMA_PAUSED; in k3_dma_transfer_pause()
611 list_del_init(&c->node); in k3_dma_transfer_pause()
621 struct k3_dma_chan *c = to_k3_chan(chan); in k3_dma_transfer_resume() local
623 struct k3_dma_phy *p = c->phy; in k3_dma_transfer_resume()
626 dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc); in k3_dma_transfer_resume()
627 spin_lock_irqsave(&c->vc.lock, flags); in k3_dma_transfer_resume()
628 if (c->status == DMA_PAUSED) { in k3_dma_transfer_resume()
629 c->status = DMA_IN_PROGRESS; in k3_dma_transfer_resume()
632 } else if (!list_empty(&c->vc.desc_issued)) { in k3_dma_transfer_resume()
634 list_add_tail(&c->node, &d->chan_pending); in k3_dma_transfer_resume()
638 spin_unlock_irqrestore(&c->vc.lock, flags); in k3_dma_transfer_resume()
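Pause and resume form a small state machine on c->status. Pause (lines 605-611) moves DMA_IN_PROGRESS to DMA_PAUSED, stopping the pchan if one is bound or, failing that, just unqueuing the vchan from chan_pending. Resume reverses it: restart a still-bound pchan, or requeue the vchan when work is waiting on desc_issued (line 632). A sketch of the resume side; k3_dma_pause_dma() is the assumed name of the hardware pause toggle:

	static int k3_dma_transfer_resume(struct dma_chan *chan)
	{
		struct k3_dma_chan *c = to_k3_chan(chan);
		struct k3_dma_dev *d = to_k3_dma(chan->device);
		struct k3_dma_phy *p = c->phy;
		unsigned long flags;

		dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc);
		spin_lock_irqsave(&c->vc.lock, flags);
		if (c->status == DMA_PAUSED) {
			c->status = DMA_IN_PROGRESS;
			if (p) {
				/* pchan still bound: just unpause the hardware */
				k3_dma_pause_dma(p, true);
			} else if (!list_empty(&c->vc.desc_issued)) {
				/* lost the pchan while paused: queue for a new one */
				spin_lock(&d->lock);
				list_add_tail(&c->node, &d->chan_pending);
				spin_unlock(&d->lock);
			}
		}
		spin_unlock_irqrestore(&c->vc.lock, flags);

		return 0;
	}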
743 struct k3_dma_chan *c = &d->chans[i]; in k3_dma_probe() local
745 c->status = DMA_IN_PROGRESS; in k3_dma_probe()
746 INIT_LIST_HEAD(&c->node); in k3_dma_probe()
747 c->vc.desc_free = k3_dma_free_desc; in k3_dma_probe()
748 vchan_init(&c->vc, &d->slave); in k3_dma_probe()
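The probe-time hits show how each virtual channel starts life: status preset to DMA_IN_PROGRESS, its pending-list node initialised empty, the descriptor free callback wired up, and the vchan registered with the dmaengine device. A sketch of that loop; the d->dma_requests bound and the devm allocation are assumptions:

	/* init virtual channels */
	d->chans = devm_kzalloc(&op->dev,
		d->dma_requests * sizeof(struct k3_dma_chan), GFP_KERNEL);
	if (d->chans == NULL)
		return -ENOMEM;

	for (i = 0; i < d->dma_requests; i++) {
		struct k3_dma_chan *c = &d->chans[i];

		c->status = DMA_IN_PROGRESS;
		INIT_LIST_HEAD(&c->node);		/* not yet on chan_pending */
		c->vc.desc_free = k3_dma_free_desc;	/* used by the virt-dma core */
		vchan_init(&c->vc, &d->slave);
	}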
784 struct k3_dma_chan *c, *cn; in k3_dma_remove() local
790 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_remove()
791 list_del(&c->vc.chan.device_node); in k3_dma_remove()
792 tasklet_kill(&c->vc.task); in k3_dma_remove()
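Finally, k3_dma_remove() unwinds in the expected order: every vchan is unlinked from the dmaengine channel list and its completion tasklet killed before the device-level tasklet and clock go away. A sketch; the unregister and clock calls are assumed context around the listed lines:

	static int k3_dma_remove(struct platform_device *op)
	{
		struct k3_dma_chan *c, *cn;
		struct k3_dma_dev *d = platform_get_drvdata(op);

		dma_async_device_unregister(&d->slave);
		of_dma_controller_free(op->dev.of_node);

		/* detach every vchan and kill its completion tasklet */
		list_for_each_entry_safe(c, cn, &d->slave.channels,
					 vc.chan.device_node) {
			list_del(&c->vc.chan.device_node);
			tasklet_kill(&c->vc.task);
		}
		tasklet_kill(&d->task);
		clk_disable_unprepare(d->clk);
		return 0;
	}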