Lines matching refs: c — cross-reference hits for the struct zx_dma_chan pointer `c` in the zx_dma driver, listed in file order. A trailing 'argument' or 'local' tag marks the line where `c` is declared as a parameter or local variable of the named function.

190 static int zx_dma_start_txd(struct zx_dma_chan *c)  in zx_dma_start_txd()  argument
192 struct zx_dma_dev *d = to_zx_dma(c->vc.chan.device); in zx_dma_start_txd()
193 struct virt_dma_desc *vd = vchan_next_desc(&c->vc); in zx_dma_start_txd()
195 if (!c->phy) in zx_dma_start_txd()
198 if (BIT(c->phy->idx) & zx_dma_get_chan_stat(d)) in zx_dma_start_txd()
209 c->phy->ds_run = ds; in zx_dma_start_txd()
210 c->phy->ds_done = NULL; in zx_dma_start_txd()
212 zx_dma_set_desc(c->phy, ds->desc_hw); in zx_dma_start_txd()
215 c->phy->ds_done = NULL; in zx_dma_start_txd()
216 c->phy->ds_run = NULL; in zx_dma_start_txd()
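Taken together, the hits above cover almost all of zx_dma_start_txd(), which hands the next issued descriptor to the bound physical channel. A minimal reconstruction from those lines; the early-return values and the container_of() lookup are assumptions:

    /* Sketch of zx_dma_start_txd(); unmatched lines are assumed. */
    static int zx_dma_start_txd(struct zx_dma_chan *c)
    {
        struct zx_dma_dev *d = to_zx_dma(c->vc.chan.device);
        struct virt_dma_desc *vd = vchan_next_desc(&c->vc);

        if (!c->phy)                    /* no physical channel bound yet */
            return -EAGAIN;
        if (BIT(c->phy->idx) & zx_dma_get_chan_stat(d))
            return -EAGAIN;             /* hardware channel still busy */

        if (vd) {
            struct zx_dma_desc_sw *ds =
                container_of(vd, struct zx_dma_desc_sw, vd);

            list_del(&ds->vd.node);     /* take it off vc->desc_issued */
            c->phy->ds_run = ds;        /* mark as the running descriptor */
            c->phy->ds_done = NULL;
            zx_dma_set_desc(c->phy, ds->desc_hw);   /* program the hardware */
            return 0;
        }
        c->phy->ds_done = NULL;         /* nothing issued: clear state */
        c->phy->ds_run = NULL;
        return -EAGAIN;
    }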
223 struct zx_dma_chan *c, *cn; in zx_dma_task() local
228 list_for_each_entry_safe(c, cn, &d->slave.channels, in zx_dma_task()
230 spin_lock_irqsave(&c->vc.lock, flags); in zx_dma_task()
231 p = c->phy; in zx_dma_task()
232 if (p && p->ds_done && zx_dma_start_txd(c)) { in zx_dma_task()
236 c->phy = NULL; in zx_dma_task()
239 spin_unlock_irqrestore(&c->vc.lock, flags); in zx_dma_task()
245 c = list_first_entry(&d->chan_pending, in zx_dma_task()
247 p = &d->phy[c->id]; in zx_dma_task()
250 list_del_init(&c->node); in zx_dma_task()
251 pch_alloc |= 1 << c->id; in zx_dma_task()
253 p->vchan = c; in zx_dma_task()
254 c->phy = p; in zx_dma_task()
256 dev_dbg(d->slave.dev, "pchan %u: busy!\n", c->id); in zx_dma_task()
264 c = p->vchan; in zx_dma_task()
265 if (c) { in zx_dma_task()
266 spin_lock_irqsave(&c->vc.lock, flags); in zx_dma_task()
267 zx_dma_start_txd(c); in zx_dma_task()
268 spin_unlock_irqrestore(&c->vc.lock, flags); in zx_dma_task()
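zx_dma_task() is the scheduler: the first loop (hits 228-239) recycles physical channels whose vchan has nothing left to run, the middle loop (hits 245-256) binds free physical channels to pending vchans, and the final loop (hits 264-268) kicks each newly bound pair. A sketch of the last two steps; the busy-case loop exit and the d->dma_channels bound are assumptions:

    /* Bind pending vchans to their (id-fixed) pchans under d->lock. */
    spin_lock_irqsave(&d->lock, flags);
    while (!list_empty(&d->chan_pending)) {
        c = list_first_entry(&d->chan_pending,
                             struct zx_dma_chan, node);
        p = &d->phy[c->id];             /* pchan is fixed by channel id */
        if (!p->vchan) {
            list_del_init(&c->node);    /* claim: bind vchan <-> pchan */
            pch_alloc |= 1 << c->id;
            p->vchan = c;
            c->phy = p;
        } else {
            dev_dbg(d->slave.dev, "pchan %u: busy!\n", c->id);
            break;                      /* assumed: retry on the next pass */
        }
    }
    spin_unlock_irqrestore(&d->lock, flags);

    /* Start a transfer on every pair bound in this pass. */
    for (pch = 0; pch < d->dma_channels; pch++) {
        if (pch_alloc & (1 << pch)) {
            p = &d->phy[pch];
            c = p->vchan;
            if (c) {
                spin_lock_irqsave(&c->vc.lock, flags);
                zx_dma_start_txd(c);
                spin_unlock_irqrestore(&c->vc.lock, flags);
            }
        }
    }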
278 struct zx_dma_chan *c; in zx_dma_int_handler() local
289 c = p->vchan; in zx_dma_int_handler()
290 if (c) { in zx_dma_int_handler()
293 spin_lock_irqsave(&c->vc.lock, flags); in zx_dma_int_handler()
294 if (c->cyclic) { in zx_dma_int_handler()
301 spin_unlock_irqrestore(&c->vc.lock, flags); in zx_dma_int_handler()
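In the interrupt handler, hits 289-301 show the completion split per physical channel: cyclic transfers fire a period callback and keep running, while one-shot transfers complete their cookie so zx_dma_task() can recycle the pchan. A sketch of that block, assuming the standard virt-dma completion helpers:

    c = p->vchan;                       /* vchan bound to this pchan */
    if (c) {
        unsigned long flags;

        spin_lock_irqsave(&c->vc.lock, flags);
        if (c->cyclic) {
            /* cyclic: report one period, leave the transfer running */
            vchan_cyclic_callback(&p->ds_run->vd);
        } else {
            /* one-shot: complete the cookie; ds_done tells the task
             * routine this pchan can be handed to another vchan */
            vchan_cookie_complete(&p->ds_run->vd);
            p->ds_done = p->ds_run;
            task = 1;                   /* assumed: run zx_dma_task() later */
        }
        spin_unlock_irqrestore(&c->vc.lock, flags);
    }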
322 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_free_chan_resources() local
327 list_del_init(&c->node); in zx_dma_free_chan_resources()
330 vchan_free_chan_resources(&c->vc); in zx_dma_free_chan_resources()
331 c->ccfg = 0; in zx_dma_free_chan_resources()
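Freeing channel resources is just unhooking: drop any pending request, let virt-dma free the queued descriptors, and forget the cached config. A short sketch around hits 327-331, with the d->lock usage assumed:

    static void zx_dma_free_chan_resources(struct dma_chan *chan)
    {
        struct zx_dma_chan *c = to_zx_chan(chan);
        struct zx_dma_dev *d = to_zx_dma(chan->device);
        unsigned long flags;

        spin_lock_irqsave(&d->lock, flags);
        list_del_init(&c->node);            /* off d->chan_pending, if queued */
        spin_unlock_irqrestore(&d->lock, flags);

        vchan_free_chan_resources(&c->vc);  /* free all queued descriptors */
        c->ccfg = 0;                        /* drop the channel config word */
    }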
338 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_tx_status() local
345 ret = dma_cookie_status(&c->vc.chan, cookie, state); in zx_dma_tx_status()
349 spin_lock_irqsave(&c->vc.lock, flags); in zx_dma_tx_status()
350 p = c->phy; in zx_dma_tx_status()
351 ret = c->status; in zx_dma_tx_status()
357 vd = vchan_find_desc(&c->vc, cookie); in zx_dma_tx_status()
376 spin_unlock_irqrestore(&c->vc.lock, flags); in zx_dma_tx_status()
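zx_dma_tx_status() first defers to the generic cookie bookkeeping and only computes a residue when the transfer is still in flight: a descriptor still on the issued list has its full size outstanding, while the running descriptor's residue comes from the hardware LLI position. A skeleton; the early exit, the ->size field, and the LLI walk are assumptions:

    ret = dma_cookie_status(&c->vc.chan, cookie, state);
    if (ret == DMA_COMPLETE)                /* assumed early exit */
        return ret;

    spin_lock_irqsave(&c->vc.lock, flags);
    p = c->phy;
    ret = c->status;                        /* channel status cached by the driver */

    vd = vchan_find_desc(&c->vc, cookie);
    if (vd) {
        /* not started yet: the whole descriptor is outstanding */
        bytes = container_of(vd, struct zx_dma_desc_sw, vd)->size;
    } else if (p && p->ds_run) {
        /* running: sum the bytes left in the hardware LLI chain
         * from the current link onward (walk elided) */
    }
    spin_unlock_irqrestore(&c->vc.lock, flags);
    dma_set_residue(state, bytes);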
383 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_issue_pending() local
388 spin_lock_irqsave(&c->vc.lock, flags); in zx_dma_issue_pending()
390 if (vchan_issue_pending(&c->vc)) { in zx_dma_issue_pending()
392 if (!c->phy && list_empty(&c->node)) { in zx_dma_issue_pending()
394 list_add_tail(&c->node, &d->chan_pending); in zx_dma_issue_pending()
396 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in zx_dma_issue_pending()
400 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in zx_dma_issue_pending()
402 spin_unlock_irqrestore(&c->vc.lock, flags); in zx_dma_issue_pending()
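Issue-pending moves submitted descriptors onto desc_issued and, if the vchan is idle and not already queued, puts it on d->chan_pending; zx_dma_task() then does the actual pchan binding. A sketch around hits 388-402, with the nested d->lock and the trailing task kick assumed:

    static void zx_dma_issue_pending(struct dma_chan *chan)
    {
        struct zx_dma_chan *c = to_zx_chan(chan);
        struct zx_dma_dev *d = to_zx_dma(chan->device);
        unsigned long flags;
        int issue = 0;                  /* assumed local flag */

        spin_lock_irqsave(&c->vc.lock, flags);
        if (vchan_issue_pending(&c->vc)) {
            spin_lock(&d->lock);
            if (!c->phy && list_empty(&c->node)) {
                /* idle and not yet queued: request a pchan */
                list_add_tail(&c->node, &d->chan_pending);
                issue = 1;
                dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc);
            }
            spin_unlock(&d->lock);
        } else {
            dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc);
        }
        spin_unlock_irqrestore(&c->vc.lock, flags);

        if (issue)
            zx_dma_task(d);             /* assumed: schedule the binding */
    }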
423 struct zx_dma_chan *c = to_zx_chan(chan); in zx_alloc_desc_resource() local
430 &c->vc, num, lli_limit); in zx_alloc_desc_resource()
440 dev_dbg(chan->device->dev, "vch %p: dma alloc fail\n", &c->vc); in zx_alloc_desc_resource()
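zx_alloc_desc_resource() refuses scatterlists that would overflow one LLI block, then allocates the software descriptor and its hardware LLI array from a DMA pool. A sketch; LLI_BLOCK_SIZE, struct zx_desc_hw, the debug format strings, and the dma_pool field names are assumptions:

    static struct zx_dma_desc_sw *zx_alloc_desc_resource(int num,
                                                         struct dma_chan *chan)
    {
        struct zx_dma_chan *c = to_zx_chan(chan);
        struct zx_dma_dev *d = to_zx_dma(chan->device);
        struct zx_dma_desc_sw *ds;
        int lli_limit = LLI_BLOCK_SIZE / sizeof(struct zx_desc_hw);

        if (num > lli_limit) {
            dev_dbg(chan->device->dev, "vch %p: sg num %d exceeds max %d\n",
                    &c->vc, num, lli_limit);
            return NULL;
        }

        ds = kzalloc(sizeof(*ds), GFP_ATOMIC);
        if (!ds)
            return NULL;

        /* hardware LLI entries come from a coherent DMA pool */
        ds->desc_hw = dma_pool_zalloc(d->pool, GFP_NOWAIT, &ds->desc_hw_lli);
        if (!ds->desc_hw) {
            dev_dbg(chan->device->dev, "vch %p: dma alloc fail\n", &c->vc);
            kfree(ds);
            return NULL;
        }
        ds->desc_num = num;
        return ds;
    }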
462 static int zx_pre_config(struct zx_dma_chan *c, enum dma_transfer_direction dir) in zx_pre_config() argument
464 struct dma_slave_config *cfg = &c->slave_cfg; in zx_pre_config()
471 c->ccfg = ZX_CH_ENABLE | ZX_SOFT_REQ in zx_pre_config()
477 c->dev_addr = cfg->dst_addr; in zx_pre_config()
487 c->ccfg = ZX_DST_FIFO_MODE | ZX_CH_ENABLE in zx_pre_config()
493 c->dev_addr = cfg->src_addr; in zx_pre_config()
498 c->ccfg = ZX_SRC_FIFO_MODE | ZX_CH_ENABLE in zx_pre_config()
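zx_pre_config() turns the cached dma_slave_config into the per-transfer control word c->ccfg and records the device-side FIFO address, one case per direction. A sketch of the switch; the burst and width encodings beyond the bits visible above are elided, and the failure path is assumed:

    static int zx_pre_config(struct zx_dma_chan *c,
                             enum dma_transfer_direction dir)
    {
        struct dma_slave_config *cfg = &c->slave_cfg;

        switch (dir) {
        case DMA_MEM_TO_MEM:
            /* software-requested copy, no FIFO on either side */
            c->ccfg = ZX_CH_ENABLE | ZX_SOFT_REQ /* | burst/width bits */;
            break;
        case DMA_MEM_TO_DEV:
            c->dev_addr = cfg->dst_addr;    /* destination is the device FIFO */
            c->ccfg = ZX_DST_FIFO_MODE | ZX_CH_ENABLE /* | ... */;
            break;
        case DMA_DEV_TO_MEM:
            c->dev_addr = cfg->src_addr;    /* source is the device FIFO */
            c->ccfg = ZX_SRC_FIFO_MODE | ZX_CH_ENABLE /* | ... */;
            break;
        default:
            return -EINVAL;                 /* assumed failure path */
        }
        return 0;
    }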
513 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_prep_memcpy() local
521 if (zx_pre_config(c, DMA_MEM_TO_MEM)) in zx_dma_prep_memcpy()
535 zx_dma_fill_desc(ds, dst, src, copy, num++, c->ccfg); in zx_dma_prep_memcpy()
542 c->cyclic = 0; in zx_dma_prep_memcpy()
545 return vchan_tx_prep(&c->vc, &ds->vd, flags); in zx_dma_prep_memcpy()
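prep_memcpy configures the channel for MEM_TO_MEM, splits the copy into LLI-sized chunks via zx_dma_fill_desc(), clears the cyclic flag, and wraps the descriptor with vchan_tx_prep(). A sketch of the core; the per-chunk cap DMA_MAX_SIZE is an assumption, and the descriptor allocation (via zx_alloc_desc_resource(), hits 423-440) is elided:

    /* Core of zx_dma_prep_memcpy() (allocation and checks elided). */
    if (zx_pre_config(c, DMA_MEM_TO_MEM))
        return NULL;

    num = 0;
    do {
        copy = min_t(size_t, len, DMA_MAX_SIZE);    /* assumed per-LLI cap */
        zx_dma_fill_desc(ds, dst, src, copy, num++, c->ccfg);
        src += copy;
        dst += copy;
        len -= copy;
    } while (len);

    c->cyclic = 0;                          /* one-shot transfer */
    return vchan_tx_prep(&c->vc, &ds->vd, flags);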
552 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_prep_slave_sg() local
562 if (zx_pre_config(c, dir)) in zx_dma_prep_slave_sg()
575 c->cyclic = 0; in zx_dma_prep_slave_sg()
587 dst = c->dev_addr; in zx_dma_prep_slave_sg()
589 src = c->dev_addr; in zx_dma_prep_slave_sg()
593 zx_dma_fill_desc(ds, dst, src, len, num++, c->ccfg); in zx_dma_prep_slave_sg()
603 return vchan_tx_prep(&c->vc, &ds->vd, flags); in zx_dma_prep_slave_sg()
611 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_prep_dma_cyclic() local
622 if (zx_pre_config(c, dir)) in zx_dma_prep_dma_cyclic()
628 c->cyclic = 1; in zx_dma_prep_dma_cyclic()
633 dst = c->dev_addr; in zx_dma_prep_dma_cyclic()
635 src = c->dev_addr; in zx_dma_prep_dma_cyclic()
639 c->ccfg | ZX_IRQ_ENABLE_ALL); in zx_dma_prep_dma_cyclic()
646 return vchan_tx_prep(&c->vc, &ds->vd, flags); in zx_dma_prep_dma_cyclic()
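The cyclic prep differs in two visible ways: c->cyclic is set so the interrupt handler takes the period-callback path, and ZX_IRQ_ENABLE_ALL is OR-ed into each LLI's config so every period interrupts. A sketch of the period loop, using the standard prep_dma_cyclic parameters (buf_addr, buf_len, period_len); allocation and checks are elided:

    /* Core of zx_dma_prep_dma_cyclic() (allocation and checks elided). */
    if (zx_pre_config(c, dir))
        return NULL;

    c->cyclic = 1;                          /* IRQ handler: cyclic path */
    buf = buf_addr;
    num = 0;
    while (buf < buf_addr + buf_len) {
        if (dir == DMA_MEM_TO_DEV) {
            src = buf;
            dst = c->dev_addr;              /* device FIFO */
        } else {
            src = c->dev_addr;
            dst = buf;
        }
        /* interrupt at the end of every period */
        zx_dma_fill_desc(ds, dst, src, period_len, num++,
                         c->ccfg | ZX_IRQ_ENABLE_ALL);
        buf += period_len;
    }
    return vchan_tx_prep(&c->vc, &ds->vd, flags);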
652 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_config() local
657 memcpy(&c->slave_cfg, cfg, sizeof(*cfg)); in zx_dma_config()
664 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_terminate_all() local
666 struct zx_dma_phy *p = c->phy; in zx_dma_terminate_all()
670 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in zx_dma_terminate_all()
674 list_del_init(&c->node); in zx_dma_terminate_all()
678 spin_lock_irqsave(&c->vc.lock, flags); in zx_dma_terminate_all()
679 vchan_get_all_descriptors(&c->vc, &head); in zx_dma_terminate_all()
683 c->phy = NULL; in zx_dma_terminate_all()
688 spin_unlock_irqrestore(&c->vc.lock, flags); in zx_dma_terminate_all()
689 vchan_dma_desc_free_list(&c->vc, &head); in zx_dma_terminate_all()
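Terminate-all has three phases visible in the hits: unqueue the vchan so the scheduler cannot pick it up, reap every descriptor under the vchan lock while unbinding the pchan, then free the reaped list outside the lock. A sketch; the hardware-stop call and the ds_run/ds_done resets inside the if (p) branch are assumptions:

    static int zx_dma_terminate_all(struct dma_chan *chan)
    {
        struct zx_dma_chan *c = to_zx_chan(chan);
        struct zx_dma_dev *d = to_zx_dma(chan->device);
        struct zx_dma_phy *p = c->phy;
        unsigned long flags;
        LIST_HEAD(head);

        dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc);

        /* keep zx_dma_task() from scheduling this vchan */
        spin_lock(&d->lock);
        list_del_init(&c->node);
        spin_unlock(&d->lock);

        spin_lock_irqsave(&c->vc.lock, flags);
        vchan_get_all_descriptors(&c->vc, &head);
        if (p) {
            /* stop the hardware channel here (call elided),
             * then break the vchan <-> pchan binding */
            c->phy = NULL;
            p->vchan = NULL;
            p->ds_run = NULL;
            p->ds_done = NULL;
        }
        spin_unlock_irqrestore(&c->vc.lock, flags);

        vchan_dma_desc_free_list(&c->vc, &head);    /* free outside the lock */
        return 0;
    }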
696 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_transfer_pause() local
699 val = readl_relaxed(c->phy->base + REG_ZX_CTRL); in zx_dma_transfer_pause()
701 writel_relaxed(val, c->phy->base + REG_ZX_CTRL); in zx_dma_transfer_pause()
708 struct zx_dma_chan *c = to_zx_chan(chan); in zx_dma_transfer_resume() local
711 val = readl_relaxed(c->phy->base + REG_ZX_CTRL); in zx_dma_transfer_resume()
713 writel_relaxed(val, c->phy->base + REG_ZX_CTRL); in zx_dma_transfer_resume()
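Pause and resume are a symmetric read-modify-write of the physical channel's control register; the hits only show the reads and writes, so the modified bit is an assumption (ZX_CH_ENABLE, the enable bit seen in zx_pre_config() above, is the natural candidate):

    static int zx_dma_transfer_pause(struct dma_chan *chan)
    {
        struct zx_dma_chan *c = to_zx_chan(chan);
        u32 val;

        val = readl_relaxed(c->phy->base + REG_ZX_CTRL);
        val &= ~ZX_CH_ENABLE;       /* halt without discarding state */
        writel_relaxed(val, c->phy->base + REG_ZX_CTRL);
        return 0;
    }

    static int zx_dma_transfer_resume(struct dma_chan *chan)
    {
        struct zx_dma_chan *c = to_zx_chan(chan);
        u32 val;

        val = readl_relaxed(c->phy->base + REG_ZX_CTRL);
        val |= ZX_CH_ENABLE;        /* pick up where it left off */
        writel_relaxed(val, c->phy->base + REG_ZX_CTRL);
        return 0;
    }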
740 struct zx_dma_chan *c; in zx_of_dma_simple_xlate() local
750 c = to_zx_chan(chan); in zx_of_dma_simple_xlate()
751 c->id = request; in zx_of_dma_simple_xlate()
753 c->id, &c->vc); in zx_of_dma_simple_xlate()
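The devicetree translate hook binds the request line from the DMA specifier to a free virtual channel. A sketch, assuming a single-cell specifier, a bounds check, and the generic dma_get_any_slave_channel() helper:

    static struct dma_chan *zx_of_dma_simple_xlate(struct of_phandle_args *dma_spec,
                                                   struct of_dma *ofdma)
    {
        struct zx_dma_dev *d = ofdma->of_dma_data;
        unsigned int request = dma_spec->args[0];   /* assumed cell layout */
        struct dma_chan *chan;
        struct zx_dma_chan *c;

        if (request >= d->dma_requests)             /* assumed bounds check */
            return NULL;

        chan = dma_get_any_slave_channel(&d->slave);
        if (!chan)
            return NULL;

        c = to_zx_chan(chan);
        c->id = request;            /* remember the request line (hit 751) */
        dev_dbg(d->slave.dev, "zx_dma: request %u gets vchan %p\n",
                c->id, &c->vc);     /* format string assumed; args per hit 753 */
        return chan;
    }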
842 struct zx_dma_chan *c = &d->chans[i]; in zx_dma_probe() local
844 c->status = DMA_IN_PROGRESS; in zx_dma_probe()
845 INIT_LIST_HEAD(&c->node); in zx_dma_probe()
846 c->vc.desc_free = zx_dma_free_desc; in zx_dma_probe()
847 vchan_init(&c->vc, &d->slave); in zx_dma_probe()
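Probe initializes every virtual channel before registering with the dmaengine core: default status, an empty pending-list node, and the virt-dma free hook. A sketch of that loop, with the loop bound (d->dma_requests) assumed:

    for (i = 0; i < d->dma_requests; i++) {
        struct zx_dma_chan *c = &d->chans[i];

        c->status = DMA_IN_PROGRESS;        /* default for zx_dma_tx_status() */
        INIT_LIST_HEAD(&c->node);           /* not yet on d->chan_pending */
        c->vc.desc_free = zx_dma_free_desc; /* virt-dma frees descs via this */
        vchan_init(&c->vc, &d->slave);      /* hook the vchan into the device */
    }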
885 struct zx_dma_chan *c, *cn; in zx_dma_remove() local
894 list_for_each_entry_safe(c, cn, &d->slave.channels, in zx_dma_remove()
896 list_del(&c->vc.chan.device_node); in zx_dma_remove()
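Remove uses the _safe iterator because each vchan is unlinked from the DMA device's channel list as the loop walks it:

    list_for_each_entry_safe(c, cn, &d->slave.channels,
                             vc.chan.device_node) {
        list_del(&c->vc.chan.device_node);  /* detach vchan from the device */
    }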