Lines matching references to d (struct zx_dma_dev *) in the ZX DMA engine driver

141 static void zx_dma_terminate_chan(struct zx_dma_phy *phy, struct zx_dma_dev *d)  in zx_dma_terminate_chan()  argument
151 writel_relaxed(val, d->base + REG_ZX_TC_IRQ_RAW); in zx_dma_terminate_chan()
152 writel_relaxed(val, d->base + REG_ZX_SRC_ERR_IRQ_RAW); in zx_dma_terminate_chan()
153 writel_relaxed(val, d->base + REG_ZX_DST_ERR_IRQ_RAW); in zx_dma_terminate_chan()
154 writel_relaxed(val, d->base + REG_ZX_CFG_ERR_IRQ_RAW); in zx_dma_terminate_chan()
174 static u32 zx_dma_get_chan_stat(struct zx_dma_dev *d) in zx_dma_get_chan_stat() argument
176 return readl_relaxed(d->base + REG_ZX_STATUS); in zx_dma_get_chan_stat()
179 static void zx_dma_init_state(struct zx_dma_dev *d) in zx_dma_init_state() argument
182 writel_relaxed(0x0, d->base + REG_ZX_DMA_ARB); in zx_dma_init_state()
184 writel_relaxed(0xffffffff, d->base + REG_ZX_TC_IRQ_RAW); in zx_dma_init_state()
185 writel_relaxed(0xffffffff, d->base + REG_ZX_SRC_ERR_IRQ_RAW); in zx_dma_init_state()
186 writel_relaxed(0xffffffff, d->base + REG_ZX_DST_ERR_IRQ_RAW); in zx_dma_init_state()
187 writel_relaxed(0xffffffff, d->base + REG_ZX_CFG_ERR_IRQ_RAW); in zx_dma_init_state()
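
Both zx_dma_terminate_chan() (lines 151-154) and zx_dma_init_state() (lines 184-187) write the same four *_IRQ_RAW registers through d->base: terminate acknowledges one channel's bits, init wipes everything. A minimal sketch of that write-to-acknowledge pattern follows; the register offsets and the abbreviated stand-in for struct zx_dma_dev are assumptions for illustration, not the driver's definitions.

#include <linux/io.h>
#include <linux/types.h>

#define REG_ZX_TC_IRQ_RAW       0x10    /* assumed offsets, illustration only */
#define REG_ZX_SRC_ERR_IRQ_RAW  0x14
#define REG_ZX_DST_ERR_IRQ_RAW  0x18
#define REG_ZX_CFG_ERR_IRQ_RAW  0x1c

struct zx_ack_dev {                     /* abbreviated zx_dma_dev stand-in */
        void __iomem *base;
};

/* Acknowledge raw IRQ status by writing the bits back: a per-channel
 * terminate would pass BIT(phy->idx), controller init passes 0xffffffff
 * to start from a clean slate. */
static void zx_dma_ack_sketch(struct zx_ack_dev *d, u32 mask)
{
        writel_relaxed(mask, d->base + REG_ZX_TC_IRQ_RAW);
        writel_relaxed(mask, d->base + REG_ZX_SRC_ERR_IRQ_RAW);
        writel_relaxed(mask, d->base + REG_ZX_DST_ERR_IRQ_RAW);
        writel_relaxed(mask, d->base + REG_ZX_CFG_ERR_IRQ_RAW);
}
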
192 struct zx_dma_dev *d = to_zx_dma(c->vc.chan.device); in zx_dma_start_txd() local
198 if (BIT(c->phy->idx) & zx_dma_get_chan_stat(d)) in zx_dma_start_txd()
220 static void zx_dma_task(struct zx_dma_dev *d) in zx_dma_task() argument
228 list_for_each_entry_safe(c, cn, &d->slave.channels, in zx_dma_task()
234 dev_dbg(d->slave.dev, "pchan %u: free\n", p->idx); in zx_dma_task()
243 spin_lock_irqsave(&d->lock, flags); in zx_dma_task()
244 while (!list_empty(&d->chan_pending)) { in zx_dma_task()
245 c = list_first_entry(&d->chan_pending, in zx_dma_task()
247 p = &d->phy[c->id]; in zx_dma_task()
256 dev_dbg(d->slave.dev, "pchan %u: busy!\n", c->id); in zx_dma_task()
259 spin_unlock_irqrestore(&d->lock, flags); in zx_dma_task()
261 for (pch = 0; pch < d->dma_channels; pch++) { in zx_dma_task()
263 p = &d->phy[pch]; in zx_dma_task()
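
The zx_dma_task() references outline a scheduler: release idle physical channels, then, under d->lock, walk d->chan_pending and bind each waiting virtual channel to the physical channel its id requests. A minimal sketch of that pairing step; only the member names visible above come from the listing, while the vchan/phy back-pointers and the stand-in structs are assumptions.

#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct zx_sched_chan;

struct zx_sched_phy {                   /* abbreviated zx_dma_phy stand-in */
        u32 idx;
        struct zx_sched_chan *vchan;    /* assumed: owner while busy */
};

struct zx_sched_chan {                  /* abbreviated zx_dma_chan stand-in */
        u32 id;                         /* requested physical channel */
        struct list_head node;          /* link in d->chan_pending */
        struct zx_sched_phy *phy;       /* assumed back-pointer */
};

struct zx_sched_dev {                   /* abbreviated zx_dma_dev stand-in */
        spinlock_t lock;
        struct list_head chan_pending;
        struct zx_sched_phy *phy;       /* array of dma_channels entries */
};

static void zx_dma_schedule_sketch(struct zx_sched_dev *d)
{
        struct zx_sched_chan *c;
        struct zx_sched_phy *p;
        unsigned long flags;

        spin_lock_irqsave(&d->lock, flags);
        while (!list_empty(&d->chan_pending)) {
                c = list_first_entry(&d->chan_pending,
                                     struct zx_sched_chan, node);
                p = &d->phy[c->id];
                if (p->vchan) {
                        /* requested pchan still busy: leave c queued */
                        break;
                }
                /* claim the physical channel and drop c from the queue */
                list_del_init(&c->node);
                p->vchan = c;
                c->phy = p;
        }
        spin_unlock_irqrestore(&d->lock, flags);
}

After the lock is dropped, a second pass over d->phy (lines 261-263) would start the next descriptor on every channel that just gained an owner.
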
276 struct zx_dma_dev *d = (struct zx_dma_dev *)dev_id; in zx_dma_int_handler() local
279 u32 tc = readl_relaxed(d->base + REG_ZX_TC_IRQ); in zx_dma_int_handler()
280 u32 serr = readl_relaxed(d->base + REG_ZX_SRC_ERR_IRQ); in zx_dma_int_handler()
281 u32 derr = readl_relaxed(d->base + REG_ZX_DST_ERR_IRQ); in zx_dma_int_handler()
282 u32 cfg = readl_relaxed(d->base + REG_ZX_CFG_ERR_IRQ); in zx_dma_int_handler()
288 p = &d->phy[i]; in zx_dma_int_handler()
307 dev_warn(d->slave.dev, "DMA ERR src 0x%x, dst 0x%x, cfg 0x%x\n", in zx_dma_int_handler()
310 writel_relaxed(irq_chan, d->base + REG_ZX_TC_IRQ_RAW); in zx_dma_int_handler()
311 writel_relaxed(serr, d->base + REG_ZX_SRC_ERR_IRQ_RAW); in zx_dma_int_handler()
312 writel_relaxed(derr, d->base + REG_ZX_DST_ERR_IRQ_RAW); in zx_dma_int_handler()
313 writel_relaxed(cfg, d->base + REG_ZX_CFG_ERR_IRQ_RAW); in zx_dma_int_handler()
316 zx_dma_task(d); in zx_dma_int_handler()
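
The zx_dma_int_handler() references show the interrupt path: read the transfer-complete and the three error status registers, collect the channels whose TC bit is set, warn on errors, and acknowledge everything through the RAW registers. A sketch of that shape; the offsets, the abbreviated device struct and the completion hook are placeholders, and d->dev stands in for d->slave.dev.

#include <linux/bitops.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/types.h>

#define REG_ZX_TC_IRQ           0x20    /* assumed offsets, illustration only */
#define REG_ZX_SRC_ERR_IRQ      0x24
#define REG_ZX_DST_ERR_IRQ      0x28
#define REG_ZX_CFG_ERR_IRQ      0x2c
#define REG_ZX_TC_IRQ_RAW       0x10
#define REG_ZX_SRC_ERR_IRQ_RAW  0x14
#define REG_ZX_DST_ERR_IRQ_RAW  0x18
#define REG_ZX_CFG_ERR_IRQ_RAW  0x1c

struct zx_irq_dev {                     /* abbreviated zx_dma_dev stand-in */
        void __iomem *base;
        struct device *dev;
        u32 dma_channels;
};

static irqreturn_t zx_dma_irq_sketch(int irq, void *dev_id)
{
        struct zx_irq_dev *d = dev_id;
        u32 tc   = readl_relaxed(d->base + REG_ZX_TC_IRQ);
        u32 serr = readl_relaxed(d->base + REG_ZX_SRC_ERR_IRQ);
        u32 derr = readl_relaxed(d->base + REG_ZX_DST_ERR_IRQ);
        u32 cfg  = readl_relaxed(d->base + REG_ZX_CFG_ERR_IRQ);
        u32 irq_chan = 0;
        u32 i;

        /* one transfer-complete bit per physical channel */
        for (i = 0; i < d->dma_channels; i++) {
                if (tc & BIT(i)) {
                        /* per-channel completion handling would run here */
                        irq_chan |= BIT(i);
                }
        }

        if (serr || derr || cfg)
                dev_warn(d->dev, "DMA ERR src 0x%x, dst 0x%x, cfg 0x%x\n",
                         serr, derr, cfg);

        /* write the observed bits back to clear the raw status */
        writel_relaxed(irq_chan, d->base + REG_ZX_TC_IRQ_RAW);
        writel_relaxed(serr, d->base + REG_ZX_SRC_ERR_IRQ_RAW);
        writel_relaxed(derr, d->base + REG_ZX_DST_ERR_IRQ_RAW);
        writel_relaxed(cfg, d->base + REG_ZX_CFG_ERR_IRQ_RAW);

        /* the real handler then reschedules pending work (zx_dma_task, line 316) */
        return (irq_chan || serr || derr || cfg) ? IRQ_HANDLED : IRQ_NONE;
}
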
323 struct zx_dma_dev *d = to_zx_dma(chan->device); in zx_dma_free_chan_resources() local
326 spin_lock_irqsave(&d->lock, flags); in zx_dma_free_chan_resources()
328 spin_unlock_irqrestore(&d->lock, flags); in zx_dma_free_chan_resources()
384 struct zx_dma_dev *d = to_zx_dma(chan->device); in zx_dma_issue_pending() local
391 spin_lock(&d->lock); in zx_dma_issue_pending()
394 list_add_tail(&c->node, &d->chan_pending); in zx_dma_issue_pending()
396 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in zx_dma_issue_pending()
398 spin_unlock(&d->lock); in zx_dma_issue_pending()
400 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in zx_dma_issue_pending()
405 zx_dma_task(d); in zx_dma_issue_pending()
425 struct zx_dma_dev *d = to_zx_dma(chan->device); in zx_alloc_desc_resource() local
438 ds->desc_hw = dma_pool_alloc(d->pool, GFP_NOWAIT, &ds->desc_hw_lli); in zx_alloc_desc_resource()
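
The zx_alloc_desc_resource() reference points at the descriptor memory scheme: hardware link-list items come from the dma_pool created in probe (line 795 above), and each software descriptor keeps both the CPU pointer and the bus address of its first item. A sketch under those assumptions; the stand-in struct and member comments are illustrative.

#include <linux/dmapool.h>
#include <linux/errno.h>
#include <linux/gfp.h>

struct zx_desc_sw_min {                 /* abbreviated zx_dma_desc_sw stand-in */
        void            *desc_hw;       /* CPU view of the first hardware LLI */
        dma_addr_t      desc_hw_lli;    /* bus address handed to the controller */
};

/* Carve one block of link-list memory out of the pool created with
 * dmam_pool_create() in probe. GFP_NOWAIT because dmaengine prep callbacks
 * may run in atomic context. */
static int zx_alloc_desc_sketch(struct dma_pool *pool,
                                struct zx_desc_sw_min *ds)
{
        ds->desc_hw = dma_pool_alloc(pool, GFP_NOWAIT, &ds->desc_hw_lli);
        if (!ds->desc_hw)
                return -ENOMEM;
        return 0;
}
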
665 struct zx_dma_dev *d = to_zx_dma(chan->device); in zx_dma_terminate_all() local
670 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in zx_dma_terminate_all()
673 spin_lock(&d->lock); in zx_dma_terminate_all()
675 spin_unlock(&d->lock); in zx_dma_terminate_all()
682 zx_dma_terminate_chan(p, d); in zx_dma_terminate_all()
722 struct zx_dma_dev *d = to_zx_dma(vd->tx.chan->device); in zx_dma_free_desc() local
724 dma_pool_free(d->pool, ds->desc_hw, ds->desc_hw_lli); in zx_dma_free_desc()
737 struct zx_dma_dev *d = ofdma->of_dma_data; in zx_of_dma_simple_xlate() local
742 if (request >= d->dma_requests) in zx_of_dma_simple_xlate()
745 chan = dma_get_any_slave_channel(&d->slave); in zx_of_dma_simple_xlate()
747 dev_err(d->slave.dev, "get channel fail in %s.\n", __func__); in zx_of_dma_simple_xlate()
752 dev_info(d->slave.dev, "zx_dma: pchan %u: alloc vchan %p\n", in zx_of_dma_simple_xlate()
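
The zx_of_dma_simple_xlate() references suggest the usual of_dma translate callback: validate the request line carried in the DT cells against d->dma_requests, then hand out any free slave channel. A sketch under those assumptions; the abbreviated device stand-in and the note about recording the request line are illustrative, not the driver's exact code.

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/of_dma.h>

struct zx_xlate_dev {                   /* abbreviated zx_dma_dev stand-in */
        struct dma_device slave;
        u32 dma_requests;               /* request-line count from the DT */
};

static struct dma_chan *zx_xlate_sketch(struct of_phandle_args *dma_spec,
                                        struct of_dma *ofdma)
{
        struct zx_xlate_dev *d = ofdma->of_dma_data;
        struct dma_chan *chan;
        u32 request;

        if (dma_spec->args_count < 1)
                return NULL;
        request = dma_spec->args[0];

        /* the cell selects a hardware request line, so it must stay below
         * the dma-requests value read from the device tree */
        if (request >= d->dma_requests)
                return NULL;

        chan = dma_get_any_slave_channel(&d->slave);
        if (!chan) {
                dev_err(d->slave.dev, "get channel fail in %s.\n", __func__);
                return NULL;
        }

        /* a real driver would now record 'request' in its channel struct so
         * later prep/config calls can program that line */
        return chan;
}
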
759 struct zx_dma_dev *d; in zx_dma_probe() local
767 d = devm_kzalloc(&op->dev, sizeof(*d), GFP_KERNEL); in zx_dma_probe()
768 if (!d) in zx_dma_probe()
771 d->base = devm_ioremap_resource(&op->dev, iores); in zx_dma_probe()
772 if (IS_ERR(d->base)) in zx_dma_probe()
773 return PTR_ERR(d->base); in zx_dma_probe()
776 "dma-channels", &d->dma_channels); in zx_dma_probe()
778 "dma-requests", &d->dma_requests); in zx_dma_probe()
779 if (!d->dma_requests || !d->dma_channels) in zx_dma_probe()
782 d->clk = devm_clk_get(&op->dev, NULL); in zx_dma_probe()
783 if (IS_ERR(d->clk)) { in zx_dma_probe()
785 return PTR_ERR(d->clk); in zx_dma_probe()
788 d->irq = platform_get_irq(op, 0); in zx_dma_probe()
789 ret = devm_request_irq(&op->dev, d->irq, zx_dma_int_handler, in zx_dma_probe()
790 0, DRIVER_NAME, d); in zx_dma_probe()
795 d->pool = dmam_pool_create(DRIVER_NAME, &op->dev, in zx_dma_probe()
797 if (!d->pool) in zx_dma_probe()
801 d->phy = devm_kzalloc(&op->dev, in zx_dma_probe()
802 d->dma_channels * sizeof(struct zx_dma_phy), GFP_KERNEL); in zx_dma_probe()
803 if (!d->phy) in zx_dma_probe()
806 for (i = 0; i < d->dma_channels; i++) { in zx_dma_probe()
807 struct zx_dma_phy *p = &d->phy[i]; in zx_dma_probe()
810 p->base = d->base + i * 0x40; in zx_dma_probe()
813 INIT_LIST_HEAD(&d->slave.channels); in zx_dma_probe()
814 dma_cap_set(DMA_SLAVE, d->slave.cap_mask); in zx_dma_probe()
815 dma_cap_set(DMA_MEMCPY, d->slave.cap_mask); in zx_dma_probe()
816 dma_cap_set(DMA_PRIVATE, d->slave.cap_mask); in zx_dma_probe()
817 d->slave.dev = &op->dev; in zx_dma_probe()
818 d->slave.device_free_chan_resources = zx_dma_free_chan_resources; in zx_dma_probe()
819 d->slave.device_tx_status = zx_dma_tx_status; in zx_dma_probe()
820 d->slave.device_prep_dma_memcpy = zx_dma_prep_memcpy; in zx_dma_probe()
821 d->slave.device_prep_slave_sg = zx_dma_prep_slave_sg; in zx_dma_probe()
822 d->slave.device_prep_dma_cyclic = zx_dma_prep_dma_cyclic; in zx_dma_probe()
823 d->slave.device_issue_pending = zx_dma_issue_pending; in zx_dma_probe()
824 d->slave.device_config = zx_dma_config; in zx_dma_probe()
825 d->slave.device_terminate_all = zx_dma_terminate_all; in zx_dma_probe()
826 d->slave.device_pause = zx_dma_transfer_pause; in zx_dma_probe()
827 d->slave.device_resume = zx_dma_transfer_resume; in zx_dma_probe()
828 d->slave.copy_align = DMA_ALIGN; in zx_dma_probe()
829 d->slave.src_addr_widths = ZX_DMA_BUSWIDTHS; in zx_dma_probe()
830 d->slave.dst_addr_widths = ZX_DMA_BUSWIDTHS; in zx_dma_probe()
831 d->slave.directions = BIT(DMA_MEM_TO_MEM) | BIT(DMA_MEM_TO_DEV) in zx_dma_probe()
833 d->slave.residue_granularity = DMA_RESIDUE_GRANULARITY_SEGMENT; in zx_dma_probe()
836 d->chans = devm_kzalloc(&op->dev, in zx_dma_probe()
837 d->dma_requests * sizeof(struct zx_dma_chan), GFP_KERNEL); in zx_dma_probe()
838 if (!d->chans) in zx_dma_probe()
841 for (i = 0; i < d->dma_requests; i++) { in zx_dma_probe()
842 struct zx_dma_chan *c = &d->chans[i]; in zx_dma_probe()
847 vchan_init(&c->vc, &d->slave); in zx_dma_probe()
851 ret = clk_prepare_enable(d->clk); in zx_dma_probe()
857 zx_dma_init_state(d); in zx_dma_probe()
859 spin_lock_init(&d->lock); in zx_dma_probe()
860 INIT_LIST_HEAD(&d->chan_pending); in zx_dma_probe()
861 platform_set_drvdata(op, d); in zx_dma_probe()
863 ret = dma_async_device_register(&d->slave); in zx_dma_probe()
868 zx_of_dma_simple_xlate, d); in zx_dma_probe()
876 dma_async_device_unregister(&d->slave); in zx_dma_probe()
878 clk_disable_unprepare(d->clk); in zx_dma_probe()
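
The tail of zx_dma_probe() (lines 851-878) enables the clock, registers the dma_device, then registers the OF translation hook, unwinding in reverse order on failure. A sketch of just that registration tail, assuming the earlier devm-managed setup (ioremap, IRQ, pool, channel init) already succeeded; the stand-in struct and function names are illustrative.

#include <linux/clk.h>
#include <linux/dmaengine.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>

struct zx_reg_dev {                     /* abbreviated zx_dma_dev stand-in */
        struct clk *clk;
        struct dma_device slave;
};

static int zx_dma_register_sketch(struct platform_device *op,
                                  struct zx_reg_dev *d,
                                  struct dma_chan *(*xlate)(struct of_phandle_args *,
                                                            struct of_dma *))
{
        int ret;

        ret = clk_prepare_enable(d->clk);
        if (ret < 0) {
                dev_err(&op->dev, "clk_prepare_enable failed: %d\n", ret);
                return ret;
        }

        platform_set_drvdata(op, d);

        ret = dma_async_device_register(&d->slave);
        if (ret)
                goto clk_dis;

        /* make the controller visible to DT consumers; 'xlate' turns a
         * phandle plus cells into one of the slave channels */
        ret = of_dma_controller_register(op->dev.of_node, xlate, d);
        if (ret)
                goto of_dma_register_fail;

        return 0;

of_dma_register_fail:
        dma_async_device_unregister(&d->slave);
clk_dis:
        clk_disable_unprepare(d->clk);
        return ret;
}

Everything allocated with devm_* earlier in probe (lines 767-837) is released automatically on failure, which is why only the register and clock steps need explicit unwinding here.
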
886 struct zx_dma_dev *d = platform_get_drvdata(op); in zx_dma_remove() local
889 devm_free_irq(&op->dev, d->irq, d); in zx_dma_remove()
891 dma_async_device_unregister(&d->slave); in zx_dma_remove()
894 list_for_each_entry_safe(c, cn, &d->slave.channels, in zx_dma_remove()
898 clk_disable_unprepare(d->clk); in zx_dma_remove()
899 dmam_pool_destroy(d->pool); in zx_dma_remove()
907 struct zx_dma_dev *d = dev_get_drvdata(dev); in zx_dma_suspend_dev() local
910 stat = zx_dma_get_chan_stat(d); in zx_dma_suspend_dev()
912 dev_warn(d->slave.dev, in zx_dma_suspend_dev()
916 clk_disable_unprepare(d->clk); in zx_dma_suspend_dev()
922 struct zx_dma_dev *d = dev_get_drvdata(dev); in zx_dma_resume_dev() local
925 ret = clk_prepare_enable(d->clk); in zx_dma_resume_dev()
927 dev_err(d->slave.dev, "clk_prepare_enable failed: %d\n", ret); in zx_dma_resume_dev()
930 zx_dma_init_state(d); in zx_dma_resume_dev()
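
The PM callbacks close the loop: suspend checks the channel status and gates the clock, resume ungates it and re-runs the register init. A minimal sketch of that pairing, assuming the status-register offset and the abbreviated struct shown here; the real suspend handler may refuse to sleep while channels are active rather than merely warn.

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/io.h>
#include <linux/pm.h>

#define REG_ZX_STATUS   0x30            /* assumed offset, illustration only */

struct zx_pm_dev {                      /* abbreviated zx_dma_dev stand-in */
        void __iomem *base;
        struct clk *clk;
};

static int zx_dma_suspend_sketch(struct device *dev)
{
        struct zx_pm_dev *d = dev_get_drvdata(dev);

        /* any set bit means a physical channel is still running */
        if (readl_relaxed(d->base + REG_ZX_STATUS))
                dev_warn(dev, "suspending while channels are still active\n");

        clk_disable_unprepare(d->clk);
        return 0;
}

static int zx_dma_resume_sketch(struct device *dev)
{
        struct zx_pm_dev *d = dev_get_drvdata(dev);
        int ret = clk_prepare_enable(d->clk);

        if (ret < 0) {
                dev_err(dev, "clk_prepare_enable failed: %d\n", ret);
                return ret;
        }
        /* re-run controller init here: arbitration setup plus clearing all
         * raw IRQ status, as zx_dma_init_state() does above */
        return 0;
}

static SIMPLE_DEV_PM_OPS(zx_dma_pm_sketch, zx_dma_suspend_sketch,
                         zx_dma_resume_sketch);
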