Lines Matching refs:imxdma

150 struct imxdma_engine *imxdma; member
192 struct imxdma_engine *imxdma; member
228 static inline int is_imx1_dma(struct imxdma_engine *imxdma) in is_imx1_dma() argument
230 return imxdma->devtype == IMX1_DMA; in is_imx1_dma()
233 static inline int is_imx27_dma(struct imxdma_engine *imxdma) in is_imx27_dma() argument
235 return imxdma->devtype == IMX27_DMA; in is_imx27_dma()
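Both predicates compare imxdma->devtype, which probe() copies from the platform id_entry (line 1067 below). A minimal sketch of the gating this enables, assuming the devtype enum defined earlier in this file; the helper name is illustrative:

    enum imx_dma_type {
        IMX1_DMA,
        IMX21_DMA,
        IMX27_DMA,
    };

    /* SoC-specific features hang off devtype; for example, only the
     * i.MX27 controller supports hardware descriptor chaining, which
     * is what imxdma_hw_chain() below checks for. */
    static inline bool has_hw_chaining(struct imxdma_engine *imxdma)
    {
        return imxdma->devtype == IMX27_DMA;
    }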
258 static void imx_dmav1_writel(struct imxdma_engine *imxdma, unsigned val, in imx_dmav1_writel() argument
261 __raw_writel(val, imxdma->base + offset); in imx_dmav1_writel()
264 static unsigned imx_dmav1_readl(struct imxdma_engine *imxdma, unsigned offset) in imx_dmav1_readl() argument
266 return __raw_readl(imxdma->base + offset); in imx_dmav1_readl()
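These two accessors are the only path to the v1 register file; every register access below goes through them. A sketch of the read-modify-write idiom they support, reusing the DMA_DIMR interrupt-mask offset seen at lines 323 and 355 (the function name is made up for illustration):

    /* Unmask one channel's interrupt: read the mask register, clear
     * the per-channel bit, write the result back. */
    static void example_unmask(struct imxdma_engine *imxdma, int channel)
    {
        unsigned mask = imx_dmav1_readl(imxdma, DMA_DIMR);

        imx_dmav1_writel(imxdma, mask & ~(1 << channel), DMA_DIMR);
    }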
271 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_hw_chain() local
273 if (is_imx27_dma(imxdma)) in imxdma_hw_chain()
285 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_sg_next() local
294 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
297 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
300 imx_dmav1_writel(imxdma, now, DMA_CNTR(imxdmac->channel)); in imxdma_sg_next()
302 dev_dbg(imxdma->dev, " %s channel: %d dst 0x%08x, src 0x%08x, " in imxdma_sg_next()
304 imx_dmav1_readl(imxdma, DMA_DAR(imxdmac->channel)), in imxdma_sg_next()
305 imx_dmav1_readl(imxdma, DMA_SAR(imxdmac->channel)), in imxdma_sg_next()
306 imx_dmav1_readl(imxdma, DMA_CNTR(imxdmac->channel))); in imxdma_sg_next()
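imxdma_sg_next() programs one scatterlist entry per hardware run: the buffer address goes to DMA_DAR for device-to-memory transfers or DMA_SAR for memory-to-device, and the byte count to DMA_CNTR. Roughly, with an illustrative helper name and the imxdma_desc fields this driver uses:

    static void example_program_sg(struct imxdma_desc *d,
                                   struct imxdma_channel *imxdmac)
    {
        struct imxdma_engine *imxdma = imxdmac->imxdma;
        struct scatterlist *sg = d->sg;
        /* clamp to what is left of the descriptor */
        size_t now = min_t(size_t, d->len, sg_dma_len(sg));

        if (d->direction == DMA_DEV_TO_MEM)      /* device fills memory */
            imx_dmav1_writel(imxdma, sg->dma_address,
                             DMA_DAR(imxdmac->channel));
        else                                     /* memory feeds device */
            imx_dmav1_writel(imxdma, sg->dma_address,
                             DMA_SAR(imxdmac->channel));

        imx_dmav1_writel(imxdma, now, DMA_CNTR(imxdmac->channel));
    }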
314 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_enable_hw() local
318 dev_dbg(imxdma->dev, "%s channel %d\n", __func__, channel); in imxdma_enable_hw()
322 imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR); in imxdma_enable_hw()
323 imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_DIMR) & in imxdma_enable_hw()
325 imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_CCR(channel)) | in imxdma_enable_hw()
328 if (!is_imx1_dma(imxdma) && in imxdma_enable_hw()
334 tmp = imx_dmav1_readl(imxdma, DMA_CCR(channel)); in imxdma_enable_hw()
335 imx_dmav1_writel(imxdma, tmp | CCR_RPT | CCR_ACRPT, in imxdma_enable_hw()
345 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_disable_hw() local
349 dev_dbg(imxdma->dev, "%s channel %d\n", __func__, channel); in imxdma_disable_hw()
355 imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_DIMR) | in imxdma_disable_hw()
357 imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_CCR(channel)) & in imxdma_disable_hw()
359 imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR); in imxdma_disable_hw()
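Enable and disable mirror each other around three per-channel bits: acknowledge in DMA_DISR, mask or unmask in DMA_DIMR, and set or clear the channel-enable bit in that channel's CCR. A sketch of the disable half at lines 355-359, assuming the CCR_CEN bit definition from this file:

    static void example_disable(struct imxdma_engine *imxdma, int channel)
    {
        /* mask the channel's interrupt */
        imx_dmav1_writel(imxdma,
                         imx_dmav1_readl(imxdma, DMA_DIMR) | (1 << channel),
                         DMA_DIMR);
        /* clear the channel-enable bit to stop the transfer */
        imx_dmav1_writel(imxdma,
                         imx_dmav1_readl(imxdma, DMA_CCR(channel)) & ~CCR_CEN,
                         DMA_CCR(channel));
        /* acknowledge any interrupt that fired meanwhile */
        imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR);
    }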
366 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_watchdog() local
369 imx_dmav1_writel(imxdma, 0, DMA_CCR(channel)); in imxdma_watchdog()
373 dev_dbg(imxdma->dev, "channel %d: watchdog timeout!\n", in imxdma_watchdog()
379 struct imxdma_engine *imxdma = dev_id; in imxdma_err_handler() local
384 disr = imx_dmav1_readl(imxdma, DMA_DISR); in imxdma_err_handler()
386 err_mask = imx_dmav1_readl(imxdma, DMA_DBTOSR) | in imxdma_err_handler()
387 imx_dmav1_readl(imxdma, DMA_DRTOSR) | in imxdma_err_handler()
388 imx_dmav1_readl(imxdma, DMA_DSESR) | in imxdma_err_handler()
389 imx_dmav1_readl(imxdma, DMA_DBOSR); in imxdma_err_handler()
394 imx_dmav1_writel(imxdma, disr & err_mask, DMA_DISR); in imxdma_err_handler()
401 if (imx_dmav1_readl(imxdma, DMA_DBTOSR) & (1 << i)) { in imxdma_err_handler()
402 imx_dmav1_writel(imxdma, 1 << i, DMA_DBTOSR); in imxdma_err_handler()
405 if (imx_dmav1_readl(imxdma, DMA_DRTOSR) & (1 << i)) { in imxdma_err_handler()
406 imx_dmav1_writel(imxdma, 1 << i, DMA_DRTOSR); in imxdma_err_handler()
409 if (imx_dmav1_readl(imxdma, DMA_DSESR) & (1 << i)) { in imxdma_err_handler()
410 imx_dmav1_writel(imxdma, 1 << i, DMA_DSESR); in imxdma_err_handler()
413 if (imx_dmav1_readl(imxdma, DMA_DBOSR) & (1 << i)) { in imxdma_err_handler()
414 imx_dmav1_writel(imxdma, 1 << i, DMA_DBOSR); in imxdma_err_handler()
418 tasklet_schedule(&imxdma->channel[i].dma_tasklet); in imxdma_err_handler()
420 dev_warn(imxdma->dev, in imxdma_err_handler()
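The error handler walks every channel and acknowledges whichever of the four error-status registers fired, then schedules the channel tasklet (line 418). The test-and-acknowledge pattern repeated at lines 401-414 condenses to roughly this (helper name and table are illustrative):

    static void example_ack_errors(struct imxdma_engine *imxdma, int i)
    {
        /* all four registers are write-one-to-clear per channel bit */
        static const unsigned err_regs[] = {
            DMA_DBTOSR,    /* burst time-out */
            DMA_DRTOSR,    /* request time-out */
            DMA_DSESR,     /* transfer (bus) error */
            DMA_DBOSR,     /* buffer overflow */
        };
        int j;

        for (j = 0; j < ARRAY_SIZE(err_regs); j++)
            if (imx_dmav1_readl(imxdma, err_regs[j]) & (1 << i))
                imx_dmav1_writel(imxdma, 1 << i, err_regs[j]);
    }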
432 struct imxdma_engine *imxdma = imxdmac->imxdma; in dma_irq_handle_channel() local
437 spin_lock_irqsave(&imxdma->lock, flags); in dma_irq_handle_channel()
439 spin_unlock_irqrestore(&imxdma->lock, flags); in dma_irq_handle_channel()
446 spin_unlock_irqrestore(&imxdma->lock, flags); in dma_irq_handle_channel()
455 tmp = imx_dmav1_readl(imxdma, DMA_CCR(chno)); in dma_irq_handle_channel()
465 imx_dmav1_writel(imxdma, tmp, DMA_CCR(chno)); in dma_irq_handle_channel()
467 imx_dmav1_writel(imxdma, tmp & ~CCR_CEN, in dma_irq_handle_channel()
472 imx_dmav1_writel(imxdma, tmp, DMA_CCR(chno)); in dma_irq_handle_channel()
488 imx_dmav1_writel(imxdma, 0, DMA_CCR(chno)); in dma_irq_handle_channel()
495 struct imxdma_engine *imxdma = dev_id; in dma_irq_handler() local
498 if (!is_imx1_dma(imxdma)) in dma_irq_handler()
501 disr = imx_dmav1_readl(imxdma, DMA_DISR); in dma_irq_handler()
503 dev_dbg(imxdma->dev, "%s called, disr=0x%08x\n", __func__, disr); in dma_irq_handler()
505 imx_dmav1_writel(imxdma, disr, DMA_DISR); in dma_irq_handler()
508 dma_irq_handle_channel(&imxdma->channel[i]); in dma_irq_handler()
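On i.MX21/27 a single interrupt line covers both completion and errors, so dma_irq_handler() chains imxdma_err_handler() first (line 498); i.MX1 registers the error handler on its own IRQ in probe (line 1111). The dispatch itself reads DMA_DISR once, acknowledges everything, and fans out per channel, roughly:

    static void example_dispatch(struct imxdma_engine *imxdma)
    {
        int i, disr = imx_dmav1_readl(imxdma, DMA_DISR);

        imx_dmav1_writel(imxdma, disr, DMA_DISR);   /* write-1-to-clear */
        for (i = 0; i < IMX_DMA_CHANNELS; i++)
            if (disr & (1 << i))
                dma_irq_handle_channel(&imxdma->channel[i]);
    }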
517 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_xfer_desc() local
526 if ((imxdma->slots_2d[i].count > 0) && in imxdma_xfer_desc()
527 ((imxdma->slots_2d[i].xsr != d->x) || in imxdma_xfer_desc()
528 (imxdma->slots_2d[i].ysr != d->y) || in imxdma_xfer_desc()
529 (imxdma->slots_2d[i].wsr != d->w))) in imxdma_xfer_desc()
537 imxdma->slots_2d[slot].xsr = d->x; in imxdma_xfer_desc()
538 imxdma->slots_2d[slot].ysr = d->y; in imxdma_xfer_desc()
539 imxdma->slots_2d[slot].wsr = d->w; in imxdma_xfer_desc()
540 imxdma->slots_2d[slot].count++; in imxdma_xfer_desc()
548 imx_dmav1_writel(imxdma, d->x, DMA_XSRA); in imxdma_xfer_desc()
549 imx_dmav1_writel(imxdma, d->y, DMA_YSRA); in imxdma_xfer_desc()
550 imx_dmav1_writel(imxdma, d->w, DMA_WSRA); in imxdma_xfer_desc()
554 imx_dmav1_writel(imxdma, d->x, DMA_XSRB); in imxdma_xfer_desc()
555 imx_dmav1_writel(imxdma, d->y, DMA_YSRB); in imxdma_xfer_desc()
556 imx_dmav1_writel(imxdma, d->w, DMA_WSRB); in imxdma_xfer_desc()
563 imx_dmav1_writel(imxdma, d->src, DMA_SAR(imxdmac->channel)); in imxdma_xfer_desc()
564 imx_dmav1_writel(imxdma, d->dest, DMA_DAR(imxdmac->channel)); in imxdma_xfer_desc()
565 imx_dmav1_writel(imxdma, d->config_mem | (d->config_port << 2), in imxdma_xfer_desc()
568 imx_dmav1_writel(imxdma, d->len, DMA_CNTR(imxdmac->channel)); in imxdma_xfer_desc()
570 dev_dbg(imxdma->dev, in imxdma_xfer_desc()
581 imx_dmav1_writel(imxdma, imxdmac->per_address, in imxdma_xfer_desc()
583 imx_dmav1_writel(imxdma, imxdmac->ccr_from_device, in imxdma_xfer_desc()
586 dev_dbg(imxdma->dev, in imxdma_xfer_desc()
592 imx_dmav1_writel(imxdma, imxdmac->per_address, in imxdma_xfer_desc()
594 imx_dmav1_writel(imxdma, imxdmac->ccr_to_device, in imxdma_xfer_desc()
597 dev_dbg(imxdma->dev, in imxdma_xfer_desc()
603 dev_err(imxdma->dev, "%s channel: %d bad dma mode\n", in imxdma_xfer_desc()
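The controller exposes only two 2D configuration slots, so imxdma_xfer_desc() first searches slots_2d for a slot that is free or already holds matching x/y/w parameters (lines 526-540), then programs the corresponding XSR/YSR/WSR register bank (lines 548-556). A sketch of the slot-to-bank mapping, using the driver's IMX_DMA_2D_SLOT_A constant; the register comments follow the i.MX 2D addressing model:

    static void example_program_2d(struct imxdma_engine *imxdma,
                                   struct imxdma_desc *d, int slot)
    {
        if (slot == IMX_DMA_2D_SLOT_A) {
            imx_dmav1_writel(imxdma, d->x, DMA_XSRA);   /* X size (row) */
            imx_dmav1_writel(imxdma, d->y, DMA_YSRA);   /* Y size (rows) */
            imx_dmav1_writel(imxdma, d->w, DMA_WSRA);   /* W size (stride) */
        } else {            /* slot B: same layout, second bank */
            imx_dmav1_writel(imxdma, d->x, DMA_XSRB);
            imx_dmav1_writel(imxdma, d->y, DMA_YSRB);
            imx_dmav1_writel(imxdma, d->w, DMA_WSRB);
        }
    }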
621 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_tasklet() local
625 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_tasklet()
629 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_tasklet()
645 imxdma->slots_2d[imxdmac->slot_2d].count--; in imxdma_tasklet()
656 dev_warn(imxdma->dev, "%s: channel: %d couldn't xfer desc\n", in imxdma_tasklet()
660 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_tasklet()
670 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_terminate_all() local
675 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_terminate_all()
678 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_terminate_all()
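terminate_all stops the hardware before touching any lists; only then, under the engine-wide lock taken at line 675, does it move every active and queued descriptor back to the free list. Roughly, with the list names this driver uses:

    static void example_terminate(struct imxdma_channel *imxdmac)
    {
        struct imxdma_engine *imxdma = imxdmac->imxdma;
        unsigned long flags;

        imxdma_disable_hw(imxdmac);     /* halt the channel first */

        /* recycle descriptors under the lock so the tasklet cannot
         * race with the list moves */
        spin_lock_irqsave(&imxdma->lock, flags);
        list_splice_tail_init(&imxdmac->ld_active, &imxdmac->ld_free);
        list_splice_tail_init(&imxdmac->ld_queue, &imxdmac->ld_free);
        spin_unlock_irqrestore(&imxdma->lock, flags);
    }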
686 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_config() local
720 imx_dmav1_writel(imxdma, imxdmac->dma_request, in imxdma_config()
724 imx_dmav1_writel(imxdma, imxdmac->watermark_level * in imxdma_config()
740 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_tx_submit() local
744 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_tx_submit()
747 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_tx_submit()
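Submission never touches the hardware: under the lock taken at line 744 the prepared descriptor moves from the free list to the queue and gets its cookie; imxdma_issue_pending() starts it later. A sketch under those assumptions:

    static dma_cookie_t example_submit(struct dma_async_tx_descriptor *tx)
    {
        struct imxdma_channel *imxdmac = to_imxdma_chan(tx->chan);
        struct imxdma_engine *imxdma = imxdmac->imxdma;
        dma_cookie_t cookie;
        unsigned long flags;

        spin_lock_irqsave(&imxdma->lock, flags);
        /* queue the descriptor; hardware is started by issue_pending */
        list_move_tail(imxdmac->ld_free.next, &imxdmac->ld_queue);
        cookie = dma_cookie_assign(tx);
        spin_unlock_irqrestore(&imxdma->lock, flags);

        return cookie;
    }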
786 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_free_chan_resources() local
790 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_free_chan_resources()
796 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_free_chan_resources()
865 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_cyclic() local
870 dev_dbg(imxdma->dev, "%s channel: %d buf_len=%zu period_len=%zu\n", in imxdma_prep_dma_cyclic()
923 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_memcpy() local
926 dev_dbg(imxdma->dev, "%s channel: %d src=0x%llx dst=0x%llx len=%zu\n", in imxdma_prep_dma_memcpy()
954 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_interleaved() local
957 dev_dbg(imxdma->dev, "%s channel: %d src_start=0x%llx dst_start=0x%llx\n" in imxdma_prep_dma_interleaved()
996 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_issue_pending() local
1000 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_issue_pending()
1007 dev_warn(imxdma->dev, in imxdma_issue_pending()
1015 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_issue_pending()
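issue_pending only kicks the hardware when the channel is idle: if nothing is active and the queue is non-empty, the first queued descriptor is handed to imxdma_xfer_desc() and, on success, moved to the active list (the dev_warn at line 1007 covers the failure case). Roughly:

    static void example_issue_pending(struct imxdma_channel *imxdmac)
    {
        struct imxdma_engine *imxdma = imxdmac->imxdma;
        struct imxdma_desc *desc;
        unsigned long flags;

        spin_lock_irqsave(&imxdma->lock, flags);
        if (list_empty(&imxdmac->ld_active) &&
            !list_empty(&imxdmac->ld_queue)) {
            desc = list_first_entry(&imxdmac->ld_queue,
                                    struct imxdma_desc, node);
            if (imxdma_xfer_desc(desc) >= 0)
                list_move_tail(imxdmac->ld_queue.next,
                               &imxdmac->ld_active);
        }
        spin_unlock_irqrestore(&imxdma->lock, flags);
    }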
1023 if (chan->device->dev != fdata->imxdma->dev) in imxdma_filter_fn()
1036 struct imxdma_engine *imxdma = ofdma->of_dma_data; in imxdma_xlate() local
1038 .imxdma = imxdma, in imxdma_xlate()
1046 return dma_request_channel(imxdma->dma_device.cap_mask, in imxdma_xlate()
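The OF translate hook converts a one-cell device-tree dma-spec into a channel request: dma_request_channel() runs the filter over all capable channels, and the filter accepts only channels belonging to this engine instance (line 1023), recording the request line from the spec. A sketch of such a filter, with fields named as in the driver's imxdma_filter_data:

    static bool example_filter(struct dma_chan *chan, void *param)
    {
        struct imxdma_filter_data *fdata = param;

        /* reject channels owned by another controller instance */
        if (chan->device->dev != fdata->imxdma->dev)
            return false;

        /* remember the DT-supplied request line for this channel */
        to_imxdma_chan(chan)->dma_request = fdata->request;
        return true;
    }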
1052 struct imxdma_engine *imxdma; in imxdma_probe() local
1062 imxdma = devm_kzalloc(&pdev->dev, sizeof(*imxdma), GFP_KERNEL); in imxdma_probe()
1063 if (!imxdma) in imxdma_probe()
1066 imxdma->dev = &pdev->dev; in imxdma_probe()
1067 imxdma->devtype = pdev->id_entry->driver_data; in imxdma_probe()
1070 imxdma->base = devm_ioremap_resource(&pdev->dev, res); in imxdma_probe()
1071 if (IS_ERR(imxdma->base)) in imxdma_probe()
1072 return PTR_ERR(imxdma->base); in imxdma_probe()
1078 imxdma->dma_ipg = devm_clk_get(&pdev->dev, "ipg"); in imxdma_probe()
1079 if (IS_ERR(imxdma->dma_ipg)) in imxdma_probe()
1080 return PTR_ERR(imxdma->dma_ipg); in imxdma_probe()
1082 imxdma->dma_ahb = devm_clk_get(&pdev->dev, "ahb"); in imxdma_probe()
1083 if (IS_ERR(imxdma->dma_ahb)) in imxdma_probe()
1084 return PTR_ERR(imxdma->dma_ahb); in imxdma_probe()
1086 ret = clk_prepare_enable(imxdma->dma_ipg); in imxdma_probe()
1089 ret = clk_prepare_enable(imxdma->dma_ahb); in imxdma_probe()
1094 imx_dmav1_writel(imxdma, DCR_DRST, DMA_DCR); in imxdma_probe()
1096 if (is_imx1_dma(imxdma)) { in imxdma_probe()
1098 dma_irq_handler, 0, "DMA", imxdma); in imxdma_probe()
1100 dev_warn(imxdma->dev, "Can't register IRQ for DMA\n"); in imxdma_probe()
1111 imxdma_err_handler, 0, "DMA", imxdma); in imxdma_probe()
1113 dev_warn(imxdma->dev, "Can't register ERRIRQ for DMA\n"); in imxdma_probe()
1119 imx_dmav1_writel(imxdma, DCR_DEN, DMA_DCR); in imxdma_probe()
1122 imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DISR); in imxdma_probe()
1125 imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DIMR); in imxdma_probe()
1127 INIT_LIST_HEAD(&imxdma->dma_device.channels); in imxdma_probe()
1129 dma_cap_set(DMA_SLAVE, imxdma->dma_device.cap_mask); in imxdma_probe()
1130 dma_cap_set(DMA_CYCLIC, imxdma->dma_device.cap_mask); in imxdma_probe()
1131 dma_cap_set(DMA_MEMCPY, imxdma->dma_device.cap_mask); in imxdma_probe()
1132 dma_cap_set(DMA_INTERLEAVE, imxdma->dma_device.cap_mask); in imxdma_probe()
1136 imxdma->slots_2d[i].count = 0; in imxdma_probe()
1138 spin_lock_init(&imxdma->lock); in imxdma_probe()
1142 struct imxdma_channel *imxdmac = &imxdma->channel[i]; in imxdma_probe()
1144 if (!is_imx1_dma(imxdma)) { in imxdma_probe()
1146 dma_irq_handler, 0, "DMA", imxdma); in imxdma_probe()
1148 dev_warn(imxdma->dev, "Can't register IRQ %d " in imxdma_probe()
1158 imxdmac->imxdma = imxdma; in imxdma_probe()
1166 imxdmac->chan.device = &imxdma->dma_device; in imxdma_probe()
1172 &imxdma->dma_device.channels); in imxdma_probe()
1175 imxdma->dma_device.dev = &pdev->dev; in imxdma_probe()
1177 imxdma->dma_device.device_alloc_chan_resources = imxdma_alloc_chan_resources; in imxdma_probe()
1178 imxdma->dma_device.device_free_chan_resources = imxdma_free_chan_resources; in imxdma_probe()
1179 imxdma->dma_device.device_tx_status = imxdma_tx_status; in imxdma_probe()
1180 imxdma->dma_device.device_prep_slave_sg = imxdma_prep_slave_sg; in imxdma_probe()
1181 imxdma->dma_device.device_prep_dma_cyclic = imxdma_prep_dma_cyclic; in imxdma_probe()
1182 imxdma->dma_device.device_prep_dma_memcpy = imxdma_prep_dma_memcpy; in imxdma_probe()
1183 imxdma->dma_device.device_prep_interleaved_dma = imxdma_prep_dma_interleaved; in imxdma_probe()
1184 imxdma->dma_device.device_config = imxdma_config; in imxdma_probe()
1185 imxdma->dma_device.device_terminate_all = imxdma_terminate_all; in imxdma_probe()
1186 imxdma->dma_device.device_issue_pending = imxdma_issue_pending; in imxdma_probe()
1188 platform_set_drvdata(pdev, imxdma); in imxdma_probe()
1190 imxdma->dma_device.copy_align = DMAENGINE_ALIGN_4_BYTES; in imxdma_probe()
1191 imxdma->dma_device.dev->dma_parms = &imxdma->dma_parms; in imxdma_probe()
1192 dma_set_max_seg_size(imxdma->dma_device.dev, 0xffffff); in imxdma_probe()
1194 ret = dma_async_device_register(&imxdma->dma_device); in imxdma_probe()
1202 imxdma_xlate, imxdma); in imxdma_probe()
1212 dma_async_device_unregister(&imxdma->dma_device); in imxdma_probe()
1214 clk_disable_unprepare(imxdma->dma_ahb); in imxdma_probe()
1216 clk_disable_unprepare(imxdma->dma_ipg); in imxdma_probe()
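Note how the error path mirrors the clock setup: ipg is enabled first (line 1086) and therefore disabled last (line 1216), with the DMA device unregistered before either clock goes away (line 1212). A condensed sketch of just the clock half, with an illustrative function name:

    static int example_enable_clocks(struct imxdma_engine *imxdma)
    {
        int ret;

        ret = clk_prepare_enable(imxdma->dma_ipg);
        if (ret)
            return ret;

        ret = clk_prepare_enable(imxdma->dma_ahb);
        if (ret)
            goto disable_ipg;

        return 0;   /* later failures must disable ahb, then ipg */

    disable_ipg:
        clk_disable_unprepare(imxdma->dma_ipg);
        return ret;
    }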
1222 struct imxdma_engine *imxdma = platform_get_drvdata(pdev); in imxdma_remove() local
1224 dma_async_device_unregister(&imxdma->dma_device); in imxdma_remove()
1229 clk_disable_unprepare(imxdma->dma_ipg); in imxdma_remove()
1230 clk_disable_unprepare(imxdma->dma_ahb); in imxdma_remove()