imx-dma.c: occurrences of the identifier `imxdma`, grouped by enclosing function or structure. Leading numbers are line numbers in the source file; [member], [argument], and [local] mark how the identifier is bound at its declaration.

struct imxdma_channel [member]
   150  struct imxdma_engine *imxdma;

struct imxdma_filter_data [member]
   192  struct imxdma_engine *imxdma;

is_imx1_dma() [argument]
   228  static inline int is_imx1_dma(struct imxdma_engine *imxdma)
   230  return imxdma->devtype == IMX1_DMA;

is_imx27_dma() [argument]
   233  static inline int is_imx27_dma(struct imxdma_engine *imxdma)
   235  return imxdma->devtype == IMX27_DMA;

imx_dmav1_writel() [argument]
   258  static void imx_dmav1_writel(struct imxdma_engine *imxdma, unsigned val,
   261  __raw_writel(val, imxdma->base + offset);

imx_dmav1_readl() [argument]
   264  static unsigned imx_dmav1_readl(struct imxdma_engine *imxdma, unsigned offset)
   266  return __raw_readl(imxdma->base + offset);

imxdma_hw_chain() [local]
   271  struct imxdma_engine *imxdma = imxdmac->imxdma;
   273  if (is_imx27_dma(imxdma))

imxdma_sg_next() [local]
   285  struct imxdma_engine *imxdma = imxdmac->imxdma;
   294  imx_dmav1_writel(imxdma, sg->dma_address,
   297  imx_dmav1_writel(imxdma, sg->dma_address,
   300  imx_dmav1_writel(imxdma, now, DMA_CNTR(imxdmac->channel));
   302  dev_dbg(imxdma->dev, " %s channel: %d dst 0x%08x, src 0x%08x, "
   304  imx_dmav1_readl(imxdma, DMA_DAR(imxdmac->channel)),
   305  imx_dmav1_readl(imxdma, DMA_SAR(imxdmac->channel)),
   306  imx_dmav1_readl(imxdma, DMA_CNTR(imxdmac->channel)));

imxdma_enable_hw() [local]
   314  struct imxdma_engine *imxdma = imxdmac->imxdma;
   318  dev_dbg(imxdma->dev, "%s channel %d\n", __func__, channel);
   322  imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR);
   323  imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_DIMR) &
   325  imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_CCR(channel)) |
   328  if (!is_imx1_dma(imxdma) &&
   334  tmp = imx_dmav1_readl(imxdma, DMA_CCR(channel));
   335  imx_dmav1_writel(imxdma, tmp | CCR_RPT | CCR_ACRPT,

imxdma_disable_hw() [local]
   345  struct imxdma_engine *imxdma = imxdmac->imxdma;
   349  dev_dbg(imxdma->dev, "%s channel %d\n", __func__, channel);
   355  imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_DIMR) |
   357  imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_CCR(channel)) &
   359  imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR);

imxdma_watchdog() [local]
   366  struct imxdma_engine *imxdma = imxdmac->imxdma;
   369  imx_dmav1_writel(imxdma, 0, DMA_CCR(channel));
   373  dev_dbg(imxdma->dev, "channel %d: watchdog timeout!\n",
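The enable/disable hits above all use one register-access idiom: imx_dmav1_writel()/imx_dmav1_readl() add a register offset to the ioremap'd base, and callers layer read-modify-write sequences on top. Below is a minimal sketch of the unmask step from imxdma_enable_hw(); the hits only show lines containing `imxdma`, so the ~(1 << channel) mask is inferred from the matching OR in imxdma_disable_hw() at line 355, and the helper name is hypothetical:

/* Hypothetical helper condensing lines 322-323 above: ack any stale
 * interrupt for the channel, then unmask it in DMA_DIMR. The
 * ~(1 << channel) mask is inferred, not shown in the hits. */
static void imx_dmav1_unmask_channel(struct imxdma_engine *imxdma, int channel)
{
	imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR);
	imx_dmav1_writel(imxdma,
			 imx_dmav1_readl(imxdma, DMA_DIMR) & ~(1 << channel),
			 DMA_DIMR);
}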
imxdma_err_handler() [local]
   379  struct imxdma_engine *imxdma = dev_id;
   384  disr = imx_dmav1_readl(imxdma, DMA_DISR);
   386  err_mask = imx_dmav1_readl(imxdma, DMA_DBTOSR) |
   387  imx_dmav1_readl(imxdma, DMA_DRTOSR) |
   388  imx_dmav1_readl(imxdma, DMA_DSESR) |
   389  imx_dmav1_readl(imxdma, DMA_DBOSR);
   394  imx_dmav1_writel(imxdma, disr & err_mask, DMA_DISR);
   401  if (imx_dmav1_readl(imxdma, DMA_DBTOSR) & (1 << i)) {
   402  imx_dmav1_writel(imxdma, 1 << i, DMA_DBTOSR);
   405  if (imx_dmav1_readl(imxdma, DMA_DRTOSR) & (1 << i)) {
   406  imx_dmav1_writel(imxdma, 1 << i, DMA_DRTOSR);
   409  if (imx_dmav1_readl(imxdma, DMA_DSESR) & (1 << i)) {
   410  imx_dmav1_writel(imxdma, 1 << i, DMA_DSESR);
   413  if (imx_dmav1_readl(imxdma, DMA_DBOSR) & (1 << i)) {
   414  imx_dmav1_writel(imxdma, 1 << i, DMA_DBOSR);
   418  tasklet_schedule(&imxdma->channel[i].dma_tasklet);
   420  dev_warn(imxdma->dev,

dma_irq_handle_channel() [local]
   432  struct imxdma_engine *imxdma = imxdmac->imxdma;
   437  spin_lock_irqsave(&imxdma->lock, flags);
   439  spin_unlock_irqrestore(&imxdma->lock, flags);
   446  spin_unlock_irqrestore(&imxdma->lock, flags);
   455  tmp = imx_dmav1_readl(imxdma, DMA_CCR(chno));
   465  imx_dmav1_writel(imxdma, tmp, DMA_CCR(chno));
   467  imx_dmav1_writel(imxdma, tmp & ~CCR_CEN,
   472  imx_dmav1_writel(imxdma, tmp, DMA_CCR(chno));
   488  imx_dmav1_writel(imxdma, 0, DMA_CCR(chno));

dma_irq_handler() [local]
   495  struct imxdma_engine *imxdma = dev_id;
   498  if (!is_imx1_dma(imxdma))
   501  disr = imx_dmav1_readl(imxdma, DMA_DISR);
   503  dev_dbg(imxdma->dev, "%s called, disr=0x%08x\n", __func__, disr);
   505  imx_dmav1_writel(imxdma, disr, DMA_DISR);
   508  dma_irq_handle_channel(&imxdma->channel[i]);

imxdma_xfer_desc() [local]
   517  struct imxdma_engine *imxdma = imxdmac->imxdma;
   526  if ((imxdma->slots_2d[i].count > 0) &&
   527  ((imxdma->slots_2d[i].xsr != d->x) ||
   528  (imxdma->slots_2d[i].ysr != d->y) ||
   529  (imxdma->slots_2d[i].wsr != d->w)))
   537  imxdma->slots_2d[slot].xsr = d->x;
   538  imxdma->slots_2d[slot].ysr = d->y;
   539  imxdma->slots_2d[slot].wsr = d->w;
   540  imxdma->slots_2d[slot].count++;
   548  imx_dmav1_writel(imxdma, d->x, DMA_XSRA);
   549  imx_dmav1_writel(imxdma, d->y, DMA_YSRA);
   550  imx_dmav1_writel(imxdma, d->w, DMA_WSRA);
   554  imx_dmav1_writel(imxdma, d->x, DMA_XSRB);
   555  imx_dmav1_writel(imxdma, d->y, DMA_YSRB);
   556  imx_dmav1_writel(imxdma, d->w, DMA_WSRB);
   563  imx_dmav1_writel(imxdma, d->src, DMA_SAR(imxdmac->channel));
   564  imx_dmav1_writel(imxdma, d->dest, DMA_DAR(imxdmac->channel));
   565  imx_dmav1_writel(imxdma, d->config_mem | (d->config_port << 2),
   568  imx_dmav1_writel(imxdma, d->len, DMA_CNTR(imxdmac->channel));
   570  dev_dbg(imxdma->dev,
   581  imx_dmav1_writel(imxdma, imxdmac->per_address,
   583  imx_dmav1_writel(imxdma, imxdmac->ccr_from_device,
   586  dev_dbg(imxdma->dev,
   592  imx_dmav1_writel(imxdma, imxdmac->per_address,
   594  imx_dmav1_writel(imxdma, imxdmac->ccr_to_device,
   597  dev_dbg(imxdma->dev,
   603  dev_err(imxdma->dev, "%s channel: %d bad dma mode\n",
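The slots_2d checks in imxdma_xfer_desc() above act as a small allocator for the two shared 2D geometry register banks (XSRA/YSRA/WSRA and XSRB/YSRB/WSRB): a slot already in use can only be shared when its x/y/w parameters match the new descriptor. A condensed sketch of that search follows, assuming the driver's IMX_DMA_2D_SLOTS bound; the helper name and the -EBUSY return convention are illustrative, not the driver's actual control flow:

/* Illustrative condensation of the loop around lines 526-529: pick a 2D
 * slot that is either unused or already programmed with the same
 * x/y/w geometry; otherwise report that both slots are busy. */
static int imxdma_find_2d_slot(struct imxdma_engine *imxdma,
			       struct imxdma_desc *d)
{
	int i;

	for (i = 0; i < IMX_DMA_2D_SLOTS; i++) {
		if (imxdma->slots_2d[i].count > 0 &&
		    (imxdma->slots_2d[i].xsr != d->x ||
		     imxdma->slots_2d[i].ysr != d->y ||
		     imxdma->slots_2d[i].wsr != d->w))
			continue;	/* in use with a different geometry */
		return i;		/* free, or sharable as-is */
	}
	return -EBUSY;
}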
xfer desc\n", imxdma_tasklet() 660 spin_unlock_irqrestore(&imxdma->lock, flags); imxdma_tasklet() 670 struct imxdma_engine *imxdma = imxdmac->imxdma; imxdma_terminate_all() local 675 spin_lock_irqsave(&imxdma->lock, flags); imxdma_terminate_all() 678 spin_unlock_irqrestore(&imxdma->lock, flags); imxdma_terminate_all() 686 struct imxdma_engine *imxdma = imxdmac->imxdma; imxdma_config() local 720 imx_dmav1_writel(imxdma, imxdmac->dma_request, imxdma_config() 724 imx_dmav1_writel(imxdma, imxdmac->watermark_level * imxdma_config() 740 struct imxdma_engine *imxdma = imxdmac->imxdma; imxdma_tx_submit() local 744 spin_lock_irqsave(&imxdma->lock, flags); imxdma_tx_submit() 747 spin_unlock_irqrestore(&imxdma->lock, flags); imxdma_tx_submit() 786 struct imxdma_engine *imxdma = imxdmac->imxdma; imxdma_free_chan_resources() local 790 spin_lock_irqsave(&imxdma->lock, flags); imxdma_free_chan_resources() 796 spin_unlock_irqrestore(&imxdma->lock, flags); imxdma_free_chan_resources() 865 struct imxdma_engine *imxdma = imxdmac->imxdma; imxdma_prep_dma_cyclic() local 870 dev_dbg(imxdma->dev, "%s channel: %d buf_len=%zu period_len=%zu\n", imxdma_prep_dma_cyclic() 923 struct imxdma_engine *imxdma = imxdmac->imxdma; imxdma_prep_dma_memcpy() local 926 dev_dbg(imxdma->dev, "%s channel: %d src=0x%llx dst=0x%llx len=%zu\n", imxdma_prep_dma_memcpy() 954 struct imxdma_engine *imxdma = imxdmac->imxdma; imxdma_prep_dma_interleaved() local 957 dev_dbg(imxdma->dev, "%s channel: %d src_start=0x%llx dst_start=0x%llx\n" imxdma_prep_dma_interleaved() 996 struct imxdma_engine *imxdma = imxdmac->imxdma; imxdma_issue_pending() local 1000 spin_lock_irqsave(&imxdma->lock, flags); imxdma_issue_pending() 1007 dev_warn(imxdma->dev, imxdma_issue_pending() 1015 spin_unlock_irqrestore(&imxdma->lock, flags); imxdma_issue_pending() 1023 if (chan->device->dev != fdata->imxdma->dev) imxdma_filter_fn() 1036 struct imxdma_engine *imxdma = ofdma->of_dma_data; imxdma_xlate() local 1038 .imxdma = imxdma, imxdma_xlate() 1046 return dma_request_channel(imxdma->dma_device.cap_mask, imxdma_xlate() 1052 struct imxdma_engine *imxdma; imxdma_probe() local 1062 imxdma = devm_kzalloc(&pdev->dev, sizeof(*imxdma), GFP_KERNEL); imxdma_probe() 1063 if (!imxdma) imxdma_probe() 1066 imxdma->dev = &pdev->dev; imxdma_probe() 1067 imxdma->devtype = pdev->id_entry->driver_data; imxdma_probe() 1070 imxdma->base = devm_ioremap_resource(&pdev->dev, res); imxdma_probe() 1071 if (IS_ERR(imxdma->base)) imxdma_probe() 1072 return PTR_ERR(imxdma->base); imxdma_probe() 1078 imxdma->dma_ipg = devm_clk_get(&pdev->dev, "ipg"); imxdma_probe() 1079 if (IS_ERR(imxdma->dma_ipg)) imxdma_probe() 1080 return PTR_ERR(imxdma->dma_ipg); imxdma_probe() 1082 imxdma->dma_ahb = devm_clk_get(&pdev->dev, "ahb"); imxdma_probe() 1083 if (IS_ERR(imxdma->dma_ahb)) imxdma_probe() 1084 return PTR_ERR(imxdma->dma_ahb); imxdma_probe() 1086 clk_prepare_enable(imxdma->dma_ipg); imxdma_probe() 1087 clk_prepare_enable(imxdma->dma_ahb); imxdma_probe() 1090 imx_dmav1_writel(imxdma, DCR_DRST, DMA_DCR); imxdma_probe() 1092 if (is_imx1_dma(imxdma)) { imxdma_probe() 1094 dma_irq_handler, 0, "DMA", imxdma); imxdma_probe() 1096 dev_warn(imxdma->dev, "Can't register IRQ for DMA\n"); imxdma_probe() 1107 imxdma_err_handler, 0, "DMA", imxdma); imxdma_probe() 1109 dev_warn(imxdma->dev, "Can't register ERRIRQ for DMA\n"); imxdma_probe() 1115 imx_dmav1_writel(imxdma, DCR_DEN, DMA_DCR); imxdma_probe() 1118 imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DISR); imxdma_probe() 1121 imx_dmav1_writel(imxdma, (1 
imxdma_probe() [local]
  1052  struct imxdma_engine *imxdma;
  1062  imxdma = devm_kzalloc(&pdev->dev, sizeof(*imxdma), GFP_KERNEL);
  1063  if (!imxdma)
  1066  imxdma->dev = &pdev->dev;
  1067  imxdma->devtype = pdev->id_entry->driver_data;
  1070  imxdma->base = devm_ioremap_resource(&pdev->dev, res);
  1071  if (IS_ERR(imxdma->base))
  1072  return PTR_ERR(imxdma->base);
  1078  imxdma->dma_ipg = devm_clk_get(&pdev->dev, "ipg");
  1079  if (IS_ERR(imxdma->dma_ipg))
  1080  return PTR_ERR(imxdma->dma_ipg);
  1082  imxdma->dma_ahb = devm_clk_get(&pdev->dev, "ahb");
  1083  if (IS_ERR(imxdma->dma_ahb))
  1084  return PTR_ERR(imxdma->dma_ahb);
  1086  clk_prepare_enable(imxdma->dma_ipg);
  1087  clk_prepare_enable(imxdma->dma_ahb);
  1090  imx_dmav1_writel(imxdma, DCR_DRST, DMA_DCR);
  1092  if (is_imx1_dma(imxdma)) {
  1094  dma_irq_handler, 0, "DMA", imxdma);
  1096  dev_warn(imxdma->dev, "Can't register IRQ for DMA\n");
  1107  imxdma_err_handler, 0, "DMA", imxdma);
  1109  dev_warn(imxdma->dev, "Can't register ERRIRQ for DMA\n");
  1115  imx_dmav1_writel(imxdma, DCR_DEN, DMA_DCR);
  1118  imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DISR);
  1121  imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DIMR);
  1123  INIT_LIST_HEAD(&imxdma->dma_device.channels);
  1125  dma_cap_set(DMA_SLAVE, imxdma->dma_device.cap_mask);
  1126  dma_cap_set(DMA_CYCLIC, imxdma->dma_device.cap_mask);
  1127  dma_cap_set(DMA_MEMCPY, imxdma->dma_device.cap_mask);
  1128  dma_cap_set(DMA_INTERLEAVE, imxdma->dma_device.cap_mask);
  1132  imxdma->slots_2d[i].count = 0;
  1134  spin_lock_init(&imxdma->lock);
  1138  struct imxdma_channel *imxdmac = &imxdma->channel[i];
  1140  if (!is_imx1_dma(imxdma)) {
  1142  dma_irq_handler, 0, "DMA", imxdma);
  1144  dev_warn(imxdma->dev, "Can't register IRQ %d "
  1154  imxdmac->imxdma = imxdma;
  1162  imxdmac->chan.device = &imxdma->dma_device;
  1168  &imxdma->dma_device.channels);
  1171  imxdma->dma_device.dev = &pdev->dev;
  1173  imxdma->dma_device.device_alloc_chan_resources = imxdma_alloc_chan_resources;
  1174  imxdma->dma_device.device_free_chan_resources = imxdma_free_chan_resources;
  1175  imxdma->dma_device.device_tx_status = imxdma_tx_status;
  1176  imxdma->dma_device.device_prep_slave_sg = imxdma_prep_slave_sg;
  1177  imxdma->dma_device.device_prep_dma_cyclic = imxdma_prep_dma_cyclic;
  1178  imxdma->dma_device.device_prep_dma_memcpy = imxdma_prep_dma_memcpy;
  1179  imxdma->dma_device.device_prep_interleaved_dma = imxdma_prep_dma_interleaved;
  1180  imxdma->dma_device.device_config = imxdma_config;
  1181  imxdma->dma_device.device_terminate_all = imxdma_terminate_all;
  1182  imxdma->dma_device.device_issue_pending = imxdma_issue_pending;
  1184  platform_set_drvdata(pdev, imxdma);
  1186  imxdma->dma_device.copy_align = 2; /* 2^2 = 4 bytes alignment */
  1187  imxdma->dma_device.dev->dma_parms = &imxdma->dma_parms;
  1188  dma_set_max_seg_size(imxdma->dma_device.dev, 0xffffff);
  1190  ret = dma_async_device_register(&imxdma->dma_device);
  1198  imxdma_xlate, imxdma);
  1208  dma_async_device_unregister(&imxdma->dma_device);
  1210  clk_disable_unprepare(imxdma->dma_ipg);
  1211  clk_disable_unprepare(imxdma->dma_ahb);

imxdma_remove() [local]
  1217  struct imxdma_engine *imxdma = platform_get_drvdata(pdev);
  1219  dma_async_device_unregister(&imxdma->dma_device);
  1224  clk_disable_unprepare(imxdma->dma_ipg);
  1225  clk_disable_unprepare(imxdma->dma_ahb);
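Taken together, the probe hits show the standard dmaengine registration shape: declare capabilities in cap_mask, wire up the device_* callbacks, then publish the engine; imxdma_remove() unwinds in reverse, unregistering before disabling the ipg/ahb clocks. A reduced sketch of that registration step follows, with error handling and the per-channel loop omitted; the helper name is hypothetical and only a subset of the bits and callbacks listed above is shown:

/* Reduced sketch of the registration pattern at lines 1123-1190;
 * only two of the capability bits and two callbacks are shown. */
static int imxdma_register_engine(struct imxdma_engine *imxdma,
				  struct platform_device *pdev)
{
	INIT_LIST_HEAD(&imxdma->dma_device.channels);
	dma_cap_set(DMA_SLAVE, imxdma->dma_device.cap_mask);
	dma_cap_set(DMA_CYCLIC, imxdma->dma_device.cap_mask);

	imxdma->dma_device.dev = &pdev->dev;
	imxdma->dma_device.device_config = imxdma_config;
	imxdma->dma_device.device_issue_pending = imxdma_issue_pending;
	/* ...remaining device_* callbacks as listed above... */

	platform_set_drvdata(pdev, imxdma);
	return dma_async_device_register(&imxdma->dma_device);
}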