Lines matching refs: od. Every reference to 'od' (struct omap_dmadev *, the driver-private controller context) in the OMAP sDMA dmaengine driver; each entry gives the source line number, the matched text, and the enclosing function, with 'argument' or 'local' marking how 'od' enters scope.

226 static void omap_dma_glbl_write(struct omap_dmadev *od, unsigned reg, unsigned val)  in omap_dma_glbl_write()  argument
228 const struct omap_dma_reg *r = od->reg_map + reg; in omap_dma_glbl_write()
232 omap_dma_write(val, r->type, od->base + r->offset); in omap_dma_glbl_write()
235 static unsigned omap_dma_glbl_read(struct omap_dmadev *od, unsigned reg) in omap_dma_glbl_read() argument
237 const struct omap_dma_reg *r = od->reg_map + reg; in omap_dma_glbl_read()
241 return omap_dma_read(r->type, od->base + r->offset); in omap_dma_glbl_read()
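Both global accessors translate a logical register index through od->reg_map before touching MMIO, so one code path serves controller revisions whose registers differ in offset and access width. A minimal sketch of the dispatch these lines rely on, assuming a width enum of this shape (the enum names are illustrative, not quoted from the driver):

    /* r->type selects the access width recorded in the reg_map entry */
    static unsigned omap_dma_read(unsigned type, void __iomem *addr)
    {
        switch (type) {
        case OMAP_DMA_REG_16BIT:            /* illustrative enum names */
            return readw_relaxed(addr);
        case OMAP_DMA_REG_32BIT:
            return readl_relaxed(addr);
        default:
            WARN_ON(1);
            return 0;
        }
    }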
276 static void omap_dma_assign(struct omap_dmadev *od, struct omap_chan *c, in omap_dma_assign() argument
279 c->channel_base = od->base + od->plat->channel_stride * lch; in omap_dma_assign()
281 od->lch_map[lch] = c; in omap_dma_assign()
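Channel register windows sit at a fixed stride from the controller base, so binding a virtual channel to logical channel lch is pointer arithmetic plus one bookkeeping store; lch_map is the reverse map the interrupt handler walks later. A sketch built directly from the matched lines:

    static void omap_dma_assign(struct omap_dmadev *od, struct omap_chan *c,
                                unsigned lch)
    {
        /* per-channel register window: base + lch * channel_stride */
        c->channel_base = od->base + od->plat->channel_stride * lch;

        /* reverse lookup so the IRQ handler can map a status bit to its channel */
        od->lch_map[lch] = c;
    }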
286 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_start() local
288 if (__dma_omap15xx(od->plat->dma_attr)) in omap_dma_start()
304 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_stop() local
313 if (od->plat->errata & DMA_ERRATA_i541 && val & CCR_TRIGGER_SRC) { in omap_dma_stop()
317 sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG); in omap_dma_stop()
320 omap_dma_glbl_write(od, OCP_SYSCONFIG, val); in omap_dma_stop()
343 omap_dma_glbl_write(od, OCP_SYSCONFIG, sysconfig); in omap_dma_stop()
351 if (!__dma_omap15xx(od->plat->dma_attr) && c->cyclic) { in omap_dma_stop()
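The i541 lines bracket the channel-disable sequence in omap_dma_stop(): for a source-triggered channel (CCR_TRIGGER_SRC set), the current OCP_SYSCONFIG value is saved, the module is forced to stay out of standby while the channel drains, and the saved value is restored afterwards. A sketch of that bracket; the MIDLEMODE constant names are assumptions, not the driver's:

    if (od->plat->errata & DMA_ERRATA_i541 && val & CCR_TRIGGER_SRC) {
        unsigned sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG);

        val = sysconfig & ~SYSCONFIG_MIDLEMODE_MASK;    /* assumed macro name */
        val |= SYSCONFIG_MIDLEMODE_NO_STANDBY;          /* assumed macro name */
        omap_dma_glbl_write(od, OCP_SYSCONFIG, val);

        /* ... clear the CCR enable bit and wait for the channel to drain ... */

        omap_dma_glbl_write(od, OCP_SYSCONFIG, sysconfig); /* restore saved mode */
    }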
486 struct omap_dmadev *od = devid; in omap_dma_irq() local
489 spin_lock(&od->irq_lock); in omap_dma_irq()
491 status = omap_dma_glbl_read(od, IRQSTATUS_L1); in omap_dma_irq()
492 status &= od->irq_enable_mask; in omap_dma_irq()
494 spin_unlock(&od->irq_lock); in omap_dma_irq()
506 c = od->lch_map[channel]; in omap_dma_irq()
509 dev_err(od->ddev.dev, "invalid channel %u\n", channel); in omap_dma_irq()
514 omap_dma_glbl_write(od, IRQSTATUS_L1, mask); in omap_dma_irq()
519 spin_unlock(&od->irq_lock); in omap_dma_irq()
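Taken together, the omap_dma_irq() matches give the whole interrupt path: read IRQSTATUS_L1 under irq_lock, mask against the bits this driver enabled, return IRQ_NONE on a shared line that was not ours, then walk the remaining bits, resolve each through lch_map, and acknowledge by writing the bit back (the status register is write-one-to-clear). A condensed sketch; per-channel completion handling is elided:

    static irqreturn_t omap_dma_irq(int irq, void *devid)
    {
        struct omap_dmadev *od = devid;
        unsigned status;

        spin_lock(&od->irq_lock);
        status = omap_dma_glbl_read(od, IRQSTATUS_L1);
        status &= od->irq_enable_mask;            /* only bits we enabled */
        if (status == 0) {
            spin_unlock(&od->irq_lock);
            return IRQ_NONE;                      /* shared line, not ours */
        }

        while (status) {
            unsigned channel = ffs(status) - 1;   /* ffs() is 1-based */
            unsigned mask = BIT(channel);

            status &= ~mask;
            if (!od->lch_map[channel]) {
                dev_err(od->ddev.dev, "invalid channel %u\n", channel);
                continue;
            }
            omap_dma_glbl_write(od, IRQSTATUS_L1, mask);  /* ack (W1C) */
            /* ... complete the transfer on od->lch_map[channel] ... */
        }

        spin_unlock(&od->irq_lock);
        return IRQ_HANDLED;
    }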
526 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_alloc_chan_resources() local
530 if (od->legacy) { in omap_dma_alloc_chan_resources()
538 dev_dbg(od->ddev.dev, "allocating channel %u for %u\n", in omap_dma_alloc_chan_resources()
542 omap_dma_assign(od, c, c->dma_ch); in omap_dma_alloc_chan_resources()
544 if (!od->legacy) { in omap_dma_alloc_chan_resources()
547 spin_lock_irq(&od->irq_lock); in omap_dma_alloc_chan_resources()
549 omap_dma_glbl_write(od, IRQSTATUS_L1, val); in omap_dma_alloc_chan_resources()
550 od->irq_enable_mask |= val; in omap_dma_alloc_chan_resources()
551 omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask); in omap_dma_alloc_chan_resources()
553 val = omap_dma_glbl_read(od, IRQENABLE_L0); in omap_dma_alloc_chan_resources()
555 omap_dma_glbl_write(od, IRQENABLE_L0, val); in omap_dma_alloc_chan_resources()
556 spin_unlock_irq(&od->irq_lock); in omap_dma_alloc_chan_resources()
561 if (__dma_omap16xx(od->plat->dma_attr)) { in omap_dma_alloc_chan_resources()
572 if (od->plat->errata & DMA_ERRATA_IFRAME_BUFFERING) in omap_dma_alloc_chan_resources()
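In the non-legacy path, allocating a channel also rewires its interrupt routing under irq_lock: stale status for the channel is acked, its bit joins irq_enable_mask and is pushed to IRQENABLE_L1, and the same bit is dropped from IRQENABLE_L0 so the event stops arriving on the line the legacy code services. The sequence, following lines 547-556; the clear-bit step between the L0 read and write is inferred from the read-modify-write shape:

    spin_lock_irq(&od->irq_lock);
    val = BIT(c->dma_ch);
    omap_dma_glbl_write(od, IRQSTATUS_L1, val);   /* ack stale status (W1C) */
    od->irq_enable_mask |= val;
    omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);

    val = omap_dma_glbl_read(od, IRQENABLE_L0);
    val &= ~BIT(c->dma_ch);                       /* steer this channel off L0 */
    omap_dma_glbl_write(od, IRQENABLE_L0, val);
    spin_unlock_irq(&od->irq_lock);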
580 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_free_chan_resources() local
583 if (!od->legacy) { in omap_dma_free_chan_resources()
584 spin_lock_irq(&od->irq_lock); in omap_dma_free_chan_resources()
585 od->irq_enable_mask &= ~BIT(c->dma_ch); in omap_dma_free_chan_resources()
586 omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask); in omap_dma_free_chan_resources()
587 spin_unlock_irq(&od->irq_lock); in omap_dma_free_chan_resources()
591 od->lch_map[c->dma_ch] = NULL; in omap_dma_free_chan_resources()
595 dev_dbg(od->ddev.dev, "freeing channel for %u\n", c->dma_sig); in omap_dma_free_chan_resources()
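Freeing a channel mirrors allocation: drop the channel's bit from irq_enable_mask so its interrupts are masked at IRQENABLE_L1, then clear the lch_map slot so a late interrupt cannot resolve to freed state. Sketch from the matched lines:

    if (!od->legacy) {
        spin_lock_irq(&od->irq_lock);
        od->irq_enable_mask &= ~BIT(c->dma_ch);   /* mask this channel's IRQs */
        omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);
        spin_unlock_irq(&od->irq_lock);
    }

    od->lch_map[c->dma_ch] = NULL;                /* IRQ handler must not find it */
    dev_dbg(od->ddev.dev, "freeing channel for %u\n", c->dma_sig);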
638 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_chan_read_3_3() local
642 if (val == 0 && od->plat->errata & DMA_ERRATA_3_3) in omap_dma_chan_read_3_3()
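DMA_ERRATA_3_3 works around silicon on which the first read of certain channel registers can spuriously return zero; the fix is simply to read again. The matched lines imply a wrapper of roughly this shape (omap_dma_chan_read() is the plain per-channel accessor):

    static unsigned omap_dma_chan_read_3_3(struct omap_chan *c, unsigned reg)
    {
        struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
        unsigned val;

        val = omap_dma_chan_read(c, reg);
        if (val == 0 && od->plat->errata & DMA_ERRATA_3_3)
            val = omap_dma_chan_read(c, reg);   /* retry the spurious zero */

        return val;
    }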
650 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_get_src_pos() local
653 if (__dma_omap15xx(od->plat->dma_attr)) { in omap_dma_get_src_pos()
676 struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device); in omap_dma_get_dst_pos() local
679 if (__dma_omap15xx(od->plat->dma_attr)) { in omap_dma_get_dst_pos()
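Both position helpers fork on __dma_omap15xx(): OMAP1510-class controllers report transfer progress through a different register than later parts, whose current-address registers are read via the 3.3-safe wrapper above. A simplified sketch of the source-side helper; CPC and CSAC are sDMA register names used here as assumptions, and the driver's handling of a not-yet-started transfer is elided:

    static dma_addr_t omap_dma_get_src_pos(struct omap_chan *c)
    {
        struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);

        if (__dma_omap15xx(od->plat->dma_attr))
            return omap_dma_chan_read(c, CPC);      /* 15xx progress counter */

        return omap_dma_chan_read_3_3(c, CSAC);     /* current source address */
    }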
765 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_prep_slave_sg() local
833 if (od->plat->errata & DMA_ERRATA_PARALLEL_CHANNELS) in omap_dma_prep_slave_sg()
863 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_prep_dma_cyclic() local
950 if (__dma_omap15xx(od->plat->dma_attr)) in omap_dma_prep_dma_cyclic()
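The prep routines show the driver's quirk-gating idiom: od->plat->errata is a bitmask fixed at probe time, and descriptor setup tests individual flags inline instead of branching per SoC. Illustrative only; the descriptor fields and flag effects below are assumptions about what sits behind the matched conditions:

    if (od->plat->errata & DMA_ERRATA_PARALLEL_CHANNELS)
        d->clnk_ctrl = c->dma_ch;                /* assumed: self-link the channel */

    if (__dma_omap15xx(od->plat->dma_attr))
        d->ccr |= CCR_AUTO_INIT | CCR_REPEAT;    /* assumed: 15xx cyclic restart */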
1094 static int omap_dma_chan_init(struct omap_dmadev *od) in omap_dma_chan_init() argument
1102 c->reg_map = od->reg_map; in omap_dma_chan_init()
1104 vchan_init(&c->vc, &od->ddev); in omap_dma_chan_init()
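omap_dma_chan_init() builds one channel: allocate it, share the device-wide reg_map, and hand it to the virt-dma core. vchan_init() links the channel into od->ddev.channels, which is also how omap_dma_free() finds it again. A sketch (the allocation and desc_free callback around the matched lines are inferred):

    static int omap_dma_chan_init(struct omap_dmadev *od)
    {
        struct omap_chan *c;

        c = kzalloc(sizeof(*c), GFP_KERNEL);
        if (!c)
            return -ENOMEM;

        c->reg_map = od->reg_map;           /* shared register layout */
        vchan_init(&c->vc, &od->ddev);      /* links c into od->ddev.channels */

        return 0;
    }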
1110 static void omap_dma_free(struct omap_dmadev *od) in omap_dma_free() argument
1112 tasklet_kill(&od->task); in omap_dma_free()
1113 while (!list_empty(&od->ddev.channels)) { in omap_dma_free()
1114 struct omap_chan *c = list_first_entry(&od->ddev.channels, in omap_dma_free()
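Teardown is the mirror image: kill the device tasklet, then pop each channel off od->ddev.channels (the list vchan_init() populated) and free it. Sketch from the matched lines:

    static void omap_dma_free(struct omap_dmadev *od)
    {
        tasklet_kill(&od->task);

        while (!list_empty(&od->ddev.channels)) {
            struct omap_chan *c = list_first_entry(&od->ddev.channels,
                                        struct omap_chan, vc.chan.device_node);

            list_del(&c->vc.chan.device_node);
            tasklet_kill(&c->vc.task);      /* per-vchan completion tasklet */
            kfree(c);
        }
    }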
1129 struct omap_dmadev *od; in omap_dma_probe() local
1133 od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL); in omap_dma_probe()
1134 if (!od) in omap_dma_probe()
1138 od->base = devm_ioremap_resource(&pdev->dev, res); in omap_dma_probe()
1139 if (IS_ERR(od->base)) in omap_dma_probe()
1140 return PTR_ERR(od->base); in omap_dma_probe()
1142 od->plat = omap_get_plat_info(); in omap_dma_probe()
1143 if (!od->plat) in omap_dma_probe()
1146 od->reg_map = od->plat->reg_map; in omap_dma_probe()
1148 dma_cap_set(DMA_SLAVE, od->ddev.cap_mask); in omap_dma_probe()
1149 dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask); in omap_dma_probe()
1150 dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask); in omap_dma_probe()
1151 od->ddev.device_alloc_chan_resources = omap_dma_alloc_chan_resources; in omap_dma_probe()
1152 od->ddev.device_free_chan_resources = omap_dma_free_chan_resources; in omap_dma_probe()
1153 od->ddev.device_tx_status = omap_dma_tx_status; in omap_dma_probe()
1154 od->ddev.device_issue_pending = omap_dma_issue_pending; in omap_dma_probe()
1155 od->ddev.device_prep_slave_sg = omap_dma_prep_slave_sg; in omap_dma_probe()
1156 od->ddev.device_prep_dma_cyclic = omap_dma_prep_dma_cyclic; in omap_dma_probe()
1157 od->ddev.device_prep_dma_memcpy = omap_dma_prep_dma_memcpy; in omap_dma_probe()
1158 od->ddev.device_config = omap_dma_slave_config; in omap_dma_probe()
1159 od->ddev.device_pause = omap_dma_pause; in omap_dma_probe()
1160 od->ddev.device_resume = omap_dma_resume; in omap_dma_probe()
1161 od->ddev.device_terminate_all = omap_dma_terminate_all; in omap_dma_probe()
1162 od->ddev.src_addr_widths = OMAP_DMA_BUSWIDTHS; in omap_dma_probe()
1163 od->ddev.dst_addr_widths = OMAP_DMA_BUSWIDTHS; in omap_dma_probe()
1164 od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in omap_dma_probe()
1165 od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in omap_dma_probe()
1166 od->ddev.dev = &pdev->dev; in omap_dma_probe()
1167 INIT_LIST_HEAD(&od->ddev.channels); in omap_dma_probe()
1168 INIT_LIST_HEAD(&od->pending); in omap_dma_probe()
1169 spin_lock_init(&od->lock); in omap_dma_probe()
1170 spin_lock_init(&od->irq_lock); in omap_dma_probe()
1172 tasklet_init(&od->task, omap_dma_sched, (unsigned long)od); in omap_dma_probe()
1174 od->dma_requests = OMAP_SDMA_REQUESTS; in omap_dma_probe()
1177 &od->dma_requests)) { in omap_dma_probe()
1184 rc = omap_dma_chan_init(od); in omap_dma_probe()
1186 omap_dma_free(od); in omap_dma_probe()
1194 od->legacy = true; in omap_dma_probe()
1197 od->irq_enable_mask = 0; in omap_dma_probe()
1198 omap_dma_glbl_write(od, IRQENABLE_L1, 0); in omap_dma_probe()
1201 IRQF_SHARED, "omap-dma-engine", od); in omap_dma_probe()
1206 rc = dma_async_device_register(&od->ddev); in omap_dma_probe()
1210 omap_dma_free(od); in omap_dma_probe()
1214 platform_set_drvdata(pdev, od); in omap_dma_probe()
1217 omap_dma_info.dma_cap = od->ddev.cap_mask; in omap_dma_probe()
1224 dma_async_device_unregister(&od->ddev); in omap_dma_probe()
1225 omap_dma_free(od); in omap_dma_probe()
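Read top to bottom, the probe matches lay out the whole bring-up order and its unwind path: map the registers, fetch platform data, fill in the dmaengine ops table, initialise lists, locks and the tasklet, size the request space, create channels, claim the interrupt (falling back to od->legacy = true), and register with the dmaengine core, calling omap_dma_free() on any failure past channel creation. A condensed skeleton using only calls visible in the matched lines plus the standard resource lookup; the -EPROBE_DEFER return for missing platform data is an assumption:

    static int omap_dma_probe(struct platform_device *pdev)
    {
        struct resource *res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        struct omap_dmadev *od;
        int rc;

        od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL);
        if (!od)
            return -ENOMEM;

        od->base = devm_ioremap_resource(&pdev->dev, res);
        if (IS_ERR(od->base))
            return PTR_ERR(od->base);

        od->plat = omap_get_plat_info();
        if (!od->plat)
            return -EPROBE_DEFER;           /* assumed: wait for platform data */

        od->reg_map = od->plat->reg_map;
        /* ... set cap_mask bits and od->ddev callbacks, INIT_LIST_HEAD,
         *     spin_lock_init x2, tasklet_init (lines 1148-1172) ... */

        od->dma_requests = OMAP_SDMA_REQUESTS;  /* default; DT may override */

        rc = omap_dma_chan_init(od);            /* called per channel in full flow */
        if (rc) {
            omap_dma_free(od);
            return rc;
        }

        /* ... request the L1 IRQ; on failure fall back to od->legacy = true
         *     and skip IRQENABLE_L1 setup (lines 1194-1201) ... */

        rc = dma_async_device_register(&od->ddev);
        if (rc) {
            omap_dma_free(od);
            return rc;
        }

        platform_set_drvdata(pdev, od);
        return 0;
    }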
1236 struct omap_dmadev *od = platform_get_drvdata(pdev); in omap_dma_remove() local
1241 dma_async_device_unregister(&od->ddev); in omap_dma_remove()
1243 if (!od->legacy) { in omap_dma_remove()
1245 omap_dma_glbl_write(od, IRQENABLE_L0, 0); in omap_dma_remove()
1248 omap_dma_free(od); in omap_dma_remove()
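Remove undoes probe in reverse, with one hardware step: when the driver owns the interrupt path (!od->legacy), IRQENABLE_L0 is cleared before freeing so no event can fire into freed state. Sketch:

    static int omap_dma_remove(struct platform_device *pdev)
    {
        struct omap_dmadev *od = platform_get_drvdata(pdev);

        dma_async_device_unregister(&od->ddev);

        if (!od->legacy)
            omap_dma_glbl_write(od, IRQENABLE_L0, 0);  /* mask all channels */

        omap_dma_free(od);
        return 0;
    }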
1275 struct omap_dmadev *od = to_omap_dma_dev(chan->device); in omap_dma_filter_fn() local
1279 if (req <= od->dma_requests) { in omap_dma_filter_fn()
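The filter function validates a requested sDMA request line against the controller's capacity before binding it: anything above od->dma_requests cannot exist on this device. Sketch assuming the conventional dmaengine filter signature; the full version also checks that the channel belongs to this driver before casting:

    static bool omap_dma_filter_fn(struct dma_chan *chan, void *param)
    {
        struct omap_dmadev *od = to_omap_dma_dev(chan->device);
        struct omap_chan *c = to_omap_dma_chan(chan);
        unsigned req = *(unsigned *)param;      /* requested sDMA request line */

        if (req <= od->dma_requests) {
            c->dma_sig = req;                   /* bind the hardware request */
            return true;
        }
        return false;
    }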