Lines Matching refs:nbpf

All lines referencing the identifier nbpf, as reported by the cross-referencer: the leading number is the line in the source file, the trailing "in func()" names the enclosing function, and the member/argument/local tags mark how nbpf is bound at that line. The code appears to be the Linux NBPF DMAC driver, drivers/dma/nbpfaxi.c.

202 	struct nbpf_device *nbpf;	member, in struct nbpf_channel
315 static inline u32 nbpf_read(struct nbpf_device *nbpf, in nbpf_read() argument
318 u32 data = ioread32(nbpf->base + offset); in nbpf_read()
319 dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_read()
320 __func__, nbpf->base, offset, data); in nbpf_read()
324 static inline void nbpf_write(struct nbpf_device *nbpf, in nbpf_write() argument
327 iowrite32(data, nbpf->base + offset); in nbpf_write()
328 dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_write()
329 __func__, nbpf->base, offset, data); in nbpf_write()
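Taken together, lines 315-329 form a matched pair of MMIO accessors around nbpf->base. A reconstruction is sketched below; the "unsigned int offset" parameter is an assumption, since the listing truncates both signatures at the wrap point:

	/* Sketch reconstructed from the fragments above; the offset
	 * parameter's exact type and name are assumed. */
	static inline u32 nbpf_read(struct nbpf_device *nbpf,
				    unsigned int offset)
	{
		u32 data = ioread32(nbpf->base + offset);

		dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
			__func__, nbpf->base, offset, data);
		return data;
	}

	static inline void nbpf_write(struct nbpf_device *nbpf,
				      unsigned int offset, u32 data)
	{
		iowrite32(data, nbpf->base + offset);
		dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
			__func__, nbpf->base, offset, data);
	}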
339 u32 status = nbpf_read(chan->nbpf, NBPF_DSTAT_END); in nbpf_status_get()
341 return status & BIT(chan - chan->nbpf->chan); in nbpf_status_get()
349 static u32 nbpf_error_get(struct nbpf_device *nbpf) in nbpf_error_get() argument
351 return nbpf_read(nbpf, NBPF_DSTAT_ER); in nbpf_error_get()
354 static struct nbpf_channel *nbpf_error_get_channel(struct nbpf_device *nbpf, u32 error) in nbpf_error_get_channel() argument
356 return nbpf->chan + __ffs(error); in nbpf_error_get_channel()
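Two idioms appear in lines 339-356. First, chan - chan->nbpf->chan recovers a channel's index by pointer subtraction (the channels sit in an array at the end of struct nbpf_device, per line 202 and the allocation at 1301-1302), so BIT(...) selects that channel's status bit. Second, __ffs(error) maps the lowest set bit of the error status to the first offending channel. A minimal standalone illustration, with hypothetical struct names:

	#include <stdio.h>

	struct chan { int busy; };
	struct dev  { struct chan chan[4]; };	/* stand-in for nbpf_device */

	int main(void)
	{
		struct dev d = { 0 };
		struct chan *c = &d.chan[2];

		/* chan - nbpf->chan: array index from pointer subtraction */
		printf("index = %td\n", c - d.chan);		/* prints 2 */

		/* __ffs(error): index of the lowest set bit; __builtin_ctz()
		 * is the userspace equivalent for a nonzero word */
		unsigned int error = 0x8;			/* bit 3 set */
		printf("first error channel = %d\n", __builtin_ctz(error));
		return 0;
	}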
394 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): next 0x%x, cur 0x%x\n", __func__, in nbpf_start()
427 static u32 nbpf_xfer_ds(struct nbpf_device *nbpf, size_t size) in nbpf_xfer_ds() argument
430 return min_t(int, __ffs(size), ilog2(nbpf->config->buffer_size * 8)); in nbpf_xfer_ds()
433 static size_t nbpf_xfer_size(struct nbpf_device *nbpf, in nbpf_xfer_size() argument
460 return nbpf_xfer_ds(nbpf, size); in nbpf_xfer_size()
509 mem_xfer = nbpf_xfer_ds(chan->nbpf, size); in nbpf_prep_one()
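The expression at line 430 computes, as a log2 exponent, the largest power-of-two transfer unit that both divides the requested size (__ffs(size)) and fits the hardware buffer (ilog2(buffer_size * 8)). A runnable arithmetic check with illustrative numbers (buffer_size = 16 is a made-up config value, not taken from the driver):

	#include <stdio.h>

	/* Userspace stand-ins for the kernel helpers used at line 430:
	 * __ffs() = index of lowest set bit, ilog2() = floor(log2). */
	#define my_ffs(x)	__builtin_ctzl(x)
	#define my_ilog2(x)	(63 - __builtin_clzl(x))

	int main(void)
	{
		unsigned long size = 24;	/* 8-byte-aligned length */
		unsigned long buffer_size = 16;	/* illustrative config value */
		unsigned long a = my_ffs(size);			/* 3 */
		unsigned long b = my_ilog2(buffer_size * 8);	/* ilog2(128) = 7 */
		unsigned long ds = a < b ? a : b;		/* min() -> 3 */

		printf("ds = %lu (unit = %lu bytes)\n", ds, 1UL << ds);
		return 0;
	}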
563 static void nbpf_configure(struct nbpf_device *nbpf) in nbpf_configure() argument
565 nbpf_write(nbpf, NBPF_CTRL, NBPF_CTRL_LVINT); in nbpf_configure()
826 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): force-free desc %p cookie %d\n", in nbpf_chan_idle()
874 chan->slave_dst_width = nbpf_xfer_size(chan->nbpf, in nbpf_config()
876 chan->slave_dst_burst = nbpf_xfer_size(chan->nbpf, in nbpf_config()
880 chan->slave_src_width = nbpf_xfer_size(chan->nbpf, in nbpf_config()
882 chan->slave_src_burst = nbpf_xfer_size(chan->nbpf, in nbpf_config()
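Lines 874-882 cache the negotiated bus widths and burst sizes when a dmaengine client configures the channel; each value is passed through nbpf_xfer_size() so it is clamped to what the controller supports. A hypothetical client-side configuration that would end up here via the framework (the address and values are illustrative):

	/* Illustrative dmaengine client setup; fifo_phys is assumed. */
	struct dma_slave_config cfg = {
		.direction      = DMA_DEV_TO_MEM,
		.src_addr       = fifo_phys,
		.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
		.src_maxburst   = 16,
	};
	dmaengine_slave_config(dchan, &cfg);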
1075 struct nbpf_device *nbpf = ofdma->of_dma_data; in nbpf_of_xlate() local
1082 dchan = dma_get_any_slave_channel(&nbpf->dma_dev); in nbpf_of_xlate()
1213 struct nbpf_device *nbpf = dev; in nbpf_err_irq() local
1214 u32 error = nbpf_error_get(nbpf); in nbpf_err_irq()
1216 dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq); in nbpf_err_irq()
1222 struct nbpf_channel *chan = nbpf_error_get_channel(nbpf, error); in nbpf_err_irq()
1226 error = nbpf_error_get(nbpf); in nbpf_err_irq()
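Lines 1213-1226 give the shape of the shared error-IRQ handler: latch DSTAT_ER, then peel off one erroring channel at a time via the lowest set bit, re-reading the status until it clears. The skeleton implied by the fragments (the IRQ_NONE early return and the per-channel recovery step are assumptions, as the listing elides them):

	static irqreturn_t nbpf_err_irq(int irq, void *dev)
	{
		struct nbpf_device *nbpf = dev;
		u32 error = nbpf_error_get(nbpf);

		dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq);

		if (!error)			/* assumed spurious-IRQ check */
			return IRQ_NONE;

		do {
			struct nbpf_channel *chan = nbpf_error_get_channel(nbpf, error);
			/* ... clear the error and idle this channel ... */
			error = nbpf_error_get(nbpf);
		} while (error);

		return IRQ_HANDLED;
	}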
1232 static int nbpf_chan_probe(struct nbpf_device *nbpf, int n) in nbpf_chan_probe() argument
1234 struct dma_device *dma_dev = &nbpf->dma_dev; in nbpf_chan_probe()
1235 struct nbpf_channel *chan = nbpf->chan + n; in nbpf_chan_probe()
1238 chan->nbpf = nbpf; in nbpf_chan_probe()
1239 chan->base = nbpf->base + NBPF_REG_CHAN_OFFSET + NBPF_REG_CHAN_SIZE * n; in nbpf_chan_probe()
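Line 1239 places each channel's register window at a fixed offset plus a fixed stride from the device base. A trivial check of the address arithmetic (the constants below are illustrative; the real NBPF_REG_CHAN_* values are defined in the driver):

	#include <stdio.h>

	#define CHAN_OFFSET	0x000	/* illustrative */
	#define CHAN_SIZE	0x40	/* illustrative */

	int main(void)
	{
		for (int n = 0; n < 4; n++)
			printf("chan %d window at base + 0x%03x\n",
			       n, CHAN_OFFSET + CHAN_SIZE * n);
		return 0;
	}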
1283 struct nbpf_device *nbpf; in nbpf_probe() local
1301 nbpf = devm_kzalloc(dev, sizeof(*nbpf) + num_channels * in nbpf_probe()
1302 sizeof(nbpf->chan[0]), GFP_KERNEL); in nbpf_probe()
1303 if (!nbpf) { in nbpf_probe()
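The allocation at lines 1301-1303 is the classic trailing-array pattern: one nbpf_device plus num_channels channel slots in a single devm allocation, which is why chan can later be indexed directly (lines 1235, 1344). On kernels that provide it, the overflow-checked struct_size() helper from <linux/overflow.h> expresses the same sizing; a sketch, assuming chan is declared as a flexible array member:

	nbpf = devm_kzalloc(dev, struct_size(nbpf, chan, num_channels),
			    GFP_KERNEL);
	if (!nbpf)
		return -ENOMEM;	/* the original's error path is elided here */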
1307 dma_dev = &nbpf->dma_dev; in nbpf_probe()
1311 nbpf->base = devm_ioremap_resource(dev, iomem); in nbpf_probe()
1312 if (IS_ERR(nbpf->base)) in nbpf_probe()
1313 return PTR_ERR(nbpf->base); in nbpf_probe()
1315 nbpf->clk = devm_clk_get(dev, NULL); in nbpf_probe()
1316 if (IS_ERR(nbpf->clk)) in nbpf_probe()
1317 return PTR_ERR(nbpf->clk); in nbpf_probe()
1319 nbpf->config = cfg; in nbpf_probe()
1344 nbpf->chan[i].irq = irqbuf[0]; in nbpf_probe()
1353 for (i = 0, chan = nbpf->chan; i <= num_channels; in nbpf_probe()
1361 if (chan != nbpf->chan + num_channels) in nbpf_probe()
1371 nbpf->chan[i].irq = irq; in nbpf_probe()
1376 IRQF_SHARED, "dma error", nbpf); in nbpf_probe()
1384 ret = nbpf_chan_probe(nbpf, i); in nbpf_probe()
1421 platform_set_drvdata(pdev, nbpf); in nbpf_probe()
1423 ret = clk_prepare_enable(nbpf->clk); in nbpf_probe()
1427 nbpf_configure(nbpf); in nbpf_probe()
1433 ret = of_dma_controller_register(np, nbpf_of_xlate, nbpf); in nbpf_probe()
1442 clk_disable_unprepare(nbpf->clk); in nbpf_probe()
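Lines 1421-1442 sketch the tail of probe: enable the clock, program the global config, then register with the DT DMA framework; the clk_disable_unprepare() at 1442 is the unwind for a later failure. The implied goto-style error handling is sketched below (the label name is illustrative, and other registration steps between these lines, such as dmaengine device registration, are elided):

	ret = clk_prepare_enable(nbpf->clk);
	if (ret < 0)
		return ret;

	nbpf_configure(nbpf);

	ret = of_dma_controller_register(np, nbpf_of_xlate, nbpf);
	if (ret < 0)
		goto e_clk_off;

	return 0;

e_clk_off:
	clk_disable_unprepare(nbpf->clk);
	return ret;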
1449 struct nbpf_device *nbpf = platform_get_drvdata(pdev); in nbpf_remove() local
1452 dma_async_device_unregister(&nbpf->dma_dev); in nbpf_remove()
1453 clk_disable_unprepare(nbpf->clk); in nbpf_remove()
1475 struct nbpf_device *nbpf = platform_get_drvdata(to_platform_device(dev)); in nbpf_runtime_suspend() local
1476 clk_disable_unprepare(nbpf->clk); in nbpf_runtime_suspend()
1482 struct nbpf_device *nbpf = platform_get_drvdata(to_platform_device(dev)); in nbpf_runtime_resume() local
1483 return clk_prepare_enable(nbpf->clk); in nbpf_runtime_resume()
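The pair at lines 1475-1483 is a standard runtime-PM clock gate. To take effect it would be wired into a dev_pm_ops table on the platform driver; a sketch using the conventional macro (the table name is illustrative):

	static const struct dev_pm_ops nbpf_pm_ops = {
		SET_RUNTIME_PM_OPS(nbpf_runtime_suspend, nbpf_runtime_resume,
				   NULL)
	};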