nbpf              199 drivers/dma/nbpfaxi.c 	struct nbpf_device *nbpf;
nbpf              315 drivers/dma/nbpfaxi.c static inline u32 nbpf_read(struct nbpf_device *nbpf,
nbpf              318 drivers/dma/nbpfaxi.c 	u32 data = ioread32(nbpf->base + offset);
nbpf              319 drivers/dma/nbpfaxi.c 	dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
nbpf              320 drivers/dma/nbpfaxi.c 		__func__, nbpf->base, offset, data);
nbpf              324 drivers/dma/nbpfaxi.c static inline void nbpf_write(struct nbpf_device *nbpf,
nbpf              327 drivers/dma/nbpfaxi.c 	iowrite32(data, nbpf->base + offset);
nbpf              328 drivers/dma/nbpfaxi.c 	dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
nbpf              329 drivers/dma/nbpfaxi.c 		__func__, nbpf->base, offset, data);
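
The two accessors at lines 315-329 are thin MMIO wrappers with dev_dbg tracing. A minimal sketch of the same pair, assuming the parameter lines omitted from this listing take an unsigned int register offset:

/* Sketch only: struct nbpf_device and the register map are the driver's own. */
#include <linux/io.h>
#include <linux/device.h>

static inline u32 nbpf_read(struct nbpf_device *nbpf, unsigned int offset)
{
	u32 data = ioread32(nbpf->base + offset);

	/* Every register access is traced at debug level. */
	dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
		__func__, nbpf->base, offset, data);
	return data;
}

static inline void nbpf_write(struct nbpf_device *nbpf, unsigned int offset,
			      u32 data)
{
	iowrite32(data, nbpf->base + offset);
	dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
		__func__, nbpf->base, offset, data);
}
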
nbpf              339 drivers/dma/nbpfaxi.c 	u32 status = nbpf_read(chan->nbpf, NBPF_DSTAT_END);
nbpf              341 drivers/dma/nbpfaxi.c 	return status & BIT(chan - chan->nbpf->chan);
nbpf              349 drivers/dma/nbpfaxi.c static u32 nbpf_error_get(struct nbpf_device *nbpf)
nbpf              351 drivers/dma/nbpfaxi.c 	return nbpf_read(nbpf, NBPF_DSTAT_ER);
nbpf              354 drivers/dma/nbpfaxi.c static struct nbpf_channel *nbpf_error_get_channel(struct nbpf_device *nbpf, u32 error)
nbpf              356 drivers/dma/nbpfaxi.c 	return nbpf->chan + __ffs(error);
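
Both status registers referenced above report one channel per bit, so a channel's index into the chan[] array doubles as its bit number. A minimal sketch of that mapping; the enclosing function name nbpf_status_get is assumed, since the listing shows only its body:

#include <linux/bits.h>
#include <linux/bitops.h>

/* Sketch: channel i is reported in bit i of NBPF_DSTAT_END and NBPF_DSTAT_ER. */
static bool nbpf_status_get(struct nbpf_channel *chan)
{
	u32 status = nbpf_read(chan->nbpf, NBPF_DSTAT_END);

	/* chan - chan->nbpf->chan is this channel's index in the array. */
	return status & BIT(chan - chan->nbpf->chan);
}

static struct nbpf_channel *nbpf_error_get_channel(struct nbpf_device *nbpf,
						   u32 error)
{
	/* The lowest set error bit picks the first channel needing attention. */
	return nbpf->chan + __ffs(error);
}
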
nbpf              394 drivers/dma/nbpfaxi.c 	dev_dbg(chan->nbpf->dma_dev.dev, "%s(): next 0x%x, cur 0x%x\n", __func__,
nbpf              427 drivers/dma/nbpfaxi.c static u32 nbpf_xfer_ds(struct nbpf_device *nbpf, size_t size,
nbpf              430 drivers/dma/nbpfaxi.c 	int max_burst = nbpf->config->buffer_size * 8;
nbpf              432 drivers/dma/nbpfaxi.c 	if (nbpf->max_burst_mem_read || nbpf->max_burst_mem_write) {
nbpf              435 drivers/dma/nbpfaxi.c 			max_burst = min_not_zero(nbpf->max_burst_mem_read,
nbpf              436 drivers/dma/nbpfaxi.c 						 nbpf->max_burst_mem_write);
nbpf              439 drivers/dma/nbpfaxi.c 			if (nbpf->max_burst_mem_read)
nbpf              440 drivers/dma/nbpfaxi.c 				max_burst = nbpf->max_burst_mem_read;
nbpf              443 drivers/dma/nbpfaxi.c 			if (nbpf->max_burst_mem_write)
nbpf              444 drivers/dma/nbpfaxi.c 				max_burst = nbpf->max_burst_mem_write;
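
Lines 430-444 pick the memory-side burst cap per transfer direction; a condensed sketch of that selection inside nbpf_xfer_ds(), assuming the switch runs over enum dma_transfer_direction and that other directions keep the hardware default:

	/* Inside nbpf_xfer_ds(): default is 8 bursts per hardware buffer. */
	int max_burst = nbpf->config->buffer_size * 8;

	if (nbpf->max_burst_mem_read || nbpf->max_burst_mem_write) {
		switch (direction) {
		case DMA_MEM_TO_MEM:
			/* Both ends are memory: take the tighter non-zero cap. */
			max_burst = min_not_zero(nbpf->max_burst_mem_read,
						 nbpf->max_burst_mem_write);
			break;
		case DMA_MEM_TO_DEV:
			/* Only the read side touches memory. */
			if (nbpf->max_burst_mem_read)
				max_burst = nbpf->max_burst_mem_read;
			break;
		case DMA_DEV_TO_MEM:
			/* Only the write side touches memory. */
			if (nbpf->max_burst_mem_write)
				max_burst = nbpf->max_burst_mem_write;
			break;
		default:
			break;
		}
	}
	/* ... max_burst then bounds the transfer-size encoding returned below ... */
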
nbpf              456 drivers/dma/nbpfaxi.c static size_t nbpf_xfer_size(struct nbpf_device *nbpf,
nbpf              484 drivers/dma/nbpfaxi.c 	return nbpf_xfer_ds(nbpf, size, DMA_TRANS_NONE);
nbpf              533 drivers/dma/nbpfaxi.c 	mem_xfer = nbpf_xfer_ds(chan->nbpf, size, direction);
nbpf              587 drivers/dma/nbpfaxi.c static void nbpf_configure(struct nbpf_device *nbpf)
nbpf              589 drivers/dma/nbpfaxi.c 	nbpf_write(nbpf, NBPF_CTRL, NBPF_CTRL_LVINT);
nbpf              850 drivers/dma/nbpfaxi.c 		dev_dbg(chan->nbpf->dma_dev.dev, "%s(): force-free desc %p cookie %d\n",
nbpf              898 drivers/dma/nbpfaxi.c 	chan->slave_dst_width = nbpf_xfer_size(chan->nbpf,
nbpf              900 drivers/dma/nbpfaxi.c 	chan->slave_dst_burst = nbpf_xfer_size(chan->nbpf,
nbpf              904 drivers/dma/nbpfaxi.c 	chan->slave_src_width = nbpf_xfer_size(chan->nbpf,
nbpf              906 drivers/dma/nbpfaxi.c 	chan->slave_src_burst = nbpf_xfer_size(chan->nbpf,
nbpf             1084 drivers/dma/nbpfaxi.c 	struct nbpf_device *nbpf = ofdma->of_dma_data;
nbpf             1091 drivers/dma/nbpfaxi.c 	dchan = dma_get_any_slave_channel(&nbpf->dma_dev);
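
Lines 1084 and 1091 sit in the driver's OF translation callback; a minimal sketch of that shape, assuming the standard of_dma xlate signature and omitting the per-channel setup taken from dma_spec in between:

#include <linux/dmaengine.h>
#include <linux/of_dma.h>

static struct dma_chan *nbpf_of_xlate(struct of_phandle_args *dma_spec,
				      struct of_dma *ofdma)
{
	struct nbpf_device *nbpf = ofdma->of_dma_data;
	struct dma_chan *dchan;

	/* Any idle channel will do; the slave parameters come from dma_spec. */
	dchan = dma_get_any_slave_channel(&nbpf->dma_dev);
	if (!dchan)
		return NULL;

	/* ... configure the channel from dma_spec->args[] here ... */

	return dchan;
}
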
nbpf             1219 drivers/dma/nbpfaxi.c 	struct nbpf_device *nbpf = dev;
nbpf             1220 drivers/dma/nbpfaxi.c 	u32 error = nbpf_error_get(nbpf);
nbpf             1222 drivers/dma/nbpfaxi.c 	dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq);
nbpf             1228 drivers/dma/nbpfaxi.c 		struct nbpf_channel *chan = nbpf_error_get_channel(nbpf, error);
nbpf             1232 drivers/dma/nbpfaxi.c 		error = nbpf_error_get(nbpf);
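
The error interrupt handler drains NBPF_DSTAT_ER bit by bit, re-reading it until no channel flags an error. A minimal sketch of that loop; the handler name is assumed, and nbpf_handle_chan_error() is a hypothetical placeholder for the driver's own per-channel recovery:

#include <linux/interrupt.h>

static irqreturn_t nbpf_err_irq(int irq, void *dev)
{
	struct nbpf_device *nbpf = dev;
	u32 error = nbpf_error_get(nbpf);

	dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq);

	if (!error)
		return IRQ_NONE;

	do {
		struct nbpf_channel *chan = nbpf_error_get_channel(nbpf, error);

		/* Hypothetical placeholder: abort and clean up the erring channel. */
		nbpf_handle_chan_error(chan);

		/* More channels may have flagged errors meanwhile: re-read. */
		error = nbpf_error_get(nbpf);
	} while (error);

	return IRQ_HANDLED;
}
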
nbpf             1238 drivers/dma/nbpfaxi.c static int nbpf_chan_probe(struct nbpf_device *nbpf, int n)
nbpf             1240 drivers/dma/nbpfaxi.c 	struct dma_device *dma_dev = &nbpf->dma_dev;
nbpf             1241 drivers/dma/nbpfaxi.c 	struct nbpf_channel *chan = nbpf->chan + n;
nbpf             1244 drivers/dma/nbpfaxi.c 	chan->nbpf = nbpf;
nbpf             1245 drivers/dma/nbpfaxi.c 	chan->base = nbpf->base + NBPF_REG_CHAN_OFFSET + NBPF_REG_CHAN_SIZE * n;
nbpf             1288 drivers/dma/nbpfaxi.c 	struct nbpf_device *nbpf;
nbpf             1306 drivers/dma/nbpfaxi.c 	nbpf = devm_kzalloc(dev, struct_size(nbpf, chan, num_channels),
nbpf             1308 drivers/dma/nbpfaxi.c 	if (!nbpf)
nbpf             1311 drivers/dma/nbpfaxi.c 	dma_dev = &nbpf->dma_dev;
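
The allocation at lines 1306-1308 reserves the device and its whole channel array in one devm allocation. A minimal sketch of that flexible-array pattern; the struct is trimmed to members visible in this listing and their order is assumed:

#include <linux/device.h>
#include <linux/overflow.h>
#include <linux/slab.h>

struct nbpf_device {
	struct dma_device dma_dev;
	void __iomem *base;
	struct clk *clk;
	/* ... remaining members elided ... */
	struct nbpf_channel chan[];	/* flexible array, one slot per channel */
};

/* struct_size(nbpf, chan, num_channels) computes
 * sizeof(*nbpf) + num_channels * sizeof(nbpf->chan[0]), with overflow checks. */
nbpf = devm_kzalloc(dev, struct_size(nbpf, chan, num_channels), GFP_KERNEL);
if (!nbpf)
	return -ENOMEM;
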
nbpf             1315 drivers/dma/nbpfaxi.c 	nbpf->base = devm_ioremap_resource(dev, iomem);
nbpf             1316 drivers/dma/nbpfaxi.c 	if (IS_ERR(nbpf->base))
nbpf             1317 drivers/dma/nbpfaxi.c 		return PTR_ERR(nbpf->base);
nbpf             1319 drivers/dma/nbpfaxi.c 	nbpf->clk = devm_clk_get(dev, NULL);
nbpf             1320 drivers/dma/nbpfaxi.c 	if (IS_ERR(nbpf->clk))
nbpf             1321 drivers/dma/nbpfaxi.c 		return PTR_ERR(nbpf->clk);
nbpf             1324 drivers/dma/nbpfaxi.c 			     &nbpf->max_burst_mem_read);
nbpf             1326 drivers/dma/nbpfaxi.c 			     &nbpf->max_burst_mem_write);
nbpf             1328 drivers/dma/nbpfaxi.c 	nbpf->config = cfg;
nbpf             1353 drivers/dma/nbpfaxi.c 			nbpf->chan[i].irq = irqbuf[0];
nbpf             1362 drivers/dma/nbpfaxi.c 			for (i = 0, chan = nbpf->chan; i <= num_channels;
nbpf             1370 drivers/dma/nbpfaxi.c 			if (chan != nbpf->chan + num_channels)
nbpf             1380 drivers/dma/nbpfaxi.c 				nbpf->chan[i].irq = irq;
nbpf             1385 drivers/dma/nbpfaxi.c 			       IRQF_SHARED, "dma error", nbpf);
nbpf             1388 drivers/dma/nbpfaxi.c 	nbpf->eirq = eirq;
nbpf             1394 drivers/dma/nbpfaxi.c 		ret = nbpf_chan_probe(nbpf, i);
nbpf             1429 drivers/dma/nbpfaxi.c 	platform_set_drvdata(pdev, nbpf);
nbpf             1431 drivers/dma/nbpfaxi.c 	ret = clk_prepare_enable(nbpf->clk);
nbpf             1435 drivers/dma/nbpfaxi.c 	nbpf_configure(nbpf);
nbpf             1441 drivers/dma/nbpfaxi.c 	ret = of_dma_controller_register(np, nbpf_of_xlate, nbpf);
nbpf             1450 drivers/dma/nbpfaxi.c 	clk_disable_unprepare(nbpf->clk);
nbpf             1457 drivers/dma/nbpfaxi.c 	struct nbpf_device *nbpf = platform_get_drvdata(pdev);
nbpf             1460 drivers/dma/nbpfaxi.c 	devm_free_irq(&pdev->dev, nbpf->eirq, nbpf);
nbpf             1462 drivers/dma/nbpfaxi.c 	for (i = 0; i < nbpf->config->num_channels; i++) {
nbpf             1463 drivers/dma/nbpfaxi.c 		struct nbpf_channel *chan = nbpf->chan + i;
nbpf             1471 drivers/dma/nbpfaxi.c 	dma_async_device_unregister(&nbpf->dma_dev);
nbpf             1472 drivers/dma/nbpfaxi.c 	clk_disable_unprepare(nbpf->clk);
nbpf             1494 drivers/dma/nbpfaxi.c 	struct nbpf_device *nbpf = dev_get_drvdata(dev);
nbpf             1495 drivers/dma/nbpfaxi.c 	clk_disable_unprepare(nbpf->clk);
nbpf             1501 drivers/dma/nbpfaxi.c 	struct nbpf_device *nbpf = dev_get_drvdata(dev);
nbpf             1502 drivers/dma/nbpfaxi.c 	return clk_prepare_enable(nbpf->clk);
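
Lines 1494-1502 are the runtime-PM pair that gates the controller clock; a minimal sketch of both callbacks, assuming standard drvdata wiring (the callback names follow the usual suspend/resume convention and are not shown in the listing):

#include <linux/clk.h>
#include <linux/device.h>

static int nbpf_runtime_suspend(struct device *dev)
{
	struct nbpf_device *nbpf = dev_get_drvdata(dev);

	/* Gate the functional clock while the controller is idle. */
	clk_disable_unprepare(nbpf->clk);
	return 0;
}

static int nbpf_runtime_resume(struct device *dev)
{
	struct nbpf_device *nbpf = dev_get_drvdata(dev);

	/* Ungate the clock before the first register access after wake-up. */
	return clk_prepare_enable(nbpf->clk);
}
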