idma64 35 drivers/dma/idma64.c static void idma64_off(struct idma64 *idma64)
idma64 39 drivers/dma/idma64.c dma_writel(idma64, CFG, 0);
idma64 41 drivers/dma/idma64.c channel_clear_bit(idma64, MASK(XFER), idma64->all_chan_mask);
idma64 42 drivers/dma/idma64.c channel_clear_bit(idma64, MASK(BLOCK), idma64->all_chan_mask);
idma64 43 drivers/dma/idma64.c channel_clear_bit(idma64, MASK(SRC_TRAN), idma64->all_chan_mask);
idma64 44 drivers/dma/idma64.c channel_clear_bit(idma64, MASK(DST_TRAN), idma64->all_chan_mask);
idma64 45 drivers/dma/idma64.c channel_clear_bit(idma64, MASK(ERROR), idma64->all_chan_mask);
idma64 49 drivers/dma/idma64.c } while (dma_readl(idma64, CFG) & IDMA64_CFG_DMA_EN && --count);
idma64 52 drivers/dma/idma64.c static void idma64_on(struct idma64 *idma64)
idma64 54 drivers/dma/idma64.c dma_writel(idma64, CFG, IDMA64_CFG_DMA_EN);
idma64 59 drivers/dma/idma64.c static void idma64_chan_init(struct idma64 *idma64, struct idma64_chan *idma64c)
idma64 71 drivers/dma/idma64.c channel_set_bit(idma64, MASK(XFER), idma64c->mask);
idma64 72 drivers/dma/idma64.c channel_set_bit(idma64, MASK(ERROR), idma64c->mask);
idma64 81 drivers/dma/idma64.c idma64_on(idma64);
idma64 84 drivers/dma/idma64.c static void idma64_chan_stop(struct idma64 *idma64, struct idma64_chan *idma64c)
idma64 86 drivers/dma/idma64.c channel_clear_bit(idma64, CH_EN, idma64c->mask);
idma64 89 drivers/dma/idma64.c static void idma64_chan_start(struct idma64 *idma64, struct idma64_chan *idma64c)
idma64 102 drivers/dma/idma64.c channel_set_bit(idma64, CH_EN, idma64c->mask);
idma64 107 drivers/dma/idma64.c struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device);
idma64 109 drivers/dma/idma64.c idma64_chan_stop(idma64, idma64c);
idma64 114 drivers/dma/idma64.c struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device);
idma64 128 drivers/dma/idma64.c idma64_chan_init(idma64, idma64c);
idma64 131 drivers/dma/idma64.c idma64_chan_start(idma64, idma64c);
idma64 136 drivers/dma/idma64.c static void idma64_chan_irq(struct idma64 *idma64, unsigned short c,
idma64 139 drivers/dma/idma64.c struct idma64_chan *idma64c = &idma64->chan[c];
idma64 146 drivers/dma/idma64.c dma_writel(idma64, CLEAR(ERROR), idma64c->mask);
idma64 149 drivers/dma/idma64.c dma_writel(idma64, CLEAR(XFER), idma64c->mask);
idma64 164 drivers/dma/idma64.c struct idma64 *idma64 = dev;
idma64 165 drivers/dma/idma64.c u32 status = dma_readl(idma64, STATUS_INT);
idma64 170 drivers/dma/idma64.c dev_vdbg(idma64->dma.dev, "%s: status=%#x\n", __func__, status);
idma64 176 drivers/dma/idma64.c status_xfer = dma_readl(idma64, RAW(XFER));
idma64 177 drivers/dma/idma64.c status_err = dma_readl(idma64, RAW(ERROR));
idma64 179 drivers/dma/idma64.c for (i = 0; i < idma64->dma.chancnt; i++)
idma64 180 drivers/dma/idma64.c idma64_chan_irq(idma64, i, status_err, status_xfer);
idma64 531 drivers/dma/idma64.c struct idma64 *idma64;
idma64 536 drivers/dma/idma64.c idma64 = devm_kzalloc(chip->dev, sizeof(*idma64), GFP_KERNEL);
idma64 537 drivers/dma/idma64.c if (!idma64)
idma64 540 drivers/dma/idma64.c idma64->regs = chip->regs;
idma64 541 drivers/dma/idma64.c chip->idma64 = idma64;
idma64 543 drivers/dma/idma64.c idma64->chan = devm_kcalloc(chip->dev, nr_chan, sizeof(*idma64->chan),
idma64 545 drivers/dma/idma64.c if (!idma64->chan)
idma64 548 drivers/dma/idma64.c idma64->all_chan_mask = (1 << nr_chan) - 1;
idma64 551 drivers/dma/idma64.c idma64_off(idma64);
idma64 554 drivers/dma/idma64.c dev_name(chip->dev), idma64);
idma64 558 drivers/dma/idma64.c INIT_LIST_HEAD(&idma64->dma.channels);
idma64 560 drivers/dma/idma64.c struct idma64_chan *idma64c = &idma64->chan[i];
idma64 563 drivers/dma/idma64.c vchan_init(&idma64c->vchan, &idma64->dma);
idma64 565 drivers/dma/idma64.c idma64c->regs = idma64->regs + i * IDMA64_CH_LENGTH;
idma64 569 drivers/dma/idma64.c dma_cap_set(DMA_SLAVE, idma64->dma.cap_mask);
idma64 570 drivers/dma/idma64.c dma_cap_set(DMA_PRIVATE, idma64->dma.cap_mask);
idma64 572 drivers/dma/idma64.c idma64->dma.device_alloc_chan_resources = idma64_alloc_chan_resources;
idma64 573 drivers/dma/idma64.c idma64->dma.device_free_chan_resources = idma64_free_chan_resources;
idma64 575 drivers/dma/idma64.c idma64->dma.device_prep_slave_sg = idma64_prep_slave_sg;
idma64 577 drivers/dma/idma64.c idma64->dma.device_issue_pending = idma64_issue_pending;
idma64 578 drivers/dma/idma64.c idma64->dma.device_tx_status = idma64_tx_status;
idma64 580 drivers/dma/idma64.c idma64->dma.device_config = idma64_slave_config;
idma64 581 drivers/dma/idma64.c idma64->dma.device_pause = idma64_pause;
idma64 582 drivers/dma/idma64.c idma64->dma.device_resume = idma64_resume;
idma64 583 drivers/dma/idma64.c idma64->dma.device_terminate_all = idma64_terminate_all;
idma64 584 drivers/dma/idma64.c idma64->dma.device_synchronize = idma64_synchronize;
idma64 586 drivers/dma/idma64.c idma64->dma.src_addr_widths = IDMA64_BUSWIDTHS;
idma64 587 drivers/dma/idma64.c idma64->dma.dst_addr_widths = IDMA64_BUSWIDTHS;
idma64 588 drivers/dma/idma64.c idma64->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
idma64 589 drivers/dma/idma64.c idma64->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
idma64 591 drivers/dma/idma64.c idma64->dma.dev = chip->sysdev;
idma64 593 drivers/dma/idma64.c dma_set_max_seg_size(idma64->dma.dev, IDMA64C_CTLH_BLOCK_TS_MASK);
idma64 595 drivers/dma/idma64.c ret = dma_async_device_register(&idma64->dma);
idma64 605 drivers/dma/idma64.c struct idma64 *idma64 = chip->idma64;
idma64 608 drivers/dma/idma64.c dma_async_device_unregister(&idma64->dma);
idma64 614 drivers/dma/idma64.c devm_free_irq(chip->dev, chip->irq, idma64);
idma64 616 drivers/dma/idma64.c for (i = 0; i < idma64->dma.chancnt; i++) {
idma64 617 drivers/dma/idma64.c struct idma64_chan *idma64c = &idma64->chan[i];
idma64 676 drivers/dma/idma64.c idma64_off(chip->idma64);
idma64 684 drivers/dma/idma64.c idma64_on(chip->idma64);
idma64 146 drivers/dma/idma64.h #define channel_set_bit(idma64, reg, mask) \
idma64 147 drivers/dma/idma64.h dma_writel(idma64, reg, ((mask) << 8) | (mask))
idma64 148 drivers/dma/idma64.h #define channel_clear_bit(idma64, reg, mask) \
idma64 149 drivers/dma/idma64.h dma_writel(idma64, reg, ((mask) << 8) | 0)
idma64 193 drivers/dma/idma64.h static inline struct idma64 *to_idma64(struct dma_device *ddev)
idma64 195 drivers/dma/idma64.h return container_of(ddev, struct idma64, dma);
idma64 198 drivers/dma/idma64.h static inline u32 idma64_readl(struct idma64 *idma64, int offset)
idma64 200 drivers/dma/idma64.h return readl(idma64->regs + offset);
idma64 203 drivers/dma/idma64.h static inline void idma64_writel(struct idma64 *idma64, int offset, u32 value)
idma64 205 drivers/dma/idma64.h writel(value, idma64->regs + offset);
idma64 208 drivers/dma/idma64.h #define dma_readl(idma64, reg) \
idma64 209 drivers/dma/idma64.h idma64_readl(idma64, IDMA64_##reg)
idma64 210 drivers/dma/idma64.h #define dma_writel(idma64, reg, value) \
idma64 211 drivers/dma/idma64.h idma64_writel(idma64, IDMA64_##reg, (value))
idma64 226 drivers/dma/idma64.h struct idma64 *idma64;
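The entries at drivers/dma/idma64.c:35-54 above outline how the whole controller is switched off and on: idma64_off() clears the global CFG enable bit, masks every per-channel interrupt source, and then waits for IDMA64_CFG_DMA_EN to drop, while idma64_on() simply writes that bit back. The following is only a sketch of how those listed fragments fit together, reusing the dma_writel()/dma_readl()/channel_clear_bit() helpers from idma64.h; the poll bound and the cpu_relax() call are illustrative assumptions, not lines taken from the listing.

/* Sketch: global disable with a bounded wait for IDMA64_CFG_DMA_EN to clear. */
static void idma64_off_sketch(struct idma64 *idma64)
{
	unsigned short count = 100;	/* illustrative poll bound */

	/* Drop the global DMA enable bit (line 39). */
	dma_writel(idma64, CFG, 0);

	/* Mask every interrupt source for all channels (lines 41-45). */
	channel_clear_bit(idma64, MASK(XFER), idma64->all_chan_mask);
	channel_clear_bit(idma64, MASK(BLOCK), idma64->all_chan_mask);
	channel_clear_bit(idma64, MASK(SRC_TRAN), idma64->all_chan_mask);
	channel_clear_bit(idma64, MASK(DST_TRAN), idma64->all_chan_mask);
	channel_clear_bit(idma64, MASK(ERROR), idma64->all_chan_mask);

	/* Spin until the hardware reports itself disabled (line 49). */
	do {
		cpu_relax();
	} while (dma_readl(idma64, CFG) & IDMA64_CFG_DMA_EN && --count);
}

idma64_on() is the inverse and needs no wait: a single dma_writel(idma64, CFG, IDMA64_CFG_DMA_EN), as shown at line 54.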
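channel_set_bit() and channel_clear_bit() (drivers/dma/idma64.h:146-149) rely on the controller's self-masked channel registers: in registers such as CH_EN and the MASK(...) group, the upper byte acts as a write-enable mask for the per-channel bits in the lower byte, so a single write of ((mask) << 8) | (mask) sets the selected bits and ((mask) << 8) | 0 clears them, leaving the other channels' bits untouched without any read-modify-write. A small stand-alone illustration of that semantics follows; the emulated register and the example masks are purely illustrative.

#include <stdint.h>
#include <stdio.h>

/* Emulated self-masked register: bits 15:8 are write-enable, bits 7:0 data. */
static uint32_t reg;

static void self_masked_write(uint32_t value)
{
	uint32_t we = (value >> 8) & 0xff;	/* data bits this write may touch */

	reg = (reg & ~we) | (value & we);
}

int main(void)
{
	uint32_t mask = 0x3;			/* channels 0 and 1, as an example */

	self_masked_write((mask << 8) | mask);	/* channel_set_bit(): enable 0 and 1 */
	printf("after set:   %#x\n", reg);	/* -> 0x3 */

	self_masked_write((0x1 << 8) | 0);	/* channel_clear_bit() for channel 0 only */
	printf("after clear: %#x\n", reg);	/* -> 0x2; channel 1 is untouched */

	return 0;
}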
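On the consumer side, the capabilities registered at drivers/dma/idma64.c:569-595 (DMA_SLAVE and DMA_PRIVATE, device_prep_slave_sg, device_config, pause/resume/terminate) mean a peripheral driver never calls into idma64 directly; it goes through the generic dmaengine slave API and the framework routes the calls to the idma64_* callbacks listed above. The sketch below shows that path under stated assumptions: the consumer device, the "tx" channel name, the FIFO address and the scatterlist are all hypothetical.

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/*
 * Hypothetical consumer of a channel that idma64 exported through
 * dma_async_device_register(): queue one mem-to-dev scatterlist transfer.
 */
static int example_start_tx(struct device *dev, dma_addr_t fifo_addr,
			    struct scatterlist *sgl, unsigned int sg_len)
{
	struct dma_slave_config cfg = {
		.dst_addr = fifo_addr,			/* peripheral FIFO (assumed) */
		.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst = 8,
	};
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *chan;
	dma_cookie_t cookie;
	int ret;

	chan = dma_request_chan(dev, "tx");		/* "tx" is an assumed name */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	ret = dmaengine_slave_config(chan, &cfg);	/* lands in idma64_slave_config() */
	if (ret)
		goto err;

	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		ret = -ENOMEM;
		goto err;
	}

	cookie = dmaengine_submit(desc);		/* queues on the virt-dma channel */
	if (dma_submit_error(cookie)) {
		ret = -EIO;
		goto err;
	}

	dma_async_issue_pending(chan);			/* idma64_issue_pending() kicks HW */
	return 0;	/* a real driver releases the channel later with dma_release_channel() */

err:
	dma_release_channel(chan);
	return ret;
}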