root/arch/sparc/kernel/ebus.c


DEFINITIONS

This source file includes the following definitions.
  1. __ebus_dma_reset
  2. ebus_dma_irq
  3. ebus_dma_register
  4. ebus_dma_irq_enable
  5. ebus_dma_unregister
  6. ebus_dma_request
  7. ebus_dma_prepare
  8. ebus_dma_residue
  9. ebus_dma_addr
  10. ebus_dma_enable

// SPDX-License-Identifier: GPL-2.0
/* ebus.c: EBUS DMA library code.
 *
 * Copyright (C) 1997  Eddie C. Dost  (ecd@skynet.be)
 * Copyright (C) 1999  David S. Miller (davem@redhat.com)
 */

#include <linux/export.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/interrupt.h>
#include <linux/delay.h>

#include <asm/ebus_dma.h>
#include <asm/io.h>

#define EBDMA_CSR       0x00UL  /* Control/Status */
#define EBDMA_ADDR      0x04UL  /* DMA Address */
#define EBDMA_COUNT     0x08UL  /* DMA Count */

#define EBDMA_CSR_INT_PEND      0x00000001
#define EBDMA_CSR_ERR_PEND      0x00000002
#define EBDMA_CSR_DRAIN         0x00000004
#define EBDMA_CSR_INT_EN        0x00000010
#define EBDMA_CSR_RESET         0x00000080
#define EBDMA_CSR_WRITE         0x00000100
#define EBDMA_CSR_EN_DMA        0x00000200
#define EBDMA_CSR_CYC_PEND      0x00000400
#define EBDMA_CSR_DIAG_RD_DONE  0x00000800
#define EBDMA_CSR_DIAG_WR_DONE  0x00001000
#define EBDMA_CSR_EN_CNT        0x00002000
#define EBDMA_CSR_TC            0x00004000
#define EBDMA_CSR_DIS_CSR_DRN   0x00010000
#define EBDMA_CSR_BURST_SZ_MASK 0x000c0000
#define EBDMA_CSR_BURST_SZ_1    0x00080000
#define EBDMA_CSR_BURST_SZ_4    0x00000000
#define EBDMA_CSR_BURST_SZ_8    0x00040000
#define EBDMA_CSR_BURST_SZ_16   0x000c0000
#define EBDMA_CSR_DIAG_EN       0x00100000
#define EBDMA_CSR_DIS_ERR_PEND  0x00400000
#define EBDMA_CSR_TCI_DIS       0x00800000
#define EBDMA_CSR_EN_NEXT       0x01000000
#define EBDMA_CSR_DMA_ON        0x02000000
#define EBDMA_CSR_A_LOADED      0x04000000
#define EBDMA_CSR_NA_LOADED     0x08000000
#define EBDMA_CSR_DEV_ID_MASK   0xf0000000

#define EBUS_DMA_RESET_TIMEOUT  10000

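/*
 * Pulse the RESET bit in the CSR.  Unless @no_drain is set, poll the
 * CSR afterwards until any pending drain or EBus cycle has completed,
 * giving up after EBUS_DMA_RESET_TIMEOUT iterations of 10us each.
 */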
static void __ebus_dma_reset(struct ebus_dma_info *p, int no_drain)
{
        int i;
        u32 val = 0;

        writel(EBDMA_CSR_RESET, p->regs + EBDMA_CSR);
        udelay(1);

        if (no_drain)
                return;

        for (i = EBUS_DMA_RESET_TIMEOUT; i > 0; i--) {
                val = readl(p->regs + EBDMA_CSR);

                if (!(val & (EBDMA_CSR_DRAIN | EBDMA_CSR_CYC_PEND)))
                        break;
                udelay(10);
        }
}

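/*
 * Shared interrupt handler installed by ebus_dma_irq_enable() when the
 * client sets EBUS_DMA_FLAG_USE_EBDMA_HANDLER.  The CSR is sampled and
 * written back under the channel lock, then the client callback is
 * invoked with an ERROR, DMA (terminal count) or DEVICE event.
 */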
static irqreturn_t ebus_dma_irq(int irq, void *dev_id)
{
        struct ebus_dma_info *p = dev_id;
        unsigned long flags;
        u32 csr = 0;

        spin_lock_irqsave(&p->lock, flags);
        csr = readl(p->regs + EBDMA_CSR);
        writel(csr, p->regs + EBDMA_CSR);
        spin_unlock_irqrestore(&p->lock, flags);

        if (csr & EBDMA_CSR_ERR_PEND) {
                printk(KERN_CRIT "ebus_dma(%s): DMA error!\n", p->name);
                p->callback(p, EBUS_DMA_EVENT_ERROR, p->client_cookie);
                return IRQ_HANDLED;
        } else if (csr & EBDMA_CSR_INT_PEND) {
                p->callback(p,
                            (csr & EBDMA_CSR_TC) ?
                            EBUS_DMA_EVENT_DMA : EBUS_DMA_EVENT_DEVICE,
                            p->client_cookie);
                return IRQ_HANDLED;
        }

        return IRQ_NONE;
}

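/*
 * Validate the channel description and bring the engine into a known
 * state: reset without draining, then program 16-byte bursts with the
 * byte counter enabled, masking terminal-count interrupts if the client
 * set EBUS_DMA_FLAG_TCI_DISABLE.
 */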
int ebus_dma_register(struct ebus_dma_info *p)
{
        u32 csr;

        if (!p->regs)
                return -EINVAL;
        if (p->flags & ~(EBUS_DMA_FLAG_USE_EBDMA_HANDLER |
                         EBUS_DMA_FLAG_TCI_DISABLE))
                return -EINVAL;
        if ((p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) && !p->callback)
                return -EINVAL;
        if (!strlen(p->name))
                return -EINVAL;

        __ebus_dma_reset(p, 1);

        csr = EBDMA_CSR_BURST_SZ_16 | EBDMA_CSR_EN_CNT;

        if (p->flags & EBUS_DMA_FLAG_TCI_DISABLE)
                csr |= EBDMA_CSR_TCI_DIS;

        writel(csr, p->regs + EBDMA_CSR);

        return 0;
}
EXPORT_SYMBOL(ebus_dma_register);

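/*
 * Enable or disable the channel interrupt.  When the client relies on
 * the library handler (EBUS_DMA_FLAG_USE_EBDMA_HANDLER), the shared IRQ
 * is requested before INT_EN is set and freed only after it has been
 * cleared, so no interrupt can fire without a handler in place.
 */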
int ebus_dma_irq_enable(struct ebus_dma_info *p, int on)
{
        unsigned long flags;
        u32 csr;

        if (on) {
                if (p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) {
                        if (request_irq(p->irq, ebus_dma_irq, IRQF_SHARED, p->name, p))
                                return -EBUSY;
                }

                spin_lock_irqsave(&p->lock, flags);
                csr = readl(p->regs + EBDMA_CSR);
                csr |= EBDMA_CSR_INT_EN;
                writel(csr, p->regs + EBDMA_CSR);
                spin_unlock_irqrestore(&p->lock, flags);
        } else {
                spin_lock_irqsave(&p->lock, flags);
                csr = readl(p->regs + EBDMA_CSR);
                csr &= ~EBDMA_CSR_INT_EN;
                writel(csr, p->regs + EBDMA_CSR);
                spin_unlock_irqrestore(&p->lock, flags);

                if (p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) {
                        free_irq(p->irq, p);
                }
        }

        return 0;
}
EXPORT_SYMBOL(ebus_dma_irq_enable);

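/*
 * Tear down a channel set up by ebus_dma_register(): clear INT_EN if it
 * was still set and, in that case, free the IRQ.
 */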
void ebus_dma_unregister(struct ebus_dma_info *p)
{
        unsigned long flags;
        u32 csr;
        int irq_on = 0;

        spin_lock_irqsave(&p->lock, flags);
        csr = readl(p->regs + EBDMA_CSR);
        if (csr & EBDMA_CSR_INT_EN) {
                csr &= ~EBDMA_CSR_INT_EN;
                writel(csr, p->regs + EBDMA_CSR);
                irq_on = 1;
        }
        spin_unlock_irqrestore(&p->lock, flags);

        if (irq_on)
                free_irq(p->irq, p);
}
EXPORT_SYMBOL(ebus_dma_unregister);

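/*
 * Queue one transfer of up to 16MB-1 bytes.  Returns -EINVAL if DMA is
 * not currently enabled on the channel, -EBUSY if a next address is
 * already loaded, otherwise writes the byte count and bus address and
 * returns 0.
 */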
int ebus_dma_request(struct ebus_dma_info *p, dma_addr_t bus_addr, size_t len)
{
        unsigned long flags;
        u32 csr;
        int err;

        if (len >= (1 << 24))
                return -EINVAL;

        spin_lock_irqsave(&p->lock, flags);
        csr = readl(p->regs + EBDMA_CSR);
        err = -EINVAL;
        if (!(csr & EBDMA_CSR_EN_DMA))
                goto out;
        err = -EBUSY;
        if (csr & EBDMA_CSR_NA_LOADED)
                goto out;

        writel(len,      p->regs + EBDMA_COUNT);
        writel(bus_addr, p->regs + EBDMA_ADDR);
        err = 0;

out:
        spin_unlock_irqrestore(&p->lock, flags);

        return err;
}
EXPORT_SYMBOL(ebus_dma_request);

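/*
 * Reset the channel, waiting for any drain to finish, and program it
 * for a new run: interrupts on, byte counter on, 16-byte bursts and
 * next-address chaining enabled.  @write sets EBDMA_CSR_WRITE to select
 * the transfer direction; EBDMA_CSR_EN_DMA is left clear, so the caller
 * still has to call ebus_dma_enable().
 */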
void ebus_dma_prepare(struct ebus_dma_info *p, int write)
{
        unsigned long flags;
        u32 csr;

        spin_lock_irqsave(&p->lock, flags);
        __ebus_dma_reset(p, 0);

        csr = (EBDMA_CSR_INT_EN |
               EBDMA_CSR_EN_CNT |
               EBDMA_CSR_BURST_SZ_16 |
               EBDMA_CSR_EN_NEXT);

        if (write)
                csr |= EBDMA_CSR_WRITE;
        if (p->flags & EBUS_DMA_FLAG_TCI_DISABLE)
                csr |= EBDMA_CSR_TCI_DIS;

        writel(csr, p->regs + EBDMA_CSR);

        spin_unlock_irqrestore(&p->lock, flags);
}
EXPORT_SYMBOL(ebus_dma_prepare);

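/* Bytes remaining in the current transfer, read from the count register. */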
unsigned int ebus_dma_residue(struct ebus_dma_info *p)
{
        return readl(p->regs + EBDMA_COUNT);
}
EXPORT_SYMBOL(ebus_dma_residue);

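/* Current DMA bus address, read from the address register. */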
unsigned int ebus_dma_addr(struct ebus_dma_info *p)
{
        return readl(p->regs + EBDMA_ADDR);
}
EXPORT_SYMBOL(ebus_dma_addr);

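/*
 * Turn the DMA engine on or off by toggling EBDMA_CSR_EN_DMA, touching
 * the hardware only if the bit actually changes.
 */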
void ebus_dma_enable(struct ebus_dma_info *p, int on)
{
        unsigned long flags;
        u32 orig_csr, csr;

        spin_lock_irqsave(&p->lock, flags);
        orig_csr = csr = readl(p->regs + EBDMA_CSR);
        if (on)
                csr |= EBDMA_CSR_EN_DMA;
        else
                csr &= ~EBDMA_CSR_EN_DMA;
        if ((orig_csr & EBDMA_CSR_EN_DMA) !=
            (csr & EBDMA_CSR_EN_DMA))
                writel(csr, p->regs + EBDMA_CSR);
        spin_unlock_irqrestore(&p->lock, flags);
}
EXPORT_SYMBOL(ebus_dma_enable);
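
/*
 * Illustrative usage sketch (editorial addition, not part of the
 * original file): a client that has filled in its ebus_dma_info
 * (regs ioremap()'d, irq, spinlock initialized, name, flags, callback)
 * would typically drive the channel roughly as follows.  The dma_handle
 * and len below are assumed to come from the client's own buffer
 * mapping; this library does not map buffers itself.
 *
 *	err = ebus_dma_register(p);
 *	err = ebus_dma_irq_enable(p, 1);
 *	ebus_dma_prepare(p, 1);                       direction for this run
 *	ebus_dma_enable(p, 1);
 *	err = ebus_dma_request(p, dma_handle, len);   queue one buffer
 *	... completion reported via p->callback ...
 *	ebus_dma_enable(p, 0);
 *	ebus_dma_irq_enable(p, 0);
 *	ebus_dma_unregister(p);
 */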
