Lines Matching refs:p (all references to the struct ebus_dma_info pointer p in the EBUS DMA support code)

49 static void __ebus_dma_reset(struct ebus_dma_info *p, int no_drain)  in __ebus_dma_reset()  argument
54 writel(EBDMA_CSR_RESET, p->regs + EBDMA_CSR); in __ebus_dma_reset()
61 val = readl(p->regs + EBDMA_CSR); in __ebus_dma_reset()
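
The three matches above outline a reset helper: pulse the reset bit, then, unless no_drain is set, wait for the controller to drain. A minimal reconstruction; the timeout constant EBUS_DMA_RESET_TIMEOUT and the drain/cycle-pending bits EBDMA_CSR_DRAIN and EBDMA_CSR_CYC_PEND do not appear in the matches and are assumptions here (needs <linux/io.h> and <linux/delay.h>):

static void __ebus_dma_reset(struct ebus_dma_info *p, int no_drain)
{
        int i;
        u32 val;

        /* Pulse the reset bit in the control/status register. */
        writel(EBDMA_CSR_RESET, p->regs + EBDMA_CSR);
        udelay(1);

        if (no_drain)
                return;

        /* Assumed: poll until the drain/cycle-pending bits clear. */
        for (i = EBUS_DMA_RESET_TIMEOUT; i > 0; i--) {
                val = readl(p->regs + EBDMA_CSR);
                if (!(val & (EBDMA_CSR_DRAIN | EBDMA_CSR_CYC_PEND)))
                        break;
                udelay(10);
        }
}
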
71 struct ebus_dma_info *p = dev_id; in ebus_dma_irq() local
75 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq()
76 csr = readl(p->regs + EBDMA_CSR); in ebus_dma_irq()
77 writel(csr, p->regs + EBDMA_CSR); in ebus_dma_irq()
78 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_irq()
81 printk(KERN_CRIT "ebus_dma(%s): DMA error!\n", p->name); in ebus_dma_irq()
82 p->callback(p, EBUS_DMA_EVENT_ERROR, p->client_cookie); in ebus_dma_irq()
85 p->callback(p, in ebus_dma_irq()
88 p->client_cookie); in ebus_dma_irq()
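
The handler's matches show the classic ack-then-dispatch pattern: read the CSR under the lock, write the same value back to acknowledge the latched status, then invoke the client callback outside the lock. A sketch of that control flow; the status bits EBDMA_CSR_ERR_PEND, EBDMA_CSR_INT_PEND and EBDMA_CSR_TC, and the event codes other than EBUS_DMA_EVENT_ERROR, are assumed names (needs <linux/interrupt.h> and <linux/spinlock.h>):

static irqreturn_t ebus_dma_irq(int irq, void *dev_id)
{
        struct ebus_dma_info *p = dev_id;
        unsigned long flags;
        u32 csr;

        /* Read the latched status and write it back to acknowledge it. */
        spin_lock_irqsave(&p->lock, flags);
        csr = readl(p->regs + EBDMA_CSR);
        writel(csr, p->regs + EBDMA_CSR);
        spin_unlock_irqrestore(&p->lock, flags);

        if (csr & EBDMA_CSR_ERR_PEND) {                 /* assumed bit */
                printk(KERN_CRIT "ebus_dma(%s): DMA error!\n", p->name);
                p->callback(p, EBUS_DMA_EVENT_ERROR, p->client_cookie);
                return IRQ_HANDLED;
        } else if (csr & EBDMA_CSR_INT_PEND) {          /* assumed bit */
                p->callback(p,
                            (csr & EBDMA_CSR_TC) ?
                            EBUS_DMA_EVENT_DMA : EBUS_DMA_EVENT_DEVICE,
                            p->client_cookie);
                return IRQ_HANDLED;
        }

        return IRQ_NONE;
}
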
96 int ebus_dma_register(struct ebus_dma_info *p) in ebus_dma_register() argument
100 if (!p->regs) in ebus_dma_register()
102 if (p->flags & ~(EBUS_DMA_FLAG_USE_EBDMA_HANDLER | in ebus_dma_register()
105 if ((p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) && !p->callback) in ebus_dma_register()
107 if (!strlen(p->name)) in ebus_dma_register()
110 __ebus_dma_reset(p, 1); in ebus_dma_register()
114 if (p->flags & EBUS_DMA_FLAG_TCI_DISABLE) in ebus_dma_register()
117 writel(csr, p->regs + EBDMA_CSR); in ebus_dma_register()
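
The registration matches are all parameter validation followed by one reset and one CSR write. Reconstructed below; the -EINVAL return values, the initial CSR bits (EBDMA_CSR_BURST_SZ_16, EBDMA_CSR_EN_CNT) and the TCI-disable bit EBDMA_CSR_TCI_DIS are plausible assumptions, not visible in the matches:

int ebus_dma_register(struct ebus_dma_info *p)
{
        u32 csr;

        /* Validate the caller-filled descriptor before touching hardware. */
        if (!p->regs)
                return -EINVAL;
        if (p->flags & ~(EBUS_DMA_FLAG_USE_EBDMA_HANDLER |
                         EBUS_DMA_FLAG_TCI_DISABLE))
                return -EINVAL;
        if ((p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) && !p->callback)
                return -EINVAL;
        if (!strlen(p->name))
                return -EINVAL;

        /* Reset without waiting for the drain (no_drain = 1). */
        __ebus_dma_reset(p, 1);

        /* Assumed initial setup: burst size and byte-counter enable. */
        csr = EBDMA_CSR_BURST_SZ_16 | EBDMA_CSR_EN_CNT;
        if (p->flags & EBUS_DMA_FLAG_TCI_DISABLE)
                csr |= EBDMA_CSR_TCI_DIS;
        writel(csr, p->regs + EBDMA_CSR);

        return 0;
}
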
123 int ebus_dma_irq_enable(struct ebus_dma_info *p, int on) in ebus_dma_irq_enable() argument
129 if (p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) { in ebus_dma_irq_enable()
130 if (request_irq(p->irq, ebus_dma_irq, IRQF_SHARED, p->name, p)) in ebus_dma_irq_enable()
134 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq_enable()
135 csr = readl(p->regs + EBDMA_CSR); in ebus_dma_irq_enable()
137 writel(csr, p->regs + EBDMA_CSR); in ebus_dma_irq_enable()
138 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_irq_enable()
140 spin_lock_irqsave(&p->lock, flags); in ebus_dma_irq_enable()
141 csr = readl(p->regs + EBDMA_CSR); in ebus_dma_irq_enable()
143 writel(csr, p->regs + EBDMA_CSR); in ebus_dma_irq_enable()
144 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_irq_enable()
146 if (p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) { in ebus_dma_irq_enable()
147 free_irq(p->irq, p); in ebus_dma_irq_enable()
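
Both branches of ebus_dma_irq_enable() do a locked read-modify-write of the CSR; the asymmetry is that the IRQ line is requested before enabling and freed after disabling, and only when the driver's own handler is in use. A sketch, assuming the interrupt-enable bit is named EBDMA_CSR_INT_EN and that a failed request_irq() maps to -EBUSY:

int ebus_dma_irq_enable(struct ebus_dma_info *p, int on)
{
        unsigned long flags;
        u32 csr;

        if (on) {
                if (p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER) {
                        if (request_irq(p->irq, ebus_dma_irq, IRQF_SHARED,
                                        p->name, p))
                                return -EBUSY;          /* assumed errno */
                }

                spin_lock_irqsave(&p->lock, flags);
                csr = readl(p->regs + EBDMA_CSR);
                csr |= EBDMA_CSR_INT_EN;                /* assumed bit */
                writel(csr, p->regs + EBDMA_CSR);
                spin_unlock_irqrestore(&p->lock, flags);
        } else {
                spin_lock_irqsave(&p->lock, flags);
                csr = readl(p->regs + EBDMA_CSR);
                csr &= ~EBDMA_CSR_INT_EN;               /* assumed bit */
                writel(csr, p->regs + EBDMA_CSR);
                spin_unlock_irqrestore(&p->lock, flags);

                if (p->flags & EBUS_DMA_FLAG_USE_EBDMA_HANDLER)
                        free_irq(p->irq, p);
        }

        return 0;
}
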
155 void ebus_dma_unregister(struct ebus_dma_info *p) in ebus_dma_unregister() argument
161 spin_lock_irqsave(&p->lock, flags); in ebus_dma_unregister()
162 csr = readl(p->regs + EBDMA_CSR); in ebus_dma_unregister()
165 writel(csr, p->regs + EBDMA_CSR); in ebus_dma_unregister()
168 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_unregister()
171 free_irq(p->irq, p); in ebus_dma_unregister()
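
The gap between the CSR write (line 165) and the unlock (line 168) suggests the write is conditional, and the trailing free_irq() outside any visible lock suggests a flag carried out of the critical section. A sketch under those assumptions, again using the assumed EBDMA_CSR_INT_EN name:

void ebus_dma_unregister(struct ebus_dma_info *p)
{
        unsigned long flags;
        int irq_on = 0;
        u32 csr;

        spin_lock_irqsave(&p->lock, flags);
        csr = readl(p->regs + EBDMA_CSR);
        if (csr & EBDMA_CSR_INT_EN) {                   /* assumed bit */
                csr &= ~EBDMA_CSR_INT_EN;
                writel(csr, p->regs + EBDMA_CSR);
                irq_on = 1;
        }
        spin_unlock_irqrestore(&p->lock, flags);

        /* Free the IRQ outside the spinlock; free_irq() can sleep. */
        if (irq_on)
                free_irq(p->irq, p);
}
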
175 int ebus_dma_request(struct ebus_dma_info *p, dma_addr_t bus_addr, size_t len) in ebus_dma_request() argument
184 spin_lock_irqsave(&p->lock, flags); in ebus_dma_request()
185 csr = readl(p->regs + EBDMA_CSR); in ebus_dma_request()
193 writel(len, p->regs + EBDMA_COUNT); in ebus_dma_request()
194 writel(bus_addr, p->regs + EBDMA_ADDR); in ebus_dma_request()
198 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_request()
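
ebus_dma_request() programs one transfer: under the lock it reads the CSR, then loads the byte count and bus address into the count and address registers. The guards between the read (line 185) and the writes (lines 193-194) are not in the matches; the sketch below assumes the request is refused unless DMA is enabled and no next address is already queued (EBDMA_CSR_EN_DMA and EBDMA_CSR_NA_LOADED are assumed names):

int ebus_dma_request(struct ebus_dma_info *p, dma_addr_t bus_addr, size_t len)
{
        unsigned long flags;
        int err = -EINVAL;
        u32 csr;

        spin_lock_irqsave(&p->lock, flags);
        csr = readl(p->regs + EBDMA_CSR);

        /* Assumed guards: DMA enabled, no address already queued. */
        if ((csr & EBDMA_CSR_EN_DMA) && !(csr & EBDMA_CSR_NA_LOADED)) {
                writel(len, p->regs + EBDMA_COUNT);
                writel(bus_addr, p->regs + EBDMA_ADDR);
                err = 0;
        }
        spin_unlock_irqrestore(&p->lock, flags);

        return err;
}
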
204 void ebus_dma_prepare(struct ebus_dma_info *p, int write) in ebus_dma_prepare() argument
209 spin_lock_irqsave(&p->lock, flags); in ebus_dma_prepare()
210 __ebus_dma_reset(p, 0); in ebus_dma_prepare()
219 if (p->flags & EBUS_DMA_FLAG_TCI_DISABLE) in ebus_dma_prepare()
222 writel(csr, p->regs + EBDMA_CSR); in ebus_dma_prepare()
224 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_prepare()
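
Within these matches, ebus_dma_prepare() is the only caller that passes no_drain = 0, i.e. it performs the full reset-and-drain before building a fresh CSR value for the coming transfer. The baseline bits below (interrupt, counter, burst size, next-address chaining) and the direction bit EBDMA_CSR_WRITE are assumed; only the TCI_DISABLE flag handling is visible in the matches:

void ebus_dma_prepare(struct ebus_dma_info *p, int write)
{
        unsigned long flags;
        u32 csr;

        spin_lock_irqsave(&p->lock, flags);
        /* Full reset, waiting for the drain this time (no_drain = 0). */
        __ebus_dma_reset(p, 0);

        /* Assumed baseline configuration for a new transfer. */
        csr = (EBDMA_CSR_INT_EN |
               EBDMA_CSR_EN_CNT |
               EBDMA_CSR_BURST_SZ_16 |
               EBDMA_CSR_EN_NEXT);
        if (write)
                csr |= EBDMA_CSR_WRITE;
        if (p->flags & EBUS_DMA_FLAG_TCI_DISABLE)
                csr |= EBDMA_CSR_TCI_DIS;               /* assumed bit */

        writel(csr, p->regs + EBDMA_CSR);
        spin_unlock_irqrestore(&p->lock, flags);
}
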
228 unsigned int ebus_dma_residue(struct ebus_dma_info *p) in ebus_dma_residue() argument
230 return readl(p->regs + EBDMA_COUNT); in ebus_dma_residue()
234 unsigned int ebus_dma_addr(struct ebus_dma_info *p) in ebus_dma_addr() argument
236 return readl(p->regs + EBDMA_ADDR); in ebus_dma_addr()
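
ebus_dma_residue() and ebus_dma_addr() are bare register reads: bytes left in the current transfer and the current bus address. A hypothetical client-side snippet (the helper name and the total parameter are made up for illustration):

/* Hypothetical helper: log transfer progress using the two accessors. */
static void my_dump_dma_progress(struct ebus_dma_info *p, size_t total)
{
        unsigned int left = ebus_dma_residue(p);    /* bytes not yet moved */

        printk(KERN_DEBUG "ebus_dma(%s): %zu of %zu bytes done, at 0x%08x\n",
               p->name, total - left, total, ebus_dma_addr(p));
}
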
240 void ebus_dma_enable(struct ebus_dma_info *p, int on) in ebus_dma_enable() argument
245 spin_lock_irqsave(&p->lock, flags); in ebus_dma_enable()
246 orig_csr = csr = readl(p->regs + EBDMA_CSR); in ebus_dma_enable()
253 writel(csr, p->regs + EBDMA_CSR); in ebus_dma_enable()
254 spin_unlock_irqrestore(&p->lock, flags); in ebus_dma_enable()
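
ebus_dma_enable() keeps the original CSR (orig_csr) alongside the modified copy, which points at a change-detection pattern: skip the register write when the enable bit is already in the requested state. Sketch, with EBDMA_CSR_EN_DMA as the assumed name of the enable bit:

void ebus_dma_enable(struct ebus_dma_info *p, int on)
{
        unsigned long flags;
        u32 orig_csr, csr;

        spin_lock_irqsave(&p->lock, flags);
        orig_csr = csr = readl(p->regs + EBDMA_CSR);
        if (on)
                csr |= EBDMA_CSR_EN_DMA;                /* assumed bit */
        else
                csr &= ~EBDMA_CSR_EN_DMA;

        /* Write back only if the enable bit actually changed. */
        if ((orig_csr ^ csr) & EBDMA_CSR_EN_DMA)
                writel(csr, p->regs + EBDMA_CSR);
        spin_unlock_irqrestore(&p->lock, flags);
}
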