Lines Matching refs:chan (identifier cross-reference for the Linux pch_dma DMA controller driver, drivers/dma/pch_dma.c; each entry gives the source line number, the matching code, and its usage context)
99 struct dma_chan chan; member
151 static inline struct pch_dma_chan *to_pd_chan(struct dma_chan *chan) in to_pd_chan() argument
153 return container_of(chan, struct pch_dma_chan, chan); in to_pd_chan()
161 static inline struct device *chan2dev(struct dma_chan *chan) in chan2dev() argument
163 return &chan->dev->device; in chan2dev()
166 static inline struct device *chan2parent(struct dma_chan *chan) in chan2parent() argument
168 return chan->dev->device.parent; in chan2parent()
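
The three inline helpers above (listing lines 151-168) appear almost in full. Put back together as a minimal sketch, the only assumption being the pch_dma_chan layout implied by the embedded "chan" member at listing line 99:

    /* Recover the driver's channel wrapper from a generic dma_chan;
     * relies on the embedded "chan" member (listing line 99). */
    static inline struct pch_dma_chan *to_pd_chan(struct dma_chan *chan)
    {
            return container_of(chan, struct pch_dma_chan, chan);
    }

    /* The struct device used by the dev_dbg()/dev_err() calls below. */
    static inline struct device *chan2dev(struct dma_chan *chan)
    {
            return &chan->dev->device;
    }

    /* Parent of the channel's sysfs device. */
    static inline struct device *chan2parent(struct dma_chan *chan)
    {
            return chan->dev->device.parent;
    }
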
185 static void pdc_enable_irq(struct dma_chan *chan, int enable) in pdc_enable_irq() argument
187 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq()
191 if (chan->chan_id < 8) in pdc_enable_irq()
192 pos = chan->chan_id; in pdc_enable_irq()
194 pos = chan->chan_id + 8; in pdc_enable_irq()
205 dev_dbg(chan2dev(chan), "pdc_enable_irq: chan %d -> %x\n", in pdc_enable_irq()
206 chan->chan_id, val); in pdc_enable_irq()
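
pdc_enable_irq() (lines 185-206) masks or unmasks one channel's interrupt: channels 0-7 map straight to their bit position, channels 8-11 to position chan_id + 8. A sketch under stated assumptions: the dma_readl()/dma_writel() accessors and the CTL2 register name do not appear in the listing and are guesses at the driver's internals.

    static void pdc_enable_irq(struct dma_chan *chan, int enable)
    {
            struct pch_dma *pd = to_pd(chan->device);
            int pos;
            u32 val;

            if (chan->chan_id < 8)
                    pos = chan->chan_id;
            else
                    pos = chan->chan_id + 8;        /* high channels, high bits */

            val = dma_readl(pd, CTL2);              /* accessor/register assumed */
            if (enable)
                    val |= 0x1 << pos;
            else
                    val &= ~(0x1 << pos);
            dma_writel(pd, CTL2, val);

            dev_dbg(chan2dev(chan), "pdc_enable_irq: chan %d -> %x\n",
                    chan->chan_id, val);
    }
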
209 static void pdc_set_dir(struct dma_chan *chan) in pdc_set_dir() argument
211 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pdc_set_dir()
212 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir()
217 if (chan->chan_id < 8) { in pdc_set_dir()
221 (DMA_CTL0_BITS_PER_CH * chan->chan_id); in pdc_set_dir()
223 (DMA_CTL0_BITS_PER_CH * chan->chan_id)); in pdc_set_dir()
226 val |= 0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id + in pdc_set_dir()
229 val &= ~(0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id + in pdc_set_dir()
235 int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */ in pdc_set_dir()
253 dev_dbg(chan2dev(chan), "pdc_set_dir: chan %d -> %x\n", in pdc_set_dir()
254 chan->chan_id, val); in pdc_set_dir()
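
pdc_set_dir() (lines 209-254) programs the per-channel direction bit: channels 0-7 occupy DMA_CTL0_BITS_PER_CH-wide fields in one control register, with the direction bit at offset DMA_CTL0_DIR_SHIFT_BITS inside the field, while channels 8-11 are rebased by ch = chan_id - 8 into a second register. A condensed sketch; the CTL0/CTL3 register names, the accessors, and the pd_chan->dir field are assumptions, and the real driver's extra masking of neighbouring fields is omitted.

    static void pdc_set_dir(struct dma_chan *chan)
    {
            struct pch_dma_chan *pd_chan = to_pd_chan(chan);
            struct pch_dma *pd = to_pd(chan->device);
            u32 val;

            if (chan->chan_id < 8) {
                    val = dma_readl(pd, CTL0);          /* assumed */
                    if (pd_chan->dir == DMA_MEM_TO_DEV) /* dir field assumed */
                            val |= 0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +
                                           DMA_CTL0_DIR_SHIFT_BITS);
                    else
                            val &= ~(0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +
                                             DMA_CTL0_DIR_SHIFT_BITS));
                    dma_writel(pd, CTL0, val);
            } else {
                    int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */

                    val = dma_readl(pd, CTL3);          /* assumed */
                    if (pd_chan->dir == DMA_MEM_TO_DEV)
                            val |= 0x1 << (DMA_CTL0_BITS_PER_CH * ch +
                                           DMA_CTL0_DIR_SHIFT_BITS);
                    else
                            val &= ~(0x1 << (DMA_CTL0_BITS_PER_CH * ch +
                                             DMA_CTL0_DIR_SHIFT_BITS));
                    dma_writel(pd, CTL3, val);
            }

            dev_dbg(chan2dev(chan), "pdc_set_dir: chan %d -> %x\n",
                    chan->chan_id, val);
    }
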
257 static void pdc_set_mode(struct dma_chan *chan, u32 mode) in pdc_set_mode() argument
259 struct pch_dma *pd = to_pd(chan->device); in pdc_set_mode()
264 if (chan->chan_id < 8) { in pdc_set_mode()
266 (DMA_CTL0_BITS_PER_CH * chan->chan_id)); in pdc_set_mode()
267 mask_dir = 1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +\ in pdc_set_mode()
271 val |= mode << (DMA_CTL0_BITS_PER_CH * chan->chan_id); in pdc_set_mode()
275 int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */ in pdc_set_mode()
287 dev_dbg(chan2dev(chan), "pdc_set_mode: chan %d -> %x\n", in pdc_set_mode()
288 chan->chan_id, val); in pdc_set_mode()
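
pdc_set_mode() (lines 257-288) rewrites the mode bits of the same per-channel field, while the mask_dir computation at line 267 keeps the direction bit intact. A sketch of both paths under the same register-name assumptions; DMA_CTL0_MODE_MASK is likewise an assumed macro name.

    static void pdc_set_mode(struct dma_chan *chan, u32 mode)
    {
            struct pch_dma *pd = to_pd(chan->device);
            u32 mask_dir;
            u32 val;

            if (chan->chan_id < 8) {
                    /* preserve the direction bit while rewriting the mode
                     * bits of this channel's field (cf. listing line 267) */
                    mask_dir = 1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +
                                     DMA_CTL0_DIR_SHIFT_BITS);
                    val = dma_readl(pd, CTL0);              /* assumed */
                    val &= mask_dir | ~(DMA_CTL0_MODE_MASK <<
                                        (DMA_CTL0_BITS_PER_CH * chan->chan_id));
                    val |= mode << (DMA_CTL0_BITS_PER_CH * chan->chan_id);
                    dma_writel(pd, CTL0, val);
            } else {
                    int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */

                    val = dma_readl(pd, CTL3);              /* assumed */
                    val &= ~(DMA_CTL0_MODE_MASK << (DMA_CTL0_BITS_PER_CH * ch));
                    val |= mode << (DMA_CTL0_BITS_PER_CH * ch);
                    dma_writel(pd, CTL3, val);
            }

            dev_dbg(chan2dev(chan), "pdc_set_mode: chan %d -> %x\n",
                    chan->chan_id, val);
    }
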
293 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status0()
298 DMA_STATUS_BITS_PER_CH * pd_chan->chan.chan_id)); in pdc_get_status0()
303 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status2()
308 DMA_STATUS_BITS_PER_CH * (pd_chan->chan.chan_id - 8))); in pdc_get_status2()
315 if (pd_chan->chan.chan_id < 8) in pdc_is_idle()
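
pdc_get_status0()/pdc_get_status2() (lines 293-308) pull one channel's status field out of a packed status register, DMA_STATUS_BITS_PER_CH bits per channel with channels 8-11 rebased by -8, and pdc_is_idle() (line 315) selects between them on chan_id. A sketch; STS0/STS2, DMA_STATUS_SHIFT_BITS, DMA_STATUS_MASK_BITS and DMA_STATUS_IDLE are assumed names beyond what the fragments show.

    static u32 pdc_get_status0(struct pch_dma_chan *pd_chan)
    {
            struct pch_dma *pd = to_pd(pd_chan->chan.device);
            u32 val;

            val = dma_readl(pd, STS0);                  /* assumed */
            return DMA_STATUS_MASK_BITS & (val >> (DMA_STATUS_SHIFT_BITS +
                    DMA_STATUS_BITS_PER_CH * pd_chan->chan.chan_id));
    }

    static u32 pdc_get_status2(struct pch_dma_chan *pd_chan)
    {
            struct pch_dma *pd = to_pd(pd_chan->chan.device);
            u32 val;

            val = dma_readl(pd, STS2);                  /* assumed */
            return DMA_STATUS_MASK_BITS & (val >> (DMA_STATUS_SHIFT_BITS +
                    DMA_STATUS_BITS_PER_CH * (pd_chan->chan.chan_id - 8)));
    }

    static bool pdc_is_idle(struct pch_dma_chan *pd_chan)
    {
            u32 sts;

            if (pd_chan->chan.chan_id < 8)
                    sts = pdc_get_status0(pd_chan);
            else
                    sts = pdc_get_status2(pd_chan);

            return sts == DMA_STATUS_IDLE;              /* constant assumed */
    }
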
330 dev_err(chan2dev(&pd_chan->chan), in pdc_dostart()
335 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> dev_addr: %x\n", in pdc_dostart()
336 pd_chan->chan.chan_id, desc->regs.dev_addr); in pdc_dostart()
337 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> mem_addr: %x\n", in pdc_dostart()
338 pd_chan->chan.chan_id, desc->regs.mem_addr); in pdc_dostart()
339 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> size: %x\n", in pdc_dostart()
340 pd_chan->chan.chan_id, desc->regs.size); in pdc_dostart()
341 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> next: %x\n", in pdc_dostart()
342 pd_chan->chan.chan_id, desc->regs.next); in pdc_dostart()
349 pdc_set_mode(&pd_chan->chan, DMA_CTL0_ONESHOT); in pdc_dostart()
352 pdc_set_mode(&pd_chan->chan, DMA_CTL0_SG); in pdc_dostart()
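
pdc_dostart() (lines 330-352) refuses to start a non-idle channel, traces the descriptor's four hardware words (dev_addr, mem_addr, size, next), then chooses DMA_CTL0_ONESHOT for a lone descriptor or DMA_CTL0_SG for a chained list. A sketch; channel_writel(), the per-channel register names, the error string and the tx_list/txd.phys descriptor fields are assumptions.

    static void pdc_dostart(struct pch_dma_chan *pd_chan,
                            struct pch_dma_desc *desc)
    {
            if (!pdc_is_idle(pd_chan)) {
                    dev_err(chan2dev(&pd_chan->chan),
                            "BUG: Attempt to start non-idle channel\n");
                    return;
            }

            dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> dev_addr: %x\n",
                    pd_chan->chan.chan_id, desc->regs.dev_addr);
            dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> mem_addr: %x\n",
                    pd_chan->chan.chan_id, desc->regs.mem_addr);
            dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> size: %x\n",
                    pd_chan->chan.chan_id, desc->regs.size);
            dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> next: %x\n",
                    pd_chan->chan.chan_id, desc->regs.next);

            if (list_empty(&desc->tx_list)) {
                    /* single transfer: program the channel registers directly */
                    channel_writel(pd_chan, DEV_ADDR, desc->regs.dev_addr);
                    channel_writel(pd_chan, MEM_ADDR, desc->regs.mem_addr);
                    channel_writel(pd_chan, SIZE, desc->regs.size);
                    channel_writel(pd_chan, NEXT, desc->regs.next);
                    pdc_set_mode(&pd_chan->chan, DMA_CTL0_ONESHOT);
            } else {
                    /* chained transfer: point hardware at the first link */
                    channel_writel(pd_chan, NEXT, desc->txd.phys);
                    pdc_set_mode(&pd_chan->chan, DMA_CTL0_SG);
            }
    }
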
399 dev_crit(chan2dev(&pd_chan->chan), "Bad descriptor submitted\n"); in pdc_handle_error()
400 dev_crit(chan2dev(&pd_chan->chan), "descriptor cookie: %d\n", in pdc_handle_error()
420 struct pch_dma_chan *pd_chan = to_pd_chan(txd->chan); in pd_tx_submit()
437 static struct pch_dma_desc *pdc_alloc_desc(struct dma_chan *chan, gfp_t flags) in pdc_alloc_desc() argument
440 struct pch_dma *pd = to_pd(chan->device); in pdc_alloc_desc()
447 dma_async_tx_descriptor_init(&desc->txd, chan); in pdc_alloc_desc()
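
pdc_alloc_desc() (lines 437-447) allocates one descriptor and binds its embedded dma_async_tx_descriptor to the owning channel. A sketch assuming a DMA pool at pd->pool and the pd_tx_submit() callback visible at listing line 420; the remaining txd bookkeeping is standard dmaengine practice rather than something the fragments prove.

    static struct pch_dma_desc *pdc_alloc_desc(struct dma_chan *chan, gfp_t flags)
    {
            struct pch_dma *pd = to_pd(chan->device);
            struct pch_dma_desc *desc;
            dma_addr_t addr;

            desc = dma_pool_zalloc(pd->pool, flags, &addr); /* pool assumed */
            if (desc) {
                    INIT_LIST_HEAD(&desc->tx_list);
                    dma_async_tx_descriptor_init(&desc->txd, chan);
                    desc->txd.tx_submit = pd_tx_submit; /* cf. listing line 420 */
                    desc->txd.flags = DMA_CTRL_ACK;
                    desc->txd.phys = addr;
            }

            return desc;
    }
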
470 dev_dbg(chan2dev(&pd_chan->chan), "desc %p not ACKed\n", desc); in pdc_desc_get()
473 dev_dbg(chan2dev(&pd_chan->chan), "scanned %d descriptors\n", i); in pdc_desc_get()
476 ret = pdc_alloc_desc(&pd_chan->chan, GFP_ATOMIC); in pdc_desc_get()
482 dev_err(chan2dev(&pd_chan->chan), in pdc_desc_get()
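
pdc_desc_get() (lines 470-482) scans the channel's free list for a descriptor the client has ACKed, logging each one it must skip and how many it scanned, and falls back to an atomic allocation when the scan fails (line 476). A sketch; the free_list, desc_node, descs_allocated and lock names are assumptions.

    static struct pch_dma_desc *pdc_desc_get(struct pch_dma_chan *pd_chan)
    {
            struct pch_dma_desc *desc, *_d;
            struct pch_dma_desc *ret = NULL;
            int i = 0;

            spin_lock(&pd_chan->lock);
            list_for_each_entry_safe(desc, _d, &pd_chan->free_list, desc_node) {
                    i++;
                    if (async_tx_test_ack(&desc->txd)) {
                            list_del(&desc->desc_node);
                            ret = desc;
                            break;
                    }
                    dev_dbg(chan2dev(&pd_chan->chan), "desc %p not ACKed\n", desc);
            }
            spin_unlock(&pd_chan->lock);
            dev_dbg(chan2dev(&pd_chan->chan), "scanned %d descriptors\n", i);

            if (!ret) {
                    /* free list exhausted: allocate atomically (line 476) */
                    ret = pdc_alloc_desc(&pd_chan->chan, GFP_ATOMIC);
                    if (ret) {
                            spin_lock(&pd_chan->lock);
                            pd_chan->descs_allocated++;
                            spin_unlock(&pd_chan->lock);
                    } else {
                            dev_err(chan2dev(&pd_chan->chan),
                                    "failed to alloc desc\n");
                    }
            }

            return ret;
    }
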
501 static int pd_alloc_chan_resources(struct dma_chan *chan) in pd_alloc_chan_resources() argument
503 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_alloc_chan_resources()
509 dev_dbg(chan2dev(chan), "DMA channel not idle ?\n"); in pd_alloc_chan_resources()
517 desc = pdc_alloc_desc(chan, GFP_KERNEL); in pd_alloc_chan_resources()
520 dev_warn(chan2dev(chan), in pd_alloc_chan_resources()
531 dma_cookie_init(chan); in pd_alloc_chan_resources()
534 pdc_enable_irq(chan, 1); in pd_alloc_chan_resources()
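
pd_alloc_chan_resources() (lines 501-534) is the driver's alloc_chan_resources hook: refuse a busy channel, pre-allocate a batch of descriptors with GFP_KERNEL (line 517), splice them onto the free list, reset the cookie (line 531) and unmask the interrupt (line 534). A sketch; the batch-size constant, the warning text and the list/lock fields are assumptions.

    static int pd_alloc_chan_resources(struct dma_chan *chan)
    {
            struct pch_dma_chan *pd_chan = to_pd_chan(chan);
            struct pch_dma_desc *desc;
            LIST_HEAD(tmp_list);
            int i;

            if (!pdc_is_idle(pd_chan)) {
                    dev_dbg(chan2dev(chan), "DMA channel not idle ?\n");
                    return -EIO;
            }

            if (!list_empty(&pd_chan->free_list))
                    return pd_chan->descs_allocated;

            for (i = 0; i < init_nr_desc_per_channel; i++) { /* constant assumed */
                    desc = pdc_alloc_desc(chan, GFP_KERNEL);
                    if (!desc) {
                            dev_warn(chan2dev(chan),
                                     "Only allocated %d initial descriptors\n", i);
                            break;
                    }
                    list_add_tail(&desc->desc_node, &tmp_list);
            }

            spin_lock_irq(&pd_chan->lock);
            list_splice(&tmp_list, &pd_chan->free_list);
            pd_chan->descs_allocated = i;
            dma_cookie_init(chan);
            spin_unlock_irq(&pd_chan->lock);

            pdc_enable_irq(chan, 1);

            return pd_chan->descs_allocated;
    }
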
539 static void pd_free_chan_resources(struct dma_chan *chan) in pd_free_chan_resources() argument
541 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_free_chan_resources()
542 struct pch_dma *pd = to_pd(chan->device); in pd_free_chan_resources()
558 pdc_enable_irq(chan, 0); in pd_free_chan_resources()
561 static enum dma_status pd_tx_status(struct dma_chan *chan, dma_cookie_t cookie, in pd_tx_status() argument
564 return dma_cookie_status(chan, cookie, txstate); in pd_tx_status()
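
pd_tx_status() (lines 561-564) is a pure pass-through to the generic dmaengine cookie tracking, shown here whole; only the txstate parameter name is filled in.

    static enum dma_status pd_tx_status(struct dma_chan *chan,
                                        dma_cookie_t cookie,
                                        struct dma_tx_state *txstate)
    {
            /* all completion bookkeeping lives in the cookie helpers */
            return dma_cookie_status(chan, cookie, txstate);
    }
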
567 static void pd_issue_pending(struct dma_chan *chan) in pd_issue_pending() argument
569 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_issue_pending()
578 static struct dma_async_tx_descriptor *pd_prep_slave_sg(struct dma_chan *chan, in pd_prep_slave_sg() argument
583 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_prep_slave_sg()
584 struct pch_dma_slave *pd_slave = chan->private; in pd_prep_slave_sg()
593 dev_info(chan2dev(chan), "prep_slave_sg: length is zero!\n"); in pd_prep_slave_sg()
605 pdc_set_dir(chan); in pd_prep_slave_sg()
659 dev_err(chan2dev(chan), "failed to get desc or wrong parameters\n"); in pd_prep_slave_sg()
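
pd_prep_slave_sg() (lines 578-659) builds a descriptor chain for a slave transfer. The fragments pin down its guard rails: the slave parameters come from chan->private (line 584), a zero-length scatterlist is rejected (line 593), the direction is latched into hardware via pdc_set_dir() (line 605), and failures funnel to one error exit (line 659). The middle is heavily condensed here: the hardware-descriptor fields, the rx_reg/tx_reg slave addresses and the link chaining are all assumptions.

    static struct dma_async_tx_descriptor *
    pd_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                     unsigned int sg_len, enum dma_transfer_direction direction,
                     unsigned long flags, void *context)
    {
            struct pch_dma_chan *pd_chan = to_pd_chan(chan);
            struct pch_dma_slave *pd_slave = chan->private;
            struct pch_dma_desc *first = NULL, *prev = NULL, *desc;
            struct scatterlist *sg;
            unsigned int i;

            if (unlikely(!sg_len)) {
                    dev_info(chan2dev(chan), "prep_slave_sg: length is zero!\n");
                    return NULL;
            }

            pd_chan->dir = direction;   /* assumed field, read by pdc_set_dir() */
            pdc_set_dir(chan);

            for_each_sg(sgl, sg, sg_len, i) {
                    desc = pdc_desc_get(pd_chan);
                    if (!desc)
                            goto err_desc_get;

                    /* one hardware descriptor per sg entry (layout assumed) */
                    desc->regs.dev_addr = (direction == DMA_DEV_TO_MEM) ?
                                          pd_slave->rx_reg : pd_slave->tx_reg;
                    desc->regs.mem_addr = sg_dma_address(sg);
                    desc->regs.size = sg_dma_len(sg);

                    if (!first)
                            first = desc;
                    else
                            list_add_tail(&desc->desc_node, &first->tx_list);
                    if (prev)
                            prev->regs.next = desc->txd.phys; /* chain links */
                    prev = desc;
            }

            first->txd.flags = flags;
            return &first->txd;

    err_desc_get:
            dev_err(chan2dev(chan), "failed to get desc or wrong parameters\n");
            return NULL;
    }
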
664 static int pd_device_terminate_all(struct dma_chan *chan) in pd_device_terminate_all() argument
666 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_device_terminate_all()
672 pdc_set_mode(&pd_chan->chan, DMA_CTL0_DISABLE); in pd_device_terminate_all()
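
pd_device_terminate_all() (lines 664-672) halts the channel by writing DMA_CTL0_DISABLE into its mode field (line 672), then retires every queued descriptor. A sketch; the active_list/queue names and the pdc_chain_complete() helper are assumptions.

    static int pd_device_terminate_all(struct dma_chan *chan)
    {
            struct pch_dma_chan *pd_chan = to_pd_chan(chan);
            struct pch_dma_desc *desc, *_d;
            LIST_HEAD(list);

            spin_lock_irq(&pd_chan->lock);

            pdc_set_mode(&pd_chan->chan, DMA_CTL0_DISABLE);

            list_splice_init(&pd_chan->active_list, &list);
            list_splice_init(&pd_chan->queue, &list);

            list_for_each_entry_safe(desc, _d, &list, desc_node)
                    pdc_chain_complete(pd_chan, desc); /* helper assumed */

            spin_unlock_irq(&pd_chan->lock);

            return 0;
    }
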
691 dev_err(chan2dev(&pd_chan->chan), in pdc_tasklet()
754 struct dma_chan *chan, *_c; in pch_dma_save_regs() local
762 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_save_regs()
763 pd_chan = to_pd_chan(chan); in pch_dma_save_regs()
777 struct dma_chan *chan, *_c; in pch_dma_restore_regs() local
785 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_restore_regs()
786 pd_chan = to_pd_chan(chan); in pch_dma_restore_regs()
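
pch_dma_save_regs() and pch_dma_restore_regs() (lines 754-786) both walk pd->dma.channels with list_for_each_entry_safe(), recovering each wrapper with to_pd_chan() so its registers can be snapshotted across suspend. A sketch of the save side; the pd->regs/pd->ch_regs snapshot layout and the read accessors are assumptions, and restore is the mirror image with writes.

    static void pch_dma_save_regs(struct pch_dma *pd)
    {
            struct pch_dma_chan *pd_chan;
            struct dma_chan *chan, *_c;
            int i = 0;

            /* controller-wide control registers (names assumed) */
            pd->regs.dma_ctl0 = dma_readl(pd, CTL0);
            pd->regs.dma_ctl1 = dma_readl(pd, CTL1);
            pd->regs.dma_ctl2 = dma_readl(pd, CTL2);

            list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) {
                    pd_chan = to_pd_chan(chan);

                    /* per-channel transfer registers (layout assumed) */
                    pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR);
                    pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR);
                    pd->ch_regs[i].size = channel_readl(pd_chan, SIZE);
                    pd->ch_regs[i].next = channel_readl(pd_chan, NEXT);
                    i++;
            }
    }
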
902 pd_chan->chan.device = &pd->dma; in pch_dma_probe()
903 dma_cookie_init(&pd_chan->chan); in pch_dma_probe()
915 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); in pch_dma_probe()
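
In pch_dma_probe() (lines 902-915), each channel gets the same wiring: chan.device points back at the controller's dma_device, the cookie is seeded, and the channel is queued on dma.channels where the dmaengine core will find it. A sketch of that per-channel loop; the channel count, the channels array and the list initialisation are assumptions around the three listed lines.

    /* inside pch_dma_probe(), after pd->dma has been set up */
    for (i = 0; i < nr_channels; i++) {                  /* count assumed */
            struct pch_dma_chan *pd_chan = &pd->channels[i]; /* array assumed */

            pd_chan->chan.device = &pd->dma;             /* listing line 902 */
            dma_cookie_init(&pd_chan->chan);             /* listing line 903 */

            spin_lock_init(&pd_chan->lock);
            INIT_LIST_HEAD(&pd_chan->active_list);
            INIT_LIST_HEAD(&pd_chan->queue);
            INIT_LIST_HEAD(&pd_chan->free_list);

            /* listing line 915 */
            list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels);
    }
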
956 struct dma_chan *chan, *_c; in pch_dma_remove() local
963 list_for_each_entry_safe(chan, _c, &pd->dma.channels, in pch_dma_remove()
965 pd_chan = to_pd_chan(chan); in pch_dma_remove()
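
pch_dma_remove() (lines 956-965) tears down with the same safe-iteration idiom, killing each channel's tasklet after unregistering the dma_device. A sketch; the tasklet field is an assumption, and the rest of the real teardown (irq, pool, iounmap) is elided.

    static void pch_dma_remove(struct pci_dev *pdev)
    {
            struct pch_dma *pd = pci_get_drvdata(pdev);
            struct pch_dma_chan *pd_chan;
            struct dma_chan *chan, *_c;

            if (pd) {
                    dma_async_device_unregister(&pd->dma);

                    list_for_each_entry_safe(chan, _c, &pd->dma.channels,
                                             device_node) {
                            pd_chan = to_pd_chan(chan);

                            tasklet_kill(&pd_chan->tasklet); /* field assumed */
                    }
            }
    }
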