Lines matching refs:chan: references to the identifier chan in the pch_dma DMA controller driver (drivers/dma/pch_dma.c). The leading number on each line is that file's line number; the trailing annotation names the containing function, or marks the use as a member, argument, or local.

103 	struct dma_chan		chan;  member
155 static inline struct pch_dma_chan *to_pd_chan(struct dma_chan *chan) in to_pd_chan() argument
157 return container_of(chan, struct pch_dma_chan, chan); in to_pd_chan()
165 static inline struct device *chan2dev(struct dma_chan *chan) in chan2dev() argument
167 return &chan->dev->device; in chan2dev()
170 static inline struct device *chan2parent(struct dma_chan *chan) in chan2parent() argument
172 return chan->dev->device.parent; in chan2parent()
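The three helpers above are thin accessors: to_pd_chan() recovers the driver-private wrapper from the embedded struct dma_chan via container_of(), while chan2dev() and chan2parent() walk to the associated struct device. A minimal standalone sketch of the container_of() round trip, with stand-in struct layouts rather than the driver's real ones:

    #include <stddef.h>
    #include <stdio.h>

    /* Same pointer arithmetic the kernel macro performs. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct dma_chan { int chan_id; };   /* stand-in */

    struct pch_dma_chan {
        struct dma_chan chan;           /* embedded, as at line 103 */
        unsigned int err_status;        /* hypothetical private state */
    };

    static struct pch_dma_chan *to_pd_chan(struct dma_chan *chan)
    {
        return container_of(chan, struct pch_dma_chan, chan);
    }

    int main(void)
    {
        struct pch_dma_chan pd_chan = { .chan = { .chan_id = 3 } };

        /* Embedded member in, enclosing structure out. */
        printf("chan_id = %d\n", to_pd_chan(&pd_chan.chan)->chan.chan_id);
        return 0;
    }
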
189 static void pdc_enable_irq(struct dma_chan *chan, int enable) in pdc_enable_irq() argument
191 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq()
195 if (chan->chan_id < 8) in pdc_enable_irq()
196 pos = chan->chan_id; in pdc_enable_irq()
198 pos = chan->chan_id + 8; in pdc_enable_irq()
209 dev_dbg(chan2dev(chan), "pdc_enable_irq: chan %d -> %x\n", in pdc_enable_irq()
210 chan->chan_id, val); in pdc_enable_irq()
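pdc_enable_irq() maps a channel to a bit position in the interrupt-enable register: channels 0-7 sit at bits 0-7, channels 8-11 are pushed up by a further 8, to bits 16-19, and the register is updated read-modify-write. A standalone sketch of that arithmetic, with the hardware register faked as a plain variable:

    #include <stdio.h>
    #include <stdint.h>

    static uint32_t ctl2;   /* stand-in for the hardware enable register */

    static void pdc_enable_irq(int chan_id, int enable)
    {
        /* Bit layout as in the matches above: 0-7, then 16-19. */
        int pos = (chan_id < 8) ? chan_id : chan_id + 8;

        if (enable)
            ctl2 |= 0x1u << pos;
        else
            ctl2 &= ~(0x1u << pos);
    }

    int main(void)
    {
        pdc_enable_irq(3, 1);           /* bit 3 */
        pdc_enable_irq(9, 1);           /* bit 17 */
        printf("ctl2 = %08x\n", ctl2);  /* 00020008 */
        return 0;
    }
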
213 static void pdc_set_dir(struct dma_chan *chan) in pdc_set_dir() argument
215 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pdc_set_dir()
216 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir()
221 if (chan->chan_id < 8) { in pdc_set_dir()
225 (DMA_CTL0_BITS_PER_CH * chan->chan_id); in pdc_set_dir()
227 (DMA_CTL0_BITS_PER_CH * chan->chan_id)); in pdc_set_dir()
230 val |= 0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id + in pdc_set_dir()
233 val &= ~(0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id + in pdc_set_dir()
239 int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */ in pdc_set_dir()
257 dev_dbg(chan2dev(chan), "pdc_set_dir: chan %d -> %x\n", in pdc_set_dir()
258 chan->chan_id, val); in pdc_set_dir()
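pdc_set_dir() flips one direction bit per channel: each channel owns a field DMA_CTL0_BITS_PER_CH bits wide, with the direction bit DMA_CTL0_DIR_SHIFT bits into it; channels 8-11 get the same treatment in a second control register with ch = chan_id - 8. A sketch of the mask arithmetic; the constant values (4 and 2) match the driver's #defines as far as I can tell, so treat them as assumptions:

    #include <stdio.h>
    #include <stdint.h>

    #define DMA_CTL0_BITS_PER_CH    4   /* assumed field width */
    #define DMA_CTL0_DIR_SHIFT      2   /* assumed bit offset */

    static uint32_t set_dir(uint32_t ctl0, int chan_id, int to_device)
    {
        uint32_t bit = 0x1u << (DMA_CTL0_BITS_PER_CH * chan_id +
                                DMA_CTL0_DIR_SHIFT);

        return to_device ? (ctl0 | bit) : (ctl0 & ~bit);
    }

    int main(void)
    {
        /* Channel 2: direction bit is bit 4 * 2 + 2 = 10. */
        printf("%08x\n", set_dir(0, 2, 1));     /* 00000400 */
        return 0;
    }
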
261 static void pdc_set_mode(struct dma_chan *chan, u32 mode) in pdc_set_mode() argument
263 struct pch_dma *pd = to_pd(chan->device); in pdc_set_mode()
268 if (chan->chan_id < 8) { in pdc_set_mode()
270 (DMA_CTL0_BITS_PER_CH * chan->chan_id)); in pdc_set_mode()
271 mask_dir = 1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +\ in pdc_set_mode()
275 val |= mode << (DMA_CTL0_BITS_PER_CH * chan->chan_id); in pdc_set_mode()
279 int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */ in pdc_set_mode()
291 dev_dbg(chan2dev(chan), "pdc_set_mode: chan %d -> %x\n", in pdc_set_mode()
292 chan->chan_id, val); in pdc_set_mode()
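pdc_set_mode() rewrites the same per-channel field: it masks out the channel's mode bits and ors in the new mode, leaving the neighbouring channels' fields untouched (the real function is also careful about the direction bit sharing the field, which this sketch ignores). Assuming the 4-bit-per-channel layout and a 2-bit mode mask:

    #include <stdio.h>
    #include <stdint.h>

    #define DMA_CTL0_BITS_PER_CH    4       /* assumed, as above */
    #define DMA_CTL0_MODE_MASK_BITS 0x3     /* assumed 2-bit mode field */
    #define DMA_CTL0_ONESHOT        0x2     /* assumed mode encoding */

    static uint32_t set_mode(uint32_t ctl0, int chan_id, uint32_t mode)
    {
        int shift = DMA_CTL0_BITS_PER_CH * chan_id;

        ctl0 &= ~((uint32_t)DMA_CTL0_MODE_MASK_BITS << shift);
        ctl0 |= mode << shift;
        return ctl0;
    }

    int main(void)
    {
        /* Channel 1: mode field is bits 4-5 of a fully-set register. */
        printf("%08x\n", set_mode(0xffffffff, 1, DMA_CTL0_ONESHOT));
        /* -> ffffffef: bit 4 cleared, bit 5 kept */
        return 0;
    }
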
297 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status0()
302 DMA_STATUS_BITS_PER_CH * pd_chan->chan.chan_id)); in pdc_get_status0()
307 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status2()
312 DMA_STATUS_BITS_PER_CH * (pd_chan->chan.chan_id - 8))); in pdc_get_status2()
319 if (pd_chan->chan.chan_id < 8) in pdc_is_idle()
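pdc_get_status0() and pdc_get_status2() pull a 2-bit status field out of STS0 (channels 0-7) or STS2 (channels 8-11, indexed by chan_id - 8), and pdc_is_idle() picks the register by chan_id (line 319) before, presumably, testing for the idle encoding. A standalone sketch; the shift and mask constants are assumed from the driver's header:

    #include <stdio.h>
    #include <stdint.h>

    #define DMA_STATUS_MASK_BITS    0x3     /* assumed */
    #define DMA_STATUS_SHIFT_BITS   16      /* assumed */
    #define DMA_STATUS_BITS_PER_CH  2       /* assumed */
    #define DMA_STATUS_IDLE         0x0     /* assumed */

    /* Channels 8-11 run the same math on STS2 with chan_id - 8. */
    static uint32_t get_status0(uint32_t sts0, int chan_id)
    {
        return DMA_STATUS_MASK_BITS & (sts0 >>
                (DMA_STATUS_SHIFT_BITS +
                 DMA_STATUS_BITS_PER_CH * chan_id));
    }

    int main(void)
    {
        /* Fake STS0: channel 1's field (bits 18-19) holds 0x2. */
        uint32_t sts0 = 0x2u << (DMA_STATUS_SHIFT_BITS +
                                 DMA_STATUS_BITS_PER_CH * 1);

        printf("status = %x, idle = %d\n", get_status0(sts0, 1),
               get_status0(sts0, 1) == DMA_STATUS_IDLE);
        return 0;
    }
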
334 dev_err(chan2dev(&pd_chan->chan), in pdc_dostart()
339 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> dev_addr: %x\n", in pdc_dostart()
340 pd_chan->chan.chan_id, desc->regs.dev_addr); in pdc_dostart()
341 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> mem_addr: %x\n", in pdc_dostart()
342 pd_chan->chan.chan_id, desc->regs.mem_addr); in pdc_dostart()
343 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> size: %x\n", in pdc_dostart()
344 pd_chan->chan.chan_id, desc->regs.size); in pdc_dostart()
345 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> next: %x\n", in pdc_dostart()
346 pd_chan->chan.chan_id, desc->regs.next); in pdc_dostart()
353 pdc_set_mode(&pd_chan->chan, DMA_CTL0_ONESHOT); in pdc_dostart()
356 pdc_set_mode(&pd_chan->chan, DMA_CTL0_SG); in pdc_dostart()
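pdc_dostart() opens with an error path (the dev_err at line 334, presumably rejecting a start on a non-idle channel), traces the descriptor's four register words, and then selects DMA_CTL0_ONESHOT or DMA_CTL0_SG. The condition between the two is not visible in the matches; this condensed userspace sketch models it as "single descriptor vs chain", with all hardware access faked:

    #include <stdio.h>
    #include <stdint.h>
    #include <stdbool.h>

    struct desc_regs { uint32_t dev_addr, mem_addr, size, next; };

    enum { DMA_CTL0_DISABLE, DMA_CTL0_SG, DMA_CTL0_ONESHOT };

    static int dostart(int chan_id, bool idle, bool single_desc,
                       const struct desc_regs *r)
    {
        if (!idle) {
            fprintf(stderr, "chan %d not idle, refusing to start\n",
                    chan_id);
            return -1;
        }

        printf("chan %d -> dev_addr: %x\n", chan_id, r->dev_addr);
        printf("chan %d -> mem_addr: %x\n", chan_id, r->mem_addr);
        printf("chan %d -> size: %x\n", chan_id, r->size);
        printf("chan %d -> next: %x\n", chan_id, r->next);

        /* One descriptor: program it directly, one-shot mode.
         * A chain: point the hardware at it and use SG mode. */
        return single_desc ? DMA_CTL0_ONESHOT : DMA_CTL0_SG;
    }

    int main(void)
    {
        struct desc_regs r = { 0xfee0, 0x2000, 0x40, 0 };

        printf("mode = %d\n", dostart(0, true, true, &r));
        return 0;
    }
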
403 dev_crit(chan2dev(&pd_chan->chan), "Bad descriptor submitted\n"); in pdc_handle_error()
404 dev_crit(chan2dev(&pd_chan->chan), "descriptor cookie: %d\n", in pdc_handle_error()
424 struct pch_dma_chan *pd_chan = to_pd_chan(txd->chan); in pd_tx_submit()
441 static struct pch_dma_desc *pdc_alloc_desc(struct dma_chan *chan, gfp_t flags) in pdc_alloc_desc() argument
444 struct pch_dma *pd = to_pd(chan->device); in pdc_alloc_desc()
451 dma_async_tx_descriptor_init(&desc->txd, chan); in pdc_alloc_desc()
474 dev_dbg(chan2dev(&pd_chan->chan), "desc %p not ACKed\n", desc); in pdc_desc_get()
477 dev_dbg(chan2dev(&pd_chan->chan), "scanned %d descriptors\n", i); in pdc_desc_get()
480 ret = pdc_alloc_desc(&pd_chan->chan, GFP_ATOMIC); in pdc_desc_get()
486 dev_err(chan2dev(&pd_chan->chan), in pdc_desc_get()
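pdc_desc_get() recycles descriptors: it scans the channel's free list for one whose previous use has been ACKed, logs the ones it has to skip, and falls back to an atomic allocation through pdc_alloc_desc() when nothing is reusable. A userspace sketch of that policy, with a plain singly linked list and a boolean standing in for the async_tx ACK test:

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdbool.h>

    struct desc {
        bool acked;             /* async_tx_test_ack() stand-in */
        struct desc *next;
    };

    static struct desc *desc_get(struct desc **free_list)
    {
        struct desc **p = free_list;
        struct desc *ret = NULL;
        int i = 0;

        for (; *p; p = &(*p)->next) {
            i++;
            if ((*p)->acked) {          /* reusable: unlink it */
                ret = *p;
                *p = ret->next;
                break;
            }
            printf("desc %p not ACKed\n", (void *)*p);
        }
        printf("scanned %d descriptors\n", i);

        if (!ret)       /* nothing reusable: allocate, GFP_ATOMIC-style */
            ret = calloc(1, sizeof(*ret));
        return ret;
    }

    int main(void)
    {
        struct desc busy = { .acked = false };
        struct desc idle = { .acked = true };
        struct desc *free_list = &busy;

        busy.next = &idle;
        printf("recycled: %d\n", desc_get(&free_list) == &idle);
        return 0;
    }

The real function does the scan under pd_chan->lock; the sketch leaves locking out.
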
505 static int pd_alloc_chan_resources(struct dma_chan *chan) in pd_alloc_chan_resources() argument
507 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_alloc_chan_resources()
513 dev_dbg(chan2dev(chan), "DMA channel not idle ?\n"); in pd_alloc_chan_resources()
521 desc = pdc_alloc_desc(chan, GFP_KERNEL); in pd_alloc_chan_resources()
524 dev_warn(chan2dev(chan), in pd_alloc_chan_resources()
535 dma_cookie_init(chan); in pd_alloc_chan_resources()
538 pdc_enable_irq(chan, 1); in pd_alloc_chan_resources()
543 static void pd_free_chan_resources(struct dma_chan *chan) in pd_free_chan_resources() argument
545 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_free_chan_resources()
546 struct pch_dma *pd = to_pd(chan->device); in pd_free_chan_resources()
562 pdc_enable_irq(chan, 0); in pd_free_chan_resources()
565 static enum dma_status pd_tx_status(struct dma_chan *chan, dma_cookie_t cookie, in pd_tx_status() argument
568 return dma_cookie_status(chan, cookie, txstate); in pd_tx_status()
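pd_tx_status() defers entirely to the generic dma_cookie_status() helper. The cookie protocol it implements, reduced to a toy (loudly simplified: the real helper also handles cookie wraparound and fills in the optional txstate):

    #include <stdio.h>

    typedef int cookie_t;

    struct toy_chan { cookie_t cookie, completed; };

    /* A cookie is complete once the channel's completion counter
     * has caught up with it (ignoring wraparound). */
    static const char *tx_status(const struct toy_chan *c, cookie_t cookie)
    {
        return cookie <= c->completed ? "DMA_COMPLETE" : "DMA_IN_PROGRESS";
    }

    int main(void)
    {
        struct toy_chan c = { .cookie = 5, .completed = 3 };

        printf("%s\n", tx_status(&c, 2));   /* DMA_COMPLETE */
        printf("%s\n", tx_status(&c, 5));   /* DMA_IN_PROGRESS */
        return 0;
    }
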
571 static void pd_issue_pending(struct dma_chan *chan) in pd_issue_pending() argument
573 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_issue_pending()
582 static struct dma_async_tx_descriptor *pd_prep_slave_sg(struct dma_chan *chan, in pd_prep_slave_sg() argument
587 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_prep_slave_sg()
588 struct pch_dma_slave *pd_slave = chan->private; in pd_prep_slave_sg()
597 dev_info(chan2dev(chan), "prep_slave_sg: length is zero!\n"); in pd_prep_slave_sg()
609 pdc_set_dir(chan); in pd_prep_slave_sg()
663 dev_err(chan2dev(chan), "failed to get desc or wrong parameters\n"); in pd_prep_slave_sg()
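pd_prep_slave_sg() builds one hardware descriptor per scatterlist entry and chains them through the "next" register word seen in pdc_dostart()'s traces, terminating the last entry. A userspace sketch of the chaining pattern; the end marker value and the fake bus addresses are made up for the demo:

    #include <stdio.h>
    #include <stdint.h>

    #define DESC_END        0x1u    /* hypothetical end-of-chain marker */

    struct seg { uint32_t mem_addr, size; };

    struct hw_desc {
        uint32_t dev_addr, mem_addr, size, next;
        uint32_t phys;              /* fake bus address of this desc */
    };

    /* Fill descs[0..sg_len-1] (sg_len >= 1) from the segment list. */
    static void prep_sg(struct hw_desc *descs, const struct seg *sgl,
                        int sg_len, uint32_t dev_addr)
    {
        for (int i = 0; i < sg_len; i++) {
            struct hw_desc *d = &descs[i];

            d->dev_addr = dev_addr;         /* fixed device FIFO address */
            d->mem_addr = sgl[i].mem_addr;
            d->size = sgl[i].size;
            d->phys = 0x1000u + 0x10u * i;
            if (i > 0)                      /* link predecessor to us */
                descs[i - 1].next = d->phys;
        }
        descs[sg_len - 1].next = DESC_END;  /* terminate the chain */
    }

    int main(void)
    {
        struct seg sgl[] = { { 0x2000, 0x40 }, { 0x3000, 0x80 } };
        struct hw_desc descs[2];

        prep_sg(descs, sgl, 2, 0xfee0);
        for (int i = 0; i < 2; i++)
            printf("desc %d: mem %x size %x next %x\n", i,
                   descs[i].mem_addr, descs[i].size, descs[i].next);
        return 0;
    }
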
668 static int pd_device_terminate_all(struct dma_chan *chan) in pd_device_terminate_all() argument
670 struct pch_dma_chan *pd_chan = to_pd_chan(chan); in pd_device_terminate_all()
676 pdc_set_mode(&pd_chan->chan, DMA_CTL0_DISABLE); in pd_device_terminate_all()
695 dev_err(chan2dev(&pd_chan->chan), in pdc_tasklet()
758 struct dma_chan *chan, *_c; in pch_dma_save_regs() local
766 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_save_regs()
767 pd_chan = to_pd_chan(chan); in pch_dma_save_regs()
781 struct dma_chan *chan, *_c; in pch_dma_restore_regs() local
789 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_restore_regs()
790 pd_chan = to_pd_chan(chan); in pch_dma_restore_regs()
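pch_dma_save_regs() and pch_dma_restore_regs() walk the device's channel list with list_for_each_entry_safe(), recover each pch_dma_chan with to_pd_chan(), and copy the per-channel registers out to a save area and back across suspend/resume. A standalone analogue, with a plain singly linked list standing in for the kernel's list_head machinery:

    #include <stdio.h>
    #include <stddef.h>
    #include <stdint.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct dma_chan { struct dma_chan *device_node; };  /* list stand-in */

    struct pch_dma_chan {
        struct dma_chan chan;
        uint32_t dev_addr;              /* stands in for a live register */
    };

    struct pch_dma {
        struct dma_chan *channels;      /* head of the channel list */
        uint32_t saved_dev_addr[8];     /* save area */
    };

    static void save_regs(struct pch_dma *pd)
    {
        int i = 0;

        for (struct dma_chan *c = pd->channels; c; c = c->device_node)
            pd->saved_dev_addr[i++] =
                container_of(c, struct pch_dma_chan, chan)->dev_addr;
    }

    static void restore_regs(struct pch_dma *pd)
    {
        int i = 0;

        for (struct dma_chan *c = pd->channels; c; c = c->device_node)
            container_of(c, struct pch_dma_chan, chan)->dev_addr =
                pd->saved_dev_addr[i++];
    }

    int main(void)
    {
        struct pch_dma_chan c0 = { .dev_addr = 0x1111 };
        struct pch_dma_chan c1 = { .dev_addr = 0x2222 };
        struct pch_dma pd = { .channels = &c0.chan };

        c0.chan.device_node = &c1.chan;
        save_regs(&pd);
        c0.dev_addr = 0;                /* suspend wipes the "register" */
        restore_regs(&pd);
        printf("%x\n", c0.dev_addr);    /* 1111 */
        return 0;
    }
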
906 pd_chan->chan.device = &pd->dma; in pch_dma_probe()
907 dma_cookie_init(&pd_chan->chan); in pch_dma_probe()
919 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); in pch_dma_probe()
960 struct dma_chan *chan, *_c; in pch_dma_remove() local
967 list_for_each_entry_safe(chan, _c, &pd->dma.channels, in pch_dma_remove()
969 pd_chan = to_pd_chan(chan); in pch_dma_remove()