Lines matching refs: dwc
78 static struct dw_desc *dwc_first_active(struct dw_dma_chan *dwc) in dwc_first_active() argument
80 return to_dw_desc(dwc->active_list.next); in dwc_first_active()
83 static struct dw_desc *dwc_desc_get(struct dw_dma_chan *dwc) in dwc_desc_get() argument
90 spin_lock_irqsave(&dwc->lock, flags); in dwc_desc_get()
91 list_for_each_entry_safe(desc, _desc, &dwc->free_list, desc_node) { in dwc_desc_get()
98 dev_dbg(chan2dev(&dwc->chan), "desc %p not ACKed\n", desc); in dwc_desc_get()
100 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_desc_get()
102 dev_vdbg(chan2dev(&dwc->chan), "scanned %u descriptors on freelist\n", i); in dwc_desc_get()
111 static void dwc_desc_put(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_desc_put() argument
118 spin_lock_irqsave(&dwc->lock, flags); in dwc_desc_put()
120 dev_vdbg(chan2dev(&dwc->chan), in dwc_desc_put()
123 list_splice_init(&desc->tx_list, &dwc->free_list); in dwc_desc_put()
124 dev_vdbg(chan2dev(&dwc->chan), "moving desc %p to freelist\n", desc); in dwc_desc_put()
125 list_add(&desc->desc_node, &dwc->free_list); in dwc_desc_put()
126 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_desc_put()
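
The two helpers above are the channel's descriptor pool: dwc_desc_get() walks dwc->free_list under the channel spinlock until it finds a descriptor whose previous user has ACKed it, and dwc_desc_put() splices the descriptor's tx_list children plus the descriptor itself back onto the free list. Below is a minimal user-space sketch of that get/put pattern, with a mutex standing in for the spinlock; the demo_* names and the simplified descriptor type are illustrative, not the driver's own.

#include <pthread.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Simplified stand-in for struct dw_desc: only what the pattern needs. */
struct demo_desc {
	struct demo_desc *next;		/* free-list link */
	bool acked;			/* set once the client has consumed the cookie */
};

static struct demo_desc pool[8];
static struct demo_desc *free_list;
static pthread_mutex_t pool_lock = PTHREAD_MUTEX_INITIALIZER;

/* Like dwc_desc_get(): take the first ACKed descriptor off the free list. */
static struct demo_desc *demo_desc_get(void)
{
	struct demo_desc **pp, *d = NULL;

	pthread_mutex_lock(&pool_lock);
	for (pp = &free_list; *pp; pp = &(*pp)->next) {
		if ((*pp)->acked) {
			d = *pp;
			*pp = d->next;		/* unlink */
			d->next = NULL;
			break;
		}
	}
	pthread_mutex_unlock(&pool_lock);
	return d;				/* NULL if the pool is exhausted */
}

/* Like dwc_desc_put(): hand a descriptor back to the pool. */
static void demo_desc_put(struct demo_desc *d)
{
	pthread_mutex_lock(&pool_lock);
	d->next = free_list;
	free_list = d;
	pthread_mutex_unlock(&pool_lock);
}

int main(void)
{
	for (size_t i = 0; i < sizeof(pool) / sizeof(pool[0]); i++) {
		pool[i].acked = true;		/* fresh descriptors start ACKed */
		demo_desc_put(&pool[i]);
	}

	struct demo_desc *d = demo_desc_get();
	printf("got %p\n", (void *)d);
	demo_desc_put(d);
	return 0;
}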
130 static void dwc_initialize(struct dw_dma_chan *dwc) in dwc_initialize() argument
132 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_initialize()
134 u32 cfglo = DWC_CFGL_CH_PRIOR(dwc->priority); in dwc_initialize()
136 if (dwc->initialized == true) in dwc_initialize()
139 cfghi |= DWC_CFGH_DST_PER(dwc->dst_id); in dwc_initialize()
140 cfghi |= DWC_CFGH_SRC_PER(dwc->src_id); in dwc_initialize()
142 channel_writel(dwc, CFG_LO, cfglo); in dwc_initialize()
143 channel_writel(dwc, CFG_HI, cfghi); in dwc_initialize()
146 channel_set_bit(dw, MASK.XFER, dwc->mask); in dwc_initialize()
147 channel_set_bit(dw, MASK.ERROR, dwc->mask); in dwc_initialize()
149 dwc->initialized = true; in dwc_initialize()
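
dwc_initialize() does one-time per-channel setup: the channel priority goes into CFG_LO, the source/destination handshake interface IDs go into CFG_HI, and the channel's XFER and ERROR interrupts are unmasked; the dwc->initialized flag keeps this from repeating until the channel's resources are freed. A small sketch of the CFG value composition follows; the field shifts are assumptions modelled on the driver's regs.h, and the driver additionally ORs a FIFO-mode bit into CFG_HI that is not modelled here.

#include <stdint.h>
#include <stdio.h>

/* Field shifts modelled on the driver's regs.h; treat the exact positions
 * as an assumption and check regs.h before relying on them. */
#define DEMO_CFGL_CH_PRIOR(x)	((uint32_t)(x) << 5)	/* channel priority        */
#define DEMO_CFGH_SRC_PER(x)	((uint32_t)(x) << 7)	/* src handshake interface */
#define DEMO_CFGH_DST_PER(x)	((uint32_t)(x) << 11)	/* dst handshake interface */

/* Compose the CFG_LO/CFG_HI values dwc_initialize() would program for a
 * slave channel with the given priority and request-line IDs. */
static void demo_cfg(unsigned int prio, unsigned int src_id,
		     unsigned int dst_id, uint32_t *cfglo, uint32_t *cfghi)
{
	*cfglo = DEMO_CFGL_CH_PRIOR(prio);
	*cfghi = DEMO_CFGH_SRC_PER(src_id) | DEMO_CFGH_DST_PER(dst_id);
}

int main(void)
{
	uint32_t lo, hi;

	demo_cfg(3, 0, 1, &lo, &hi);
	printf("CFG_LO=0x%08x CFG_HI=0x%08x\n", (unsigned int)lo, (unsigned int)hi);
	return 0;
}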
169 static inline void dwc_dump_chan_regs(struct dw_dma_chan *dwc) in dwc_dump_chan_regs() argument
171 dev_err(chan2dev(&dwc->chan), in dwc_dump_chan_regs()
173 channel_readl(dwc, SAR), in dwc_dump_chan_regs()
174 channel_readl(dwc, DAR), in dwc_dump_chan_regs()
175 channel_readl(dwc, LLP), in dwc_dump_chan_regs()
176 channel_readl(dwc, CTL_HI), in dwc_dump_chan_regs()
177 channel_readl(dwc, CTL_LO)); in dwc_dump_chan_regs()
180 static inline void dwc_chan_disable(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_chan_disable() argument
182 channel_clear_bit(dw, CH_EN, dwc->mask); in dwc_chan_disable()
183 while (dma_readl(dw, CH_EN) & dwc->mask) in dwc_chan_disable()
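
dwc_chan_disable() clears the channel's CH_EN bit and then polls CH_EN until the controller confirms the channel has actually stopped, since the hardware keeps the bit set while the current transfer drains. A self-contained model of that disable-and-poll shape is below; the driver's own loop is unbounded, and the iteration cap here exists only so the sketch terminates.

#include <stdbool.h>
#include <stdint.h>

static volatile uint32_t ch_en;			/* stand-in for the CH_EN MMIO register */

/* Shape of dwc_chan_disable(): clear the enable bit, then poll until the
 * hardware confirms the channel is idle. */
static bool demo_chan_disable(uint32_t chan_mask)
{
	ch_en &= ~chan_mask;			/* request disable */

	for (int i = 0; i < 1000; i++) {
		if (!(ch_en & chan_mask))
			return true;		/* channel really off */
		/* the kernel calls cpu_relax() here */
	}
	return false;
}

int main(void)
{
	ch_en = 1u << 0;			/* pretend channel 0 is running */
	return demo_chan_disable(1u << 0) ? 0 : 1;
}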
190 static inline void dwc_do_single_block(struct dw_dma_chan *dwc, in dwc_do_single_block() argument
193 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_do_single_block()
202 channel_writel(dwc, SAR, desc->lli.sar); in dwc_do_single_block()
203 channel_writel(dwc, DAR, desc->lli.dar); in dwc_do_single_block()
204 channel_writel(dwc, CTL_LO, ctllo); in dwc_do_single_block()
205 channel_writel(dwc, CTL_HI, desc->lli.ctlhi); in dwc_do_single_block()
206 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_do_single_block()
209 dwc->tx_node_active = dwc->tx_node_active->next; in dwc_do_single_block()
213 static void dwc_dostart(struct dw_dma_chan *dwc, struct dw_desc *first) in dwc_dostart() argument
215 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_dostart()
219 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_dostart()
220 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
223 dwc_dump_chan_regs(dwc); in dwc_dostart()
229 if (dwc->nollp) { in dwc_dostart()
231 &dwc->flags); in dwc_dostart()
233 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
238 dwc_initialize(dwc); in dwc_dostart()
240 dwc->residue = first->total_len; in dwc_dostart()
241 dwc->tx_node_active = &first->tx_list; in dwc_dostart()
244 dwc_do_single_block(dwc, first); in dwc_dostart()
249 dwc_initialize(dwc); in dwc_dostart()
251 channel_writel(dwc, LLP, first->txd.phys); in dwc_dostart()
252 channel_writel(dwc, CTL_LO, in dwc_dostart()
254 channel_writel(dwc, CTL_HI, 0); in dwc_dostart()
255 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_dostart()
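
dwc_dostart() picks between two ways of kicking off a transfer: on controllers without hardware multi-block support (dwc->nollp) it enters soft-LLP mode, records the residue, and programs only the first block, leaving the interrupt path to feed the remaining blocks one at a time; otherwise it writes the physical address of the first LLI into LLP, enables linked-list fetching in CTL_LO, and sets CH_EN so the controller walks the chain itself. The sketch below models that decision with printf() in place of MMIO writes; all demo_* names are illustrative.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified model of the dwc_dostart() decision; printf() stands in for
 * channel_writel()/channel_set_bit() on MMIO. */
struct demo_chan {
	bool nollp;		/* controller lacks hardware multi-block (LLP) support */
	uint32_t residue;	/* bytes left, kept for residue reporting */
};

struct demo_desc {
	uint32_t llp_phys;	/* DMA address of the first hardware LLI */
	uint32_t total_len;
};

static void demo_dostart(struct demo_chan *c, const struct demo_desc *first)
{
	if (c->nollp) {
		/* Soft LLP: program one block now; the interrupt path will
		 * reprogram SAR/DAR/CTL for each following block. */
		c->residue = first->total_len;
		printf("SAR/DAR/CTL <- first block, CH_EN <- 1\n");
		return;
	}

	/* Hardware LLP: hand the whole LLI chain to the controller. */
	printf("LLP    <- 0x%08x\n", (unsigned int)first->llp_phys);
	printf("CTL_LO <- LLP_D_EN | LLP_S_EN, CTL_HI <- 0, CH_EN <- 1\n");
}

int main(void)
{
	struct demo_chan soft = { .nollp = true }, hw = { .nollp = false };
	struct demo_desc d = { .llp_phys = 0x1000, .total_len = 4096 };

	demo_dostart(&soft, &d);
	demo_dostart(&hw, &d);
	return 0;
}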
258 static void dwc_dostart_first_queued(struct dw_dma_chan *dwc) in dwc_dostart_first_queued() argument
262 if (list_empty(&dwc->queue)) in dwc_dostart_first_queued()
265 list_move(dwc->queue.next, &dwc->active_list); in dwc_dostart_first_queued()
266 desc = dwc_first_active(dwc); in dwc_dostart_first_queued()
267 dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__, desc->txd.cookie); in dwc_dostart_first_queued()
268 dwc_dostart(dwc, desc); in dwc_dostart_first_queued()
274 dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc, in dwc_descriptor_complete() argument
283 dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie); in dwc_descriptor_complete()
285 spin_lock_irqsave(&dwc->lock, flags); in dwc_descriptor_complete()
297 list_splice_init(&desc->tx_list, &dwc->free_list); in dwc_descriptor_complete()
298 list_move(&desc->desc_node, &dwc->free_list); in dwc_descriptor_complete()
301 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_descriptor_complete()
307 static void dwc_complete_all(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_complete_all() argument
313 spin_lock_irqsave(&dwc->lock, flags); in dwc_complete_all()
314 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_complete_all()
315 dev_err(chan2dev(&dwc->chan), in dwc_complete_all()
319 dwc_chan_disable(dw, dwc); in dwc_complete_all()
326 list_splice_init(&dwc->active_list, &list); in dwc_complete_all()
327 dwc_dostart_first_queued(dwc); in dwc_complete_all()
329 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_complete_all()
332 dwc_descriptor_complete(dwc, desc, true); in dwc_complete_all()
336 static inline u32 dwc_get_sent(struct dw_dma_chan *dwc) in dwc_get_sent() argument
338 u32 ctlhi = channel_readl(dwc, CTL_HI); in dwc_get_sent()
339 u32 ctllo = channel_readl(dwc, CTL_LO); in dwc_get_sent()
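
dwc_get_sent() turns the hardware's progress counter into bytes: CTL_HI latches BLOCK_TS, the number of source-width items moved so far in the current block, and CTL_LO carries the source transfer width as log2(bytes per item), so the byte count is BLOCK_TS scaled by that width. A standalone restatement of the arithmetic is below; the 12-bit BLOCK_TS mask is the common configuration and is an assumption here.

#include <stdint.h>
#include <stdio.h>

#define DEMO_BLOCK_TS_MASK	0x00000fffu	/* CTL_HI[11:0]; typical width, assumed */

/* Bytes already sent in the current block: items transferred (BLOCK_TS)
 * times bytes per item (1 << SRC_TR_WIDTH, CTL_LO bits [6:4]). */
static uint32_t demo_get_sent(uint32_t ctlhi, uint32_t ctllo)
{
	return (ctlhi & DEMO_BLOCK_TS_MASK) * (1u << ((ctllo >> 4) & 7));
}

int main(void)
{
	/* 100 items already moved at a 4-byte source width -> 400 bytes */
	printf("%u\n", (unsigned int)demo_get_sent(100, 2u << 4));
	return 0;
}

dwc_scan_descriptors() and dwc_get_residue() subtract this value from the bookkept residue to account for the block currently in flight.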
344 static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_scan_descriptors() argument
352 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
353 llp = channel_readl(dwc, LLP); in dwc_scan_descriptors()
356 if (status_xfer & dwc->mask) { in dwc_scan_descriptors()
358 dma_writel(dw, CLEAR.XFER, dwc->mask); in dwc_scan_descriptors()
360 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) { in dwc_scan_descriptors()
361 struct list_head *head, *active = dwc->tx_node_active; in dwc_scan_descriptors()
367 desc = dwc_first_active(dwc); in dwc_scan_descriptors()
375 dwc->residue -= desc->len; in dwc_scan_descriptors()
380 dwc_do_single_block(dwc, child); in dwc_scan_descriptors()
382 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
387 clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); in dwc_scan_descriptors()
390 dwc->residue = 0; in dwc_scan_descriptors()
392 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
394 dwc_complete_all(dw, dwc); in dwc_scan_descriptors()
398 if (list_empty(&dwc->active_list)) { in dwc_scan_descriptors()
399 dwc->residue = 0; in dwc_scan_descriptors()
400 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
404 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) { in dwc_scan_descriptors()
405 dev_vdbg(chan2dev(&dwc->chan), "%s: soft LLP mode\n", __func__); in dwc_scan_descriptors()
406 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
410 dev_vdbg(chan2dev(&dwc->chan), "%s: llp=%pad\n", __func__, &llp); in dwc_scan_descriptors()
412 list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) { in dwc_scan_descriptors()
414 dwc->residue = desc->total_len; in dwc_scan_descriptors()
418 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
425 dwc->residue -= dwc_get_sent(dwc); in dwc_scan_descriptors()
426 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
430 dwc->residue -= desc->len; in dwc_scan_descriptors()
434 dwc->residue -= dwc_get_sent(dwc); in dwc_scan_descriptors()
435 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
438 dwc->residue -= child->len; in dwc_scan_descriptors()
445 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
446 dwc_descriptor_complete(dwc, desc, true); in dwc_scan_descriptors()
447 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
450 dev_err(chan2dev(&dwc->chan), in dwc_scan_descriptors()
454 dwc_chan_disable(dw, dwc); in dwc_scan_descriptors()
456 dwc_dostart_first_queued(dwc); in dwc_scan_descriptors()
457 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
460 static inline void dwc_dump_lli(struct dw_dma_chan *dwc, struct dw_lli *lli) in dwc_dump_lli() argument
462 dev_crit(chan2dev(&dwc->chan), " desc: s0x%x d0x%x l0x%x c0x%x:%x\n", in dwc_dump_lli()
466 static void dwc_handle_error(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_handle_error() argument
472 dwc_scan_descriptors(dw, dwc); in dwc_handle_error()
474 spin_lock_irqsave(&dwc->lock, flags); in dwc_handle_error()
481 bad_desc = dwc_first_active(dwc); in dwc_handle_error()
483 list_move(dwc->queue.next, dwc->active_list.prev); in dwc_handle_error()
486 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dwc_handle_error()
487 if (!list_empty(&dwc->active_list)) in dwc_handle_error()
488 dwc_dostart(dwc, dwc_first_active(dwc)); in dwc_handle_error()
497 dev_WARN(chan2dev(&dwc->chan), "Bad descriptor submitted for DMA!\n" in dwc_handle_error()
499 dwc_dump_lli(dwc, &bad_desc->lli); in dwc_handle_error()
501 dwc_dump_lli(dwc, &child->lli); in dwc_handle_error()
503 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_handle_error()
506 dwc_descriptor_complete(dwc, bad_desc, true); in dwc_handle_error()
513 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_get_src_addr() local
514 return channel_readl(dwc, SAR); in dw_dma_get_src_addr()
520 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_get_dst_addr() local
521 return channel_readl(dwc, DAR); in dw_dma_get_dst_addr()
526 static void dwc_handle_cyclic(struct dw_dma *dw, struct dw_dma_chan *dwc, in dwc_handle_cyclic() argument
531 if (status_block & dwc->mask) { in dwc_handle_cyclic()
535 dev_vdbg(chan2dev(&dwc->chan), "new cyclic period llp 0x%08x\n", in dwc_handle_cyclic()
536 channel_readl(dwc, LLP)); in dwc_handle_cyclic()
537 dma_writel(dw, CLEAR.BLOCK, dwc->mask); in dwc_handle_cyclic()
539 callback = dwc->cdesc->period_callback; in dwc_handle_cyclic()
540 callback_param = dwc->cdesc->period_callback_param; in dwc_handle_cyclic()
550 if (unlikely(status_err & dwc->mask) || in dwc_handle_cyclic()
551 unlikely(status_xfer & dwc->mask)) { in dwc_handle_cyclic()
554 dev_err(chan2dev(&dwc->chan), in dwc_handle_cyclic()
558 spin_lock_irqsave(&dwc->lock, flags); in dwc_handle_cyclic()
560 dwc_dump_chan_regs(dwc); in dwc_handle_cyclic()
562 dwc_chan_disable(dw, dwc); in dwc_handle_cyclic()
565 channel_writel(dwc, LLP, 0); in dwc_handle_cyclic()
566 channel_writel(dwc, CTL_LO, 0); in dwc_handle_cyclic()
567 channel_writel(dwc, CTL_HI, 0); in dwc_handle_cyclic()
569 dma_writel(dw, CLEAR.BLOCK, dwc->mask); in dwc_handle_cyclic()
570 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dwc_handle_cyclic()
571 dma_writel(dw, CLEAR.XFER, dwc->mask); in dwc_handle_cyclic()
573 for (i = 0; i < dwc->cdesc->periods; i++) in dwc_handle_cyclic()
574 dwc_dump_lli(dwc, &dwc->cdesc->desc[i]->lli); in dwc_handle_cyclic()
576 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_handle_cyclic()
580 channel_set_bit(dw, MASK.BLOCK, dwc->mask); in dwc_handle_cyclic()
588 struct dw_dma_chan *dwc; in dw_dma_tasklet() local
601 dwc = &dw->chan[i]; in dw_dma_tasklet()
602 if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) in dw_dma_tasklet()
603 dwc_handle_cyclic(dw, dwc, status_block, status_err, in dw_dma_tasklet()
606 dwc_handle_error(dw, dwc); in dw_dma_tasklet()
608 dwc_scan_descriptors(dw, dwc); in dw_dma_tasklet()
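
The tasklet fans the raw status out per channel: cyclic channels get dwc_handle_cyclic(), channels flagged in the error status get dwc_handle_error(), and everything else gets a plain dwc_scan_descriptors(). A stub dispatcher with the same shape, handlers reduced to printf(), follows.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Dispatch shape of dw_dma_tasklet(); the three handlers are stubs. */
struct demo_chan {
	bool cyclic;		/* DW_DMA_IS_CYCLIC set on this channel */
	uint32_t mask;		/* 1 << channel index */
};

static void handle_cyclic(struct demo_chan *c) { printf("cyclic ch mask %u\n", (unsigned int)c->mask); }
static void handle_error(struct demo_chan *c)  { printf("error  ch mask %u\n", (unsigned int)c->mask); }
static void scan_descs(struct demo_chan *c)    { printf("scan   ch mask %u\n", (unsigned int)c->mask); }

static void demo_tasklet(struct demo_chan *chans, int nr, uint32_t status_err)
{
	for (int i = 0; i < nr; i++) {
		struct demo_chan *c = &chans[i];

		if (c->cyclic)
			handle_cyclic(c);
		else if (status_err & c->mask)
			handle_error(c);
		else
			scan_descs(c);
	}
}

int main(void)
{
	struct demo_chan chans[] = {
		{ .cyclic = true,  .mask = 1 << 0 },
		{ .cyclic = false, .mask = 1 << 1 },
	};

	demo_tasklet(chans, 2, 1 << 1);		/* channel 1 reports an error */
	return 0;
}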
659 struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan); in dwc_tx_submit() local
663 spin_lock_irqsave(&dwc->lock, flags); in dwc_tx_submit()
673 list_add_tail(&desc->desc_node, &dwc->queue); in dwc_tx_submit()
675 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_tx_submit()
684 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_dma_memcpy() local
705 dwc->direction = DMA_MEM_TO_MEM; in dwc_prep_dma_memcpy()
707 data_width = min_t(unsigned int, dw->data_width[dwc->src_master], in dwc_prep_dma_memcpy()
708 dw->data_width[dwc->dst_master]); in dwc_prep_dma_memcpy()
723 dwc->block_size); in dwc_prep_dma_memcpy()
725 desc = dwc_desc_get(dwc); in dwc_prep_dma_memcpy()
756 dwc_desc_put(dwc, first); in dwc_prep_dma_memcpy()
765 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_slave_sg() local
767 struct dma_slave_config *sconfig = &dwc->dma_sconfig; in dwc_prep_slave_sg()
784 dwc->direction = direction; in dwc_prep_slave_sg()
800 data_width = dw->data_width[dwc->src_master]; in dwc_prep_slave_sg()
813 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
820 if ((len >> mem_width) > dwc->block_size) { in dwc_prep_slave_sg()
821 dlen = dwc->block_size << mem_width; in dwc_prep_slave_sg()
857 data_width = dw->data_width[dwc->dst_master]; in dwc_prep_slave_sg()
870 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
877 if ((len >> reg_width) > dwc->block_size) { in dwc_prep_slave_sg()
878 dlen = dwc->block_size << reg_width; in dwc_prep_slave_sg()
918 dwc_desc_put(dwc, first); in dwc_prep_slave_sg()
924 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_filter() local
932 dwc->src_id = dws->src_id; in dw_dma_filter()
933 dwc->dst_id = dws->dst_id; in dw_dma_filter()
935 dwc->src_master = dws->src_master; in dw_dma_filter()
936 dwc->dst_master = dws->dst_master; in dw_dma_filter()
960 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_config() local
966 memcpy(&dwc->dma_sconfig, sconfig, sizeof(*sconfig)); in dwc_config()
967 dwc->direction = sconfig->direction; in dwc_config()
969 convert_burst(&dwc->dma_sconfig.src_maxburst); in dwc_config()
970 convert_burst(&dwc->dma_sconfig.dst_maxburst); in dwc_config()
977 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_pause() local
982 spin_lock_irqsave(&dwc->lock, flags); in dwc_pause()
984 cfglo = channel_readl(dwc, CFG_LO); in dwc_pause()
985 channel_writel(dwc, CFG_LO, cfglo | DWC_CFGL_CH_SUSP); in dwc_pause()
986 while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--) in dwc_pause()
989 dwc->paused = true; in dwc_pause()
991 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_pause()
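
dwc_pause() sets the channel-suspend bit in CFG_LO and then gives the channel a bounded number of polls (20 in the driver, with a short delay per iteration) to report FIFO_EMPTY before marking the channel paused. A compact model of that sequence is below; the two CFG_LO bit positions are assumptions.

#include <stdbool.h>
#include <stdint.h>

#define DEMO_CFGL_CH_SUSP	(1u << 8)	/* CFG_LO suspend request (assumed position) */
#define DEMO_CFGL_FIFO_EMPTY	(1u << 9)	/* CFG_LO fifo-empty status (assumed position) */

static volatile uint32_t cfg_lo = DEMO_CFGL_FIFO_EMPTY;	/* fake MMIO register */

/* Shape of dwc_pause(): request suspend, then give the channel a bounded
 * number of polls to drain its FIFO before recording it as paused. */
static bool demo_pause(void)
{
	unsigned int count = 20;	/* same iteration budget as the driver */

	cfg_lo |= DEMO_CFGL_CH_SUSP;
	while (!(cfg_lo & DEMO_CFGL_FIFO_EMPTY) && count--)
		;			/* the driver delays ~2us per iteration */

	return true;			/* the driver now sets dwc->paused */
}

int main(void)
{
	return demo_pause() ? 0 : 1;
}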
996 static inline void dwc_chan_resume(struct dw_dma_chan *dwc) in dwc_chan_resume() argument
998 u32 cfglo = channel_readl(dwc, CFG_LO); in dwc_chan_resume()
1000 channel_writel(dwc, CFG_LO, cfglo & ~DWC_CFGL_CH_SUSP); in dwc_chan_resume()
1002 dwc->paused = false; in dwc_chan_resume()
1007 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_resume() local
1010 if (!dwc->paused) in dwc_resume()
1013 spin_lock_irqsave(&dwc->lock, flags); in dwc_resume()
1015 dwc_chan_resume(dwc); in dwc_resume()
1017 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_resume()
1024 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_terminate_all() local
1030 spin_lock_irqsave(&dwc->lock, flags); in dwc_terminate_all()
1032 clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); in dwc_terminate_all()
1034 dwc_chan_disable(dw, dwc); in dwc_terminate_all()
1036 dwc_chan_resume(dwc); in dwc_terminate_all()
1039 list_splice_init(&dwc->queue, &list); in dwc_terminate_all()
1040 list_splice_init(&dwc->active_list, &list); in dwc_terminate_all()
1042 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_terminate_all()
1046 dwc_descriptor_complete(dwc, desc, false); in dwc_terminate_all()
1051 static inline u32 dwc_get_residue(struct dw_dma_chan *dwc) in dwc_get_residue() argument
1056 spin_lock_irqsave(&dwc->lock, flags); in dwc_get_residue()
1058 residue = dwc->residue; in dwc_get_residue()
1059 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags) && residue) in dwc_get_residue()
1060 residue -= dwc_get_sent(dwc); in dwc_get_residue()
1062 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_get_residue()
1071 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_tx_status() local
1078 dwc_scan_descriptors(to_dw_dma(chan->device), dwc); in dwc_tx_status()
1082 dma_set_residue(txstate, dwc_get_residue(dwc)); in dwc_tx_status()
1084 if (dwc->paused && ret == DMA_IN_PROGRESS) in dwc_tx_status()
1092 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_issue_pending() local
1095 spin_lock_irqsave(&dwc->lock, flags); in dwc_issue_pending()
1096 if (list_empty(&dwc->active_list)) in dwc_issue_pending()
1097 dwc_dostart_first_queued(dwc); in dwc_issue_pending()
1098 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_issue_pending()
1129 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_alloc_chan_resources() local
1138 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_alloc_chan_resources()
1162 dw->in_use |= dwc->mask; in dwc_alloc_chan_resources()
1164 spin_lock_irqsave(&dwc->lock, flags); in dwc_alloc_chan_resources()
1165 i = dwc->descs_allocated; in dwc_alloc_chan_resources()
1166 while (dwc->descs_allocated < NR_DESCS_PER_CHANNEL) { in dwc_alloc_chan_resources()
1169 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_alloc_chan_resources()
1183 dwc_desc_put(dwc, desc); in dwc_alloc_chan_resources()
1185 spin_lock_irqsave(&dwc->lock, flags); in dwc_alloc_chan_resources()
1186 i = ++dwc->descs_allocated; in dwc_alloc_chan_resources()
1189 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_alloc_chan_resources()
1203 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_free_chan_resources() local
1210 dwc->descs_allocated); in dwc_free_chan_resources()
1213 BUG_ON(!list_empty(&dwc->active_list)); in dwc_free_chan_resources()
1214 BUG_ON(!list_empty(&dwc->queue)); in dwc_free_chan_resources()
1215 BUG_ON(dma_readl(to_dw_dma(chan->device), CH_EN) & dwc->mask); in dwc_free_chan_resources()
1217 spin_lock_irqsave(&dwc->lock, flags); in dwc_free_chan_resources()
1218 list_splice_init(&dwc->free_list, &list); in dwc_free_chan_resources()
1219 dwc->descs_allocated = 0; in dwc_free_chan_resources()
1222 dwc->src_id = 0; in dwc_free_chan_resources()
1223 dwc->dst_id = 0; in dwc_free_chan_resources()
1225 dwc->src_master = 0; in dwc_free_chan_resources()
1226 dwc->dst_master = 0; in dwc_free_chan_resources()
1228 dwc->initialized = false; in dwc_free_chan_resources()
1231 channel_clear_bit(dw, MASK.XFER, dwc->mask); in dwc_free_chan_resources()
1232 channel_clear_bit(dw, MASK.BLOCK, dwc->mask); in dwc_free_chan_resources()
1233 channel_clear_bit(dw, MASK.ERROR, dwc->mask); in dwc_free_chan_resources()
1235 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_free_chan_resources()
1238 dw->in_use &= ~dwc->mask; in dwc_free_chan_resources()
1261 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_cyclic_start() local
1265 if (!test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) { in dw_dma_cyclic_start()
1266 dev_err(chan2dev(&dwc->chan), "missing prep for cyclic DMA\n"); in dw_dma_cyclic_start()
1270 spin_lock_irqsave(&dwc->lock, flags); in dw_dma_cyclic_start()
1273 channel_set_bit(dw, MASK.BLOCK, dwc->mask); in dw_dma_cyclic_start()
1275 dwc_dostart(dwc, dwc->cdesc->desc[0]); in dw_dma_cyclic_start()
1277 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_start()
1291 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_cyclic_stop() local
1292 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dw_dma_cyclic_stop()
1295 spin_lock_irqsave(&dwc->lock, flags); in dw_dma_cyclic_stop()
1297 dwc_chan_disable(dw, dwc); in dw_dma_cyclic_stop()
1299 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_stop()
1318 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_cyclic_prep() local
1319 struct dma_slave_config *sconfig = &dwc->dma_sconfig; in dw_dma_cyclic_prep()
1330 spin_lock_irqsave(&dwc->lock, flags); in dw_dma_cyclic_prep()
1331 if (dwc->nollp) { in dw_dma_cyclic_prep()
1332 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_prep()
1333 dev_dbg(chan2dev(&dwc->chan), in dw_dma_cyclic_prep()
1338 if (!list_empty(&dwc->queue) || !list_empty(&dwc->active_list)) { in dw_dma_cyclic_prep()
1339 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_prep()
1340 dev_dbg(chan2dev(&dwc->chan), in dw_dma_cyclic_prep()
1345 was_cyclic = test_and_set_bit(DW_DMA_IS_CYCLIC, &dwc->flags); in dw_dma_cyclic_prep()
1346 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_prep()
1348 dev_dbg(chan2dev(&dwc->chan), in dw_dma_cyclic_prep()
1358 dwc->direction = direction; in dw_dma_cyclic_prep()
1368 if (period_len > (dwc->block_size << reg_width)) in dw_dma_cyclic_prep()
1389 desc = dwc_desc_get(dwc); in dw_dma_cyclic_prep()
1440 dev_dbg(chan2dev(&dwc->chan), in dw_dma_cyclic_prep()
1445 dwc->cdesc = cdesc; in dw_dma_cyclic_prep()
1451 dwc_desc_put(dwc, cdesc->desc[i]); in dw_dma_cyclic_prep()
1455 clear_bit(DW_DMA_IS_CYCLIC, &dwc->flags); in dw_dma_cyclic_prep()
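
Before building the per-period descriptors, dw_dma_cyclic_prep() rejects requests the controller cannot express: soft-LLP channels cannot run cyclic transfers, the channel must be idle, and each period has to fit into a single hardware block, where block_size is counted in items of the peripheral register width. The size and alignment checks are restated below; the alignment conditions are paraphrased from the same routine.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Period checks restated from dw_dma_cyclic_prep(): block_size counts items
 * of reg_width bytes (reg_width = log2 of the peripheral register width), so
 * one period may span at most block_size << reg_width bytes. */
static bool demo_cyclic_period_ok(uint32_t period_len, uint32_t buf_addr,
				  uint32_t block_size, uint32_t reg_width)
{
	if (period_len > (block_size << reg_width))
		return false;			/* too big for a single block */
	if (period_len & ((1u << reg_width) - 1))
		return false;			/* period not register-aligned */
	if (buf_addr & ((1u << reg_width) - 1))
		return false;			/* buffer not register-aligned */
	return true;
}

int main(void)
{
	/* 4 KiB period, 32-bit registers (reg_width = 2), 4095-item blocks */
	printf("%d\n", demo_cyclic_period_ok(4096, 0, 4095, 2));
	return 0;
}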
1466 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_cyclic_free() local
1467 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dw_dma_cyclic_free()
1468 struct dw_cyclic_desc *cdesc = dwc->cdesc; in dw_dma_cyclic_free()
1472 dev_dbg(chan2dev(&dwc->chan), "%s\n", __func__); in dw_dma_cyclic_free()
1477 spin_lock_irqsave(&dwc->lock, flags); in dw_dma_cyclic_free()
1479 dwc_chan_disable(dw, dwc); in dw_dma_cyclic_free()
1481 dma_writel(dw, CLEAR.BLOCK, dwc->mask); in dw_dma_cyclic_free()
1482 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dw_dma_cyclic_free()
1483 dma_writel(dw, CLEAR.XFER, dwc->mask); in dw_dma_cyclic_free()
1485 spin_unlock_irqrestore(&dwc->lock, flags); in dw_dma_cyclic_free()
1488 dwc_desc_put(dwc, cdesc->desc[i]); in dw_dma_cyclic_free()
1493 clear_bit(DW_DMA_IS_CYCLIC, &dwc->flags); in dw_dma_cyclic_free()
1588 struct dw_dma_chan *dwc = &dw->chan[i]; in dw_dma_probe() local
1590 dwc->chan.device = &dw->dma; in dw_dma_probe()
1591 dma_cookie_init(&dwc->chan); in dw_dma_probe()
1593 list_add_tail(&dwc->chan.device_node, in dw_dma_probe()
1596 list_add(&dwc->chan.device_node, &dw->dma.channels); in dw_dma_probe()
1600 dwc->priority = pdata->nr_channels - i - 1; in dw_dma_probe()
1602 dwc->priority = i; in dw_dma_probe()
1604 dwc->ch_regs = &__dw_regs(dw)->CHAN[i]; in dw_dma_probe()
1605 spin_lock_init(&dwc->lock); in dw_dma_probe()
1606 dwc->mask = 1 << i; in dw_dma_probe()
1608 INIT_LIST_HEAD(&dwc->active_list); in dw_dma_probe()
1609 INIT_LIST_HEAD(&dwc->queue); in dw_dma_probe()
1610 INIT_LIST_HEAD(&dwc->free_list); in dw_dma_probe()
1612 channel_clear_bit(dw, CH_EN, dwc->mask); in dw_dma_probe()
1614 dwc->direction = DMA_TRANS_NONE; in dw_dma_probe()
1632 dwc->block_size = in dw_dma_probe()
1634 dwc->nollp = in dw_dma_probe()
1637 dwc->block_size = pdata->block_size; in dw_dma_probe()
1640 channel_writel(dwc, LLP, 0xfffffffc); in dw_dma_probe()
1641 dwc->nollp = in dw_dma_probe()
1642 (channel_readl(dwc, LLP) & 0xfffffffc) == 0; in dw_dma_probe()
1643 channel_writel(dwc, LLP, 0); in dw_dma_probe()
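
When hardware parameters are not auto-detected, the probe code takes block_size from platform data and probes multi-block support directly: it writes a test pattern to LLP, reads it back, and treats an all-zero read-back as a channel hard-wired without hardware LLP (dwc->nollp), then restores LLP to 0. The sketch models that write/read-back/restore sequence; the callback struct and fake_* helpers are illustrative stand-ins for channel_writel()/channel_readl().

#include <stdbool.h>
#include <stdint.h>

/* Model of the LLP-writability probe in dw_dma_probe(): channels synthesized
 * without multi-block (hardware LLP) support hard-wire LLP to zero, so a test
 * pattern written there reads back as 0. */
struct demo_llp_ops {
	void     (*write)(uint32_t val);
	uint32_t (*read)(void);
};

static bool demo_detect_nollp(const struct demo_llp_ops *ops)
{
	bool nollp;

	ops->write(0xfffffffc);
	nollp = (ops->read() & 0xfffffffc) == 0;	/* the bits didn't stick */
	ops->write(0);					/* restore, as the probe code does */
	return nollp;
}

/* Fake register that ignores writes, i.e. a channel without LLP support. */
static void fake_write(uint32_t val) { (void)val; }
static uint32_t fake_read(void) { return 0; }

int main(void)
{
	const struct demo_llp_ops ops = { fake_write, fake_read };

	return demo_detect_nollp(&ops) ? 0 : 1;		/* exit 0: no hardware LLP */
}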
1705 struct dw_dma_chan *dwc, *_dwc; in dw_dma_remove() local
1715 list_for_each_entry_safe(dwc, _dwc, &dw->dma.channels, in dw_dma_remove()
1717 list_del(&dwc->chan.device_node); in dw_dma_remove()
1718 channel_clear_bit(dw, CH_EN, dwc->mask); in dw_dma_remove()