Lines matching refs: hsuc

(Matched lines grouped by containing function; code between matches is
elided and marked /* ... */.)

static inline void hsu_chan_disable(struct hsu_dma_chan *hsuc)
{
	hsu_chan_writel(hsuc, HSU_CH_CR, 0);
}
static inline void hsu_chan_enable(struct hsu_dma_chan *hsuc)
{
	/* ... build the control-register value (cr) ... */
	if (hsuc->direction == DMA_MEM_TO_DEV)
		/* ... */
	else if (hsuc->direction == DMA_DEV_TO_MEM)
		/* ... */

	hsu_chan_writel(hsuc, HSU_CH_CR, cr);
}
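The hsu_chan_readl()/hsu_chan_writel() accessors themselves are not among
the matched lines. A minimal sketch of what they plausibly look like,
assuming hsuc->reg is the channel's MMIO base (it is set that way in
hsu_dma_probe() below); the real helpers live in the driver's private
header:

	#include <linux/io.h>

	/* Sketch only: per-channel register access over the MMIO window. */
	static inline u32 hsu_chan_readl(struct hsu_dma_chan *hsuc, int offset)
	{
		return readl(hsuc->reg + offset);
	}

	static inline void hsu_chan_writel(struct hsu_dma_chan *hsuc, int offset,
					   u32 value)
	{
		writel(value, hsuc->reg + offset);
	}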
static void hsu_dma_chan_start(struct hsu_dma_chan *hsuc)
{
	struct dma_slave_config *config = &hsuc->config;
	struct hsu_dma_desc *desc = hsuc->desc;
	/* ... */

	/* Pick burst size and transfer width from the slave config,
	   depending on the transfer direction */
	if (hsuc->direction == DMA_MEM_TO_DEV) {
		/* ... */
	} else if (hsuc->direction == DMA_DEV_TO_MEM) {
		/* ... */
	}

	hsu_chan_disable(hsuc);

	hsu_chan_writel(hsuc, HSU_CH_DCR, 0);
	hsu_chan_writel(hsuc, HSU_CH_BSR, bsr);
	hsu_chan_writel(hsuc, HSU_CH_MTSR, mtsr);

	/* ... inside a loop over the descriptor's segments: ... */
	hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr);
	hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len);
	/* ... */

	hsu_chan_writel(hsuc, HSU_CH_DCR, dcr);

	hsu_chan_enable(hsuc);
}
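hsu_dma_chan_start() dereferences desc->sg[i].addr and desc->sg[i].len,
and functions below touch desc->vdesc and desc->status. A hedged
reconstruction of the descriptor layout those accesses imply; the field
names come from the code above, everything else is assumption:

	#include <linux/dmaengine.h>
	#include "../virt-dma.h"	/* struct virt_dma_desc */

	struct hsu_dma_sg {
		dma_addr_t addr;	/* programmed into HSU_CH_DxSAR(i) */
		unsigned int len;	/* programmed into HSU_CH_DxTSR(i) */
	};

	struct hsu_dma_desc {
		struct virt_dma_desc vdesc;	/* embeds into the virt-dma framework */
		struct hsu_dma_sg *sg;		/* per-segment address/length pairs */
		enum dma_status status;		/* DMA_IN_PROGRESS, DMA_PAUSED, ... */
		/* ... segment counters and other bookkeeping elided ... */
	};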
static void hsu_dma_stop_channel(struct hsu_dma_chan *hsuc)
{
	unsigned long flags;

	spin_lock_irqsave(&hsuc->lock, flags);
	hsu_chan_disable(hsuc);
	hsu_chan_writel(hsuc, HSU_CH_DCR, 0);
	spin_unlock_irqrestore(&hsuc->lock, flags);
}
static void hsu_dma_start_channel(struct hsu_dma_chan *hsuc)
{
	unsigned long flags;

	spin_lock_irqsave(&hsuc->lock, flags);
	hsu_dma_chan_start(hsuc);
	spin_unlock_irqrestore(&hsuc->lock, flags);
}
static void hsu_dma_start_transfer(struct hsu_dma_chan *hsuc)
{
	struct virt_dma_desc *vdesc;

	/* Get the next descriptor queued on the virtual channel */
	vdesc = vchan_next_desc(&hsuc->vchan);
	if (!vdesc) {
		hsuc->desc = NULL;
		return;
	}

	hsuc->desc = to_hsu_dma_desc(vdesc);
	/* ... */
	hsu_dma_start_channel(hsuc);
}
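to_hsu_dma_desc() is not among the matched lines either; given that
struct hsu_dma_desc embeds a struct virt_dma_desc, it is almost certainly
the standard container_of conversion used throughout virt-dma based
drivers:

	#include <linux/kernel.h>	/* container_of() */

	static struct hsu_dma_desc *to_hsu_dma_desc(struct virt_dma_desc *vdesc)
	{
		return container_of(vdesc, struct hsu_dma_desc, vdesc);
	}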
static u32 hsu_dma_chan_get_sr(struct hsu_dma_chan *hsuc)
{
	unsigned long flags;
	u32 sr;

	spin_lock_irqsave(&hsuc->lock, flags);
	sr = hsu_chan_readl(hsuc, HSU_CH_SR);
	spin_unlock_irqrestore(&hsuc->lock, flags);

	return sr;
}
In hsu_dma_irq():

	struct hsu_dma_chan *hsuc;
	/* ... */

	hsuc = &chip->hsu->chan[nr];
	/* ... */

	sr = hsu_dma_chan_get_sr(hsuc);
	/* ... */

	/* Descriptor timeouts are only meaningful for receive channels */
	if (hsuc->direction == DMA_DEV_TO_MEM && (sr & HSU_CH_SR_DESCTO_ANY))
		/* ... */

	/* ... */
	spin_lock_irqsave(&hsuc->vchan.lock, flags);
	desc = hsuc->desc;
	/* ... restart the channel if the current descriptor has segments
	   left, otherwise complete it and start the next transfer ... */
	hsu_dma_start_channel(hsuc);
	/* ... */
	hsu_dma_start_transfer(hsuc);
	/* ... */
	spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
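The hsuc = &chip->hsu->chan[nr] lookup suggests the handler is called
with a chip pointer plus a channel number by whoever demultiplexed the
interrupt, typically the UART driver sitting on top. A hedged consumer
sketch; the port structure, its field names, and the exact hsu_dma_irq()
signature are assumptions for illustration:

	#include <linux/interrupt.h>

	struct my_uart_port {			/* hypothetical consumer state */
		struct hsu_dma_chip *chip;
		unsigned short rx_chan_nr;
		unsigned short tx_chan_nr;
	};

	static irqreturn_t my_uart_isr(int irq, void *dev_id)
	{
		struct my_uart_port *port = dev_id;

		/* Let the HSU DMA core service both directions of this
		 * port's channel pair (simplified: always claims the IRQ). */
		hsu_dma_irq(port->chip, port->rx_chan_nr);
		hsu_dma_irq(port->chip, port->tx_chan_nr);

		return IRQ_HANDLED;
	}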
In hsu_dma_prep_slave_sg():

	struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
	/* ... allocate a descriptor and copy the scatterlist into it ... */
	return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags);
In hsu_dma_issue_pending():

	struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&hsuc->vchan.lock, flags);
	if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc)
		hsu_dma_start_transfer(hsuc);
	spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
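Clients never call hsu_dma_prep_slave_sg() or hsu_dma_issue_pending()
directly; they go through the generic dmaengine wrappers, which dispatch
to these callbacks. A minimal sketch of that path, assuming the channel
was already requested and the scatterlist DMA-mapped (mapping elided for
brevity):

	#include <linux/dmaengine.h>
	#include <linux/scatterlist.h>

	/* Queue one transmit buffer on a slave channel. */
	static int queue_tx(struct dma_chan *chan, struct scatterlist *sg,
			    unsigned int nents)
	{
		struct dma_async_tx_descriptor *desc;
		dma_cookie_t cookie;

		desc = dmaengine_prep_slave_sg(chan, sg, nents, DMA_MEM_TO_DEV,
					       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc)
			return -ENOMEM;

		cookie = dmaengine_submit(desc);	/* queues on the vchan */
		if (dma_submit_error(cookie))
			return -EINVAL;

		dma_async_issue_pending(chan);	/* ends up in hsu_dma_issue_pending() */
		return 0;
	}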
static size_t hsu_dma_active_desc_size(struct hsu_dma_chan *hsuc)
{
	struct hsu_dma_desc *desc = hsuc->desc;
	size_t bytes;
	/* ... */

	spin_lock_irqsave(&hsuc->lock, flags);
	/* ... sum the remaining transfer sizes of the segments still
	   programmed into the hardware: ... */
	bytes += hsu_chan_readl(hsuc, HSU_CH_DxTSR(i));
	/* ... */
	spin_unlock_irqrestore(&hsuc->lock, flags);

	return bytes;
}
In hsu_dma_tx_status():

	struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
	/* ... */

	spin_lock_irqsave(&hsuc->vchan.lock, flags);
	vdesc = vchan_find_desc(&hsuc->vchan, cookie);
	if (hsuc->desc && cookie == hsuc->desc->vdesc.tx.cookie) {
		/* The queried descriptor is the one in flight: report
		   the hardware residue and its current status */
		bytes = hsu_dma_active_desc_size(hsuc);
		/* ... */
		status = hsuc->desc->status;
	}
	/* ... */
	spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
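From the client's side, the bytes value computed above surfaces as the
transfer residue of the standard status query. A small sketch using the
stock dmaengine call:

	#include <linux/dmaengine.h>

	static size_t bytes_left(struct dma_chan *chan, dma_cookie_t cookie)
	{
		struct dma_tx_state state;

		if (dmaengine_tx_status(chan, cookie, &state) == DMA_COMPLETE)
			return 0;

		/* For the in-flight descriptor this is presumably the
		 * DxTSR sum from hsu_dma_active_desc_size(). */
		return state.residue;
	}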
In hsu_dma_slave_config():

	struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
	/* ... */
	memcpy(&hsuc->config, config, sizeof(hsuc->config));
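hsu_dma_slave_config() merely snapshots the caller's struct
dma_slave_config, which hsu_dma_chan_start() later consults (the
BSR/MTSR writes above). A hedged client-side example; the FIFO address,
bus width, and burst size are illustrative values, not taken from any
datasheet:

	#include <linux/dmaengine.h>

	static int config_tx(struct dma_chan *chan, dma_addr_t fifo_addr)
	{
		struct dma_slave_config cfg = {
			.direction	= DMA_MEM_TO_DEV,
			.dst_addr	= fifo_addr,	/* device FIFO */
			.dst_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
			.dst_maxburst	= 16,
		};

		return dmaengine_slave_config(chan, &cfg);
	}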
static void hsu_dma_chan_deactivate(struct hsu_dma_chan *hsuc)
{
	unsigned long flags;

	spin_lock_irqsave(&hsuc->lock, flags);
	hsu_chan_disable(hsuc);
	spin_unlock_irqrestore(&hsuc->lock, flags);
}
static void hsu_dma_chan_activate(struct hsu_dma_chan *hsuc)
{
	unsigned long flags;

	spin_lock_irqsave(&hsuc->lock, flags);
	hsu_chan_enable(hsuc);
	spin_unlock_irqrestore(&hsuc->lock, flags);
}
In hsu_dma_pause():

	struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&hsuc->vchan.lock, flags);
	if (hsuc->desc && hsuc->desc->status == DMA_IN_PROGRESS) {
		hsu_dma_chan_deactivate(hsuc);
		hsuc->desc->status = DMA_PAUSED;
	}
	spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
In hsu_dma_resume():

	struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&hsuc->vchan.lock, flags);
	if (hsuc->desc && hsuc->desc->status == DMA_PAUSED) {
		hsuc->desc->status = DMA_IN_PROGRESS;
		hsu_dma_chan_activate(hsuc);
	}
	spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
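Pause and resume only flip the channel enable bit and the descriptor
status under the vchan lock; clients reach them through the dmaengine
wrappers. Minimal usage sketch, e.g. for flow control:

	#include <linux/dmaengine.h>

	static void flow_control(struct dma_chan *chan, bool stop)
	{
		if (stop)
			dmaengine_pause(chan);		/* -> hsu_dma_pause() */
		else
			dmaengine_resume(chan);		/* -> hsu_dma_resume() */
	}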
In hsu_dma_terminate_all():

	struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&hsuc->vchan.lock, flags);

	hsu_dma_stop_channel(hsuc);
	if (hsuc->desc) {
		hsu_dma_desc_free(&hsuc->desc->vdesc);
		hsuc->desc = NULL;
	}

	vchan_get_all_descriptors(&hsuc->vchan, &head);
	spin_unlock_irqrestore(&hsuc->vchan.lock, flags);
	vchan_dma_desc_free_list(&hsuc->vchan, &head);
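Note the ordering: queued descriptors are collected under the vchan lock
but freed only after it is dropped, keeping the critical section short
and avoiding re-entry into the lock from the free callbacks. Client-side
teardown is a sketch along these lines:

	#include <linux/dmaengine.h>

	static void teardown(struct dma_chan *chan)
	{
		/* Stops the hardware via the terminate_all callback and
		 * waits for in-flight completion callbacks to finish. */
		dmaengine_terminate_sync(chan);
		dma_release_channel(chan);
	}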
In hsu_dma_probe(), for each channel i:

	struct hsu_dma_chan *hsuc = &hsu->chan[i];

	hsuc->vchan.desc_free = hsu_dma_desc_free;
	vchan_init(&hsuc->vchan, &hsu->dma);

	/* Channels alternate direction: even channels transmit
	   (MEM_TO_DEV), odd channels receive (DEV_TO_MEM) */
	hsuc->direction = (i & 0x1) ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV;
	hsuc->reg = addr + i * HSU_DMA_CHAN_LENGTH;

	spin_lock_init(&hsuc->lock);
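Taken together, the probe loop initializes every hsuc field the rest of
the file touches. A reconstruction of struct hsu_dma_chan from that
usage; the authoritative definition lives in the driver's header, and
field order plus any additional members are assumptions:

	#include <linux/spinlock.h>
	#include <linux/dmaengine.h>
	#include "../virt-dma.h"

	struct hsu_dma_chan {
		struct virt_dma_chan vchan;	/* generic virt-dma channel */
		void __iomem *reg;		/* per-channel MMIO base */
		enum dma_transfer_direction direction;	/* even: TX, odd: RX */
		struct dma_slave_config config;	/* set by hsu_dma_slave_config() */
		struct hsu_dma_desc *desc;	/* currently active descriptor */
		spinlock_t lock;		/* serializes register access */
	};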
In hsu_dma_remove(), for each channel i:

	struct hsu_dma_chan *hsuc = &hsu->chan[i];

	tasklet_kill(&hsuc->vchan.task);