Lines matching references to 'pch' in the PL330 DMA controller driver (drivers/dma/pl330.c). Each entry shows the source line number, the matched line, and the enclosing function; 'local' and 'argument' flag the lines where pch is declared.

1448 struct dma_pl330_chan *pch; in dma_pl330_rqcb() local
1454 pch = desc->pchan; in dma_pl330_rqcb()
1457 if (!pch) in dma_pl330_rqcb()
1460 spin_lock_irqsave(&pch->lock, flags); in dma_pl330_rqcb()
1464 spin_unlock_irqrestore(&pch->lock, flags); in dma_pl330_rqcb()
1466 tasklet_schedule(&pch->task); in dma_pl330_rqcb()
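
Putting the dma_pl330_rqcb() matches together: the request callback resolves the owning channel through desc->pchan, marks the descriptor finished under pch->lock, and defers everything else to the channel tasklet. A minimal sketch of that shape (the function signature, the status field and the DONE value are assumptions from typical versions of the driver, not shown in the listing):

    static void dma_pl330_rqcb(struct dma_pl330_desc *desc, enum pl330_op_err err)
    {
        struct dma_pl330_chan *pch;
        unsigned long flags;

        pch = desc->pchan;

        /* The descriptor may already have been aborted */
        if (!pch)
            return;

        spin_lock_irqsave(&pch->lock, flags);
        desc->status = DONE;    /* assumed status field/value */
        spin_unlock_irqrestore(&pch->lock, flags);

        tasklet_schedule(&pch->task);
    }
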
1942 static inline void fill_queue(struct dma_pl330_chan *pch) in fill_queue() argument
1947 list_for_each_entry(desc, &pch->work_list, node) { in fill_queue()
1953 ret = pl330_submit_req(pch->thread, desc); in fill_queue()
1962 dev_err(pch->dmac->ddma.dev, "%s:%d Bad Desc(%d)\n", in fill_queue()
1964 tasklet_schedule(&pch->task); in fill_queue()
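
The fill_queue() matches show the submission loop: every descriptor still sitting on pch->work_list is handed to pl330_submit_req(pch->thread, desc), and a rejected descriptor is reported through dev_err() and kicked back to the tasklet. A hedged reconstruction (the BUSY/DONE status values and the -EAGAIN handling are assumptions):

    static inline void fill_queue(struct dma_pl330_chan *pch)
    {
        struct dma_pl330_desc *desc;
        int ret;

        list_for_each_entry(desc, &pch->work_list, node) {

            /* Skip descriptors already submitted to the thread */
            if (desc->status == BUSY)
                continue;

            ret = pl330_submit_req(pch->thread, desc);
            if (!ret) {
                desc->status = BUSY;
            } else if (ret == -EAGAIN) {
                /* Thread queue full - retry on the next pass */
                break;
            } else {
                /* Unacceptable request */
                desc->status = DONE;
                dev_err(pch->dmac->ddma.dev, "%s:%d Bad Desc(%d)\n",
                        __func__, __LINE__, desc->txd.cookie);
                tasklet_schedule(&pch->task);
            }
        }
    }
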
1971 struct dma_pl330_chan *pch = (struct dma_pl330_chan *)data; in pl330_tasklet() local
1976 spin_lock_irqsave(&pch->lock, flags); in pl330_tasklet()
1979 list_for_each_entry_safe(desc, _dt, &pch->work_list, node) in pl330_tasklet()
1981 if (!pch->cyclic) in pl330_tasklet()
1983 list_move_tail(&desc->node, &pch->completed_list); in pl330_tasklet()
1987 fill_queue(pch); in pl330_tasklet()
1989 if (list_empty(&pch->work_list)) { in pl330_tasklet()
1990 spin_lock(&pch->thread->dmac->lock); in pl330_tasklet()
1991 _stop(pch->thread); in pl330_tasklet()
1992 spin_unlock(&pch->thread->dmac->lock); in pl330_tasklet()
1996 spin_lock(&pch->thread->dmac->lock); in pl330_tasklet()
1997 _start(pch->thread); in pl330_tasklet()
1998 spin_unlock(&pch->thread->dmac->lock); in pl330_tasklet()
2001 while (!list_empty(&pch->completed_list)) { in pl330_tasklet()
2005 desc = list_first_entry(&pch->completed_list, in pl330_tasklet()
2011 if (pch->cyclic) { in pl330_tasklet()
2013 list_move_tail(&desc->node, &pch->work_list); in pl330_tasklet()
2015 spin_lock(&pch->thread->dmac->lock); in pl330_tasklet()
2016 _start(pch->thread); in pl330_tasklet()
2017 spin_unlock(&pch->thread->dmac->lock); in pl330_tasklet()
2022 list_move_tail(&desc->node, &pch->dmac->desc_pool); in pl330_tasklet()
2028 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tasklet()
2030 spin_lock_irqsave(&pch->lock, flags); in pl330_tasklet()
2033 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tasklet()
2037 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_tasklet()
2038 pm_runtime_put_autosuspend(pch->dmac->ddma.dev); in pl330_tasklet()
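
Taken in order, the pl330_tasklet() matches describe the channel's bottom half: under pch->lock it moves finished descriptors from work_list to completed_list, refills the hardware thread via fill_queue(), stops or (re)starts pch->thread depending on whether work remains, then runs completion callbacks and recycles descriptors either back to work_list (cyclic) or to the DMAC desc_pool. A condensed sketch; the status checks, dma_cookie_complete() and the power_down flag are assumptions from typical versions:

    static void pl330_tasklet(unsigned long data)
    {
        struct dma_pl330_chan *pch = (struct dma_pl330_chan *)data;
        struct dma_pl330_desc *desc, *_dt;
        unsigned long flags;
        bool power_down = false;

        spin_lock_irqsave(&pch->lock, flags);

        /* Collect descriptors the hardware has finished */
        list_for_each_entry_safe(desc, _dt, &pch->work_list, node)
            if (desc->status == DONE) {
                if (!pch->cyclic)
                    dma_cookie_complete(&desc->txd);
                list_move_tail(&desc->node, &pch->completed_list);
            }

        /* Push more prepared descriptors to the hardware thread */
        fill_queue(pch);

        if (list_empty(&pch->work_list)) {
            spin_lock(&pch->thread->dmac->lock);
            _stop(pch->thread);
            spin_unlock(&pch->thread->dmac->lock);
            power_down = true;
        } else {
            /* Make sure the channel thread is running */
            spin_lock(&pch->thread->dmac->lock);
            _start(pch->thread);
            spin_unlock(&pch->thread->dmac->lock);
        }

        while (!list_empty(&pch->completed_list)) {
            dma_async_tx_callback callback;
            void *param;

            desc = list_first_entry(&pch->completed_list,
                                    struct dma_pl330_desc, node);
            callback = desc->txd.callback;
            param = desc->txd.callback_param;

            if (pch->cyclic) {
                /* Cyclic descriptors go straight back onto the work list */
                list_move_tail(&desc->node, &pch->work_list);
                if (power_down) {
                    spin_lock(&pch->thread->dmac->lock);
                    _start(pch->thread);
                    spin_unlock(&pch->thread->dmac->lock);
                    power_down = false;
                }
            } else {
                /* One-shot descriptors return to the DMAC pool */
                list_move_tail(&desc->node, &pch->dmac->desc_pool);
            }

            if (callback) {
                /* Drop the lock while calling back into the client */
                spin_unlock_irqrestore(&pch->lock, flags);
                callback(param);
                spin_lock_irqsave(&pch->lock, flags);
            }
        }
        spin_unlock_irqrestore(&pch->lock, flags);

        /* Channel went idle: allow runtime PM to suspend the controller */
        if (power_down) {
            pm_runtime_mark_last_busy(pch->dmac->ddma.dev);
            pm_runtime_put_autosuspend(pch->dmac->ddma.dev);
        }
    }
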
2076 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_alloc_chan_resources() local
2077 struct pl330_dmac *pl330 = pch->dmac; in pl330_alloc_chan_resources()
2080 spin_lock_irqsave(&pch->lock, flags); in pl330_alloc_chan_resources()
2083 pch->cyclic = false; in pl330_alloc_chan_resources()
2085 pch->thread = pl330_request_channel(pl330); in pl330_alloc_chan_resources()
2086 if (!pch->thread) { in pl330_alloc_chan_resources()
2087 spin_unlock_irqrestore(&pch->lock, flags); in pl330_alloc_chan_resources()
2091 tasklet_init(&pch->task, pl330_tasklet, (unsigned long) pch); in pl330_alloc_chan_resources()
2093 spin_unlock_irqrestore(&pch->lock, flags); in pl330_alloc_chan_resources()
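
pl330_alloc_chan_resources() binds a hardware thread and the tasklet to the channel under pch->lock. Roughly (the dma_cookie_init() call and the return values are assumptions):

    static int pl330_alloc_chan_resources(struct dma_chan *chan)
    {
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct pl330_dmac *pl330 = pch->dmac;
        unsigned long flags;

        spin_lock_irqsave(&pch->lock, flags);

        dma_cookie_init(chan);      /* assumed */
        pch->cyclic = false;

        pch->thread = pl330_request_channel(pl330);
        if (!pch->thread) {
            spin_unlock_irqrestore(&pch->lock, flags);
            return -ENOMEM;
        }

        tasklet_init(&pch->task, pl330_tasklet, (unsigned long) pch);

        spin_unlock_irqrestore(&pch->lock, flags);

        return 1;
    }
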
2101 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_config() local
2105 pch->fifo_addr = slave_config->dst_addr; in pl330_config()
2107 pch->burst_sz = __ffs(slave_config->dst_addr_width); in pl330_config()
2109 pch->burst_len = slave_config->dst_maxburst; in pl330_config()
2112 pch->fifo_addr = slave_config->src_addr; in pl330_config()
2114 pch->burst_sz = __ffs(slave_config->src_addr_width); in pl330_config()
2116 pch->burst_len = slave_config->src_maxburst; in pl330_config()
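
The pl330_config() matches show the dmaengine slave config being cached on the channel: the dst_* fields are used for memory-to-device transfers and the src_* fields for device-to-memory, with the bus width folded into a burst-size exponent by __ffs(). A sketch; the direction checks are implied by the dst/src split, and some versions also guard each assignment with a non-zero check:

    static int pl330_config(struct dma_chan *chan,
                            struct dma_slave_config *slave_config)
    {
        struct dma_pl330_chan *pch = to_pchan(chan);

        if (slave_config->direction == DMA_MEM_TO_DEV) {
            pch->fifo_addr = slave_config->dst_addr;
            pch->burst_sz = __ffs(slave_config->dst_addr_width);
            pch->burst_len = slave_config->dst_maxburst;
        } else if (slave_config->direction == DMA_DEV_TO_MEM) {
            pch->fifo_addr = slave_config->src_addr;
            pch->burst_sz = __ffs(slave_config->src_addr_width);
            pch->burst_len = slave_config->src_maxburst;
        }

        return 0;
    }
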
2124 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_terminate_all() local
2127 struct pl330_dmac *pl330 = pch->dmac; in pl330_terminate_all()
2131 spin_lock_irqsave(&pch->lock, flags); in pl330_terminate_all()
2133 _stop(pch->thread); in pl330_terminate_all()
2136 pch->thread->req[0].desc = NULL; in pl330_terminate_all()
2137 pch->thread->req[1].desc = NULL; in pl330_terminate_all()
2138 pch->thread->req_running = -1; in pl330_terminate_all()
2141 list_for_each_entry(desc, &pch->submitted_list, node) { in pl330_terminate_all()
2146 list_for_each_entry(desc, &pch->work_list , node) { in pl330_terminate_all()
2151 list_splice_tail_init(&pch->submitted_list, &pl330->desc_pool); in pl330_terminate_all()
2152 list_splice_tail_init(&pch->work_list, &pl330->desc_pool); in pl330_terminate_all()
2153 list_splice_tail_init(&pch->completed_list, &pl330->desc_pool); in pl330_terminate_all()
2154 spin_unlock_irqrestore(&pch->lock, flags); in pl330_terminate_all()
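
pl330_terminate_all() stops pch->thread, wipes the thread's two request slots, marks every submitted and queued descriptor complete, and splices all three channel lists back into pl330->desc_pool, all under pch->lock. Sketch; the inner pl330->lock around _stop(), the FREE status and the dma_cookie_complete() calls are assumptions:

    static int pl330_terminate_all(struct dma_chan *chan)
    {
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct pl330_dmac *pl330 = pch->dmac;
        struct dma_pl330_desc *desc;
        unsigned long flags;

        spin_lock_irqsave(&pch->lock, flags);

        spin_lock(&pl330->lock);
        _stop(pch->thread);
        spin_unlock(&pl330->lock);

        pch->thread->req[0].desc = NULL;
        pch->thread->req[1].desc = NULL;
        pch->thread->req_running = -1;

        /* Mark everything still queued as finished */
        list_for_each_entry(desc, &pch->submitted_list, node) {
            desc->status = FREE;
            dma_cookie_complete(&desc->txd);
        }
        list_for_each_entry(desc, &pch->work_list, node) {
            desc->status = FREE;
            dma_cookie_complete(&desc->txd);
        }

        /* Hand every descriptor back to the DMAC pool */
        list_splice_tail_init(&pch->submitted_list, &pl330->desc_pool);
        list_splice_tail_init(&pch->work_list, &pl330->desc_pool);
        list_splice_tail_init(&pch->completed_list, &pl330->desc_pool);

        spin_unlock_irqrestore(&pch->lock, flags);

        return 0;
    }
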
2170 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_pause() local
2171 struct pl330_dmac *pl330 = pch->dmac; in pl330_pause()
2175 spin_lock_irqsave(&pch->lock, flags); in pl330_pause()
2178 _stop(pch->thread); in pl330_pause()
2181 spin_unlock_irqrestore(&pch->lock, flags); in pl330_pause()
2190 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_free_chan_resources() local
2193 tasklet_kill(&pch->task); in pl330_free_chan_resources()
2195 pm_runtime_get_sync(pch->dmac->ddma.dev); in pl330_free_chan_resources()
2196 spin_lock_irqsave(&pch->lock, flags); in pl330_free_chan_resources()
2198 pl330_release_channel(pch->thread); in pl330_free_chan_resources()
2199 pch->thread = NULL; in pl330_free_chan_resources()
2201 if (pch->cyclic) in pl330_free_chan_resources()
2202 list_splice_tail_init(&pch->work_list, &pch->dmac->desc_pool); in pl330_free_chan_resources()
2204 spin_unlock_irqrestore(&pch->lock, flags); in pl330_free_chan_resources()
2205 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_free_chan_resources()
2206 pm_runtime_put_autosuspend(pch->dmac->ddma.dev); in pl330_free_chan_resources()
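
pl330_free_chan_resources() is the inverse of the allocation path: kill the tasklet, take a runtime-PM reference, release pch->thread, recycle any cyclic descriptors still parked on work_list, then drop the PM reference. This sketch is assembled almost entirely from the lines above:

    static void pl330_free_chan_resources(struct dma_chan *chan)
    {
        struct dma_pl330_chan *pch = to_pchan(chan);
        unsigned long flags;

        tasklet_kill(&pch->task);

        pm_runtime_get_sync(pch->dmac->ddma.dev);
        spin_lock_irqsave(&pch->lock, flags);

        pl330_release_channel(pch->thread);
        pch->thread = NULL;

        /* Cyclic descriptors never leave work_list, so recycle them here */
        if (pch->cyclic)
            list_splice_tail_init(&pch->work_list, &pch->dmac->desc_pool);

        spin_unlock_irqrestore(&pch->lock, flags);
        pm_runtime_mark_last_busy(pch->dmac->ddma.dev);
        pm_runtime_put_autosuspend(pch->dmac->ddma.dev);
    }
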
2209 static int pl330_get_current_xferred_count(struct dma_pl330_chan *pch, in pl330_get_current_xferred_count() argument
2212 struct pl330_thread *thrd = pch->thread; in pl330_get_current_xferred_count()
2213 struct pl330_dmac *pl330 = pch->dmac; in pl330_get_current_xferred_count()
2226 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_get_current_xferred_count()
2238 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_tx_status() local
2249 spin_lock_irqsave(&pch->lock, flags); in pl330_tx_status()
2251 if (pch->thread->req_running != -1) in pl330_tx_status()
2252 running = pch->thread->req[pch->thread->req_running].desc; in pl330_tx_status()
2255 list_for_each_entry(desc, &pch->work_list, node) { in pl330_tx_status()
2260 pl330_get_current_xferred_count(pch, desc); in pl330_tx_status()
2281 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tx_status()
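
pl330_tx_status() computes the residue by walking pch->work_list: a finished descriptor counts as fully transferred, the descriptor currently on pch->thread is queried through pl330_get_current_xferred_count(), and anything else counts as untransferred. A condensed sketch; bytes_requested, the status values and the dma_cookie_status()/dma_set_residue() calls are assumptions:

    static enum dma_status
    pl330_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
                    struct dma_tx_state *txstate)
    {
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct dma_pl330_desc *desc, *running = NULL;
        unsigned int transferred, residual = 0;
        enum dma_status ret;
        unsigned long flags;

        ret = dma_cookie_status(chan, cookie, txstate);
        if (!txstate || ret == DMA_COMPLETE)
            return ret;

        spin_lock_irqsave(&pch->lock, flags);

        /* Which descriptor is the hardware thread working on right now? */
        if (pch->thread->req_running != -1)
            running = pch->thread->req[pch->thread->req_running].desc;

        list_for_each_entry(desc, &pch->work_list, node) {
            if (desc->status == DONE)
                transferred = desc->bytes_requested;
            else if (desc == running)
                transferred = pl330_get_current_xferred_count(pch, desc);
            else
                transferred = 0;

            residual += desc->bytes_requested - transferred;

            if (desc->txd.cookie == cookie) {
                ret = (desc->status == DONE) ? DMA_COMPLETE
                                             : DMA_IN_PROGRESS;
                break;
            }
        }

        spin_unlock_irqrestore(&pch->lock, flags);

        dma_set_residue(txstate, residual);
        return ret;
    }
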
2291 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_issue_pending() local
2294 spin_lock_irqsave(&pch->lock, flags); in pl330_issue_pending()
2295 if (list_empty(&pch->work_list)) { in pl330_issue_pending()
2301 WARN_ON(list_empty(&pch->submitted_list)); in pl330_issue_pending()
2302 pm_runtime_get_sync(pch->dmac->ddma.dev); in pl330_issue_pending()
2304 list_splice_tail_init(&pch->submitted_list, &pch->work_list); in pl330_issue_pending()
2305 spin_unlock_irqrestore(&pch->lock, flags); in pl330_issue_pending()
2307 pl330_tasklet((unsigned long)pch); in pl330_issue_pending()
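
pl330_issue_pending() moves everything from submitted_list to work_list and then runs the tasklet body directly. The runtime-PM reference is taken only on the transition from an empty work_list, which is why the WARN_ON() guards against an empty submitted_list. Sketch, built from the lines above:

    static void pl330_issue_pending(struct dma_chan *chan)
    {
        struct dma_pl330_chan *pch = to_pchan(chan);
        unsigned long flags;

        spin_lock_irqsave(&pch->lock, flags);
        if (list_empty(&pch->work_list)) {
            /*
             * The PM reference is taken/dropped on work_list
             * empty<->non-empty transitions, so issuing nothing
             * here would unbalance the usage count.
             */
            WARN_ON(list_empty(&pch->submitted_list));
            pm_runtime_get_sync(pch->dmac->ddma.dev);
        }
        list_splice_tail_init(&pch->submitted_list, &pch->work_list);
        spin_unlock_irqrestore(&pch->lock, flags);

        pl330_tasklet((unsigned long)pch);
    }
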
2318 struct dma_pl330_chan *pch = to_pchan(tx->chan); in pl330_tx_submit() local
2322 spin_lock_irqsave(&pch->lock, flags); in pl330_tx_submit()
2327 if (pch->cyclic) { in pl330_tx_submit()
2335 list_move_tail(&desc->node, &pch->submitted_list); in pl330_tx_submit()
2340 list_add_tail(&last->node, &pch->submitted_list); in pl330_tx_submit()
2341 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tx_submit()
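
pl330_tx_submit() walks the chain of descriptors hanging off the last one, assigns cookies, and moves each onto pch->submitted_list under pch->lock; for cyclic channels the callback of the final descriptor is copied to every node. Sketch; the to_desc() helper, dma_cookie_assign() and the chain walk via last->node are assumptions:

    static dma_cookie_t pl330_tx_submit(struct dma_async_tx_descriptor *tx)
    {
        struct dma_pl330_desc *desc, *last = to_desc(tx);
        struct dma_pl330_chan *pch = to_pchan(tx->chan);
        dma_cookie_t cookie;
        unsigned long flags;

        spin_lock_irqsave(&pch->lock, flags);

        /* Assign cookies to every node chained behind the last descriptor */
        while (!list_empty(&last->node)) {
            desc = list_entry(last->node.next, struct dma_pl330_desc, node);
            if (pch->cyclic) {
                desc->txd.callback = last->txd.callback;
                desc->txd.callback_param = last->txd.callback_param;
            }
            dma_cookie_assign(&desc->txd);
            list_move_tail(&desc->node, &pch->submitted_list);
        }

        cookie = dma_cookie_assign(&last->txd);
        list_add_tail(&last->node, &pch->submitted_list);

        spin_unlock_irqrestore(&pch->lock, flags);

        return cookie;
    }
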
2401 static struct dma_pl330_desc *pl330_get_desc(struct dma_pl330_chan *pch) in pl330_get_desc() argument
2403 struct pl330_dmac *pl330 = pch->dmac; in pl330_get_desc()
2404 u8 *peri_id = pch->chan.private; in pl330_get_desc()
2418 dev_err(pch->dmac->ddma.dev, in pl330_get_desc()
2425 desc->pchan = pch; in pl330_get_desc()
2429 desc->peri = peri_id ? pch->chan.chan_id : 0; in pl330_get_desc()
2430 desc->rqcfg.pcfg = &pch->dmac->pcfg; in pl330_get_desc()
2432 dma_async_tx_descriptor_init(&desc->txd, &pch->chan); in pl330_get_desc()
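
pl330_get_desc() pulls a free descriptor from the DMAC pool (growing the pool on demand), binds it to the channel and its configuration, and initializes the embedded dmaengine descriptor. Sketch; the pluck_desc()/add_desc() pool helpers and their signatures are assumptions that vary between driver versions:

    static struct dma_pl330_desc *pl330_get_desc(struct dma_pl330_chan *pch)
    {
        struct pl330_dmac *pl330 = pch->dmac;
        u8 *peri_id = pch->chan.private;
        struct dma_pl330_desc *desc;

        /* Take a descriptor from the DMAC pool, allocating more if empty */
        desc = pluck_desc(pl330);
        if (!desc && add_desc(pl330, GFP_ATOMIC, 1))
            desc = pluck_desc(pl330);

        if (!desc) {
            dev_err(pch->dmac->ddma.dev,
                    "%s:%d ALERT!\n", __func__, __LINE__);
            return NULL;
        }

        /* Bind the descriptor to this channel */
        desc->pchan = pch;
        desc->peri = peri_id ? pch->chan.chan_id : 0;
        desc->rqcfg.pcfg = &pch->dmac->pcfg;

        dma_async_tx_descriptor_init(&desc->txd, &pch->chan);

        return desc;
    }
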
2446 __pl330_prep_dma_memcpy(struct dma_pl330_chan *pch, dma_addr_t dst, in __pl330_prep_dma_memcpy() argument
2449 struct dma_pl330_desc *desc = pl330_get_desc(pch); in __pl330_prep_dma_memcpy()
2452 dev_err(pch->dmac->ddma.dev, "%s:%d Unable to fetch desc\n", in __pl330_prep_dma_memcpy()
2475 struct dma_pl330_chan *pch = desc->pchan; in get_burst_len() local
2476 struct pl330_dmac *pl330 = pch->dmac; in get_burst_len()
2502 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_dma_cyclic() local
2503 struct pl330_dmac *pl330 = pch->dmac; in pl330_prep_dma_cyclic()
2512 dev_err(pch->dmac->ddma.dev, "%s:%d Invalid dma direction\n", in pl330_prep_dma_cyclic()
2518 desc = pl330_get_desc(pch); in pl330_prep_dma_cyclic()
2520 dev_err(pch->dmac->ddma.dev, "%s:%d Unable to fetch desc\n", in pl330_prep_dma_cyclic()
2546 dst = pch->fifo_addr; in pl330_prep_dma_cyclic()
2551 src = pch->fifo_addr; in pl330_prep_dma_cyclic()
2559 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_dma_cyclic()
2575 pch->cyclic = true; in pl330_prep_dma_cyclic()
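
pl330_prep_dma_cyclic() checks the direction, then builds one descriptor per period: the peripheral side of each transfer is pch->fifo_addr, the memory side walks through the buffer, and the burst size comes from pch->burst_sz; finally the channel is marked cyclic. A simplified sketch; the period loop, the fill_px() helper and the descriptor chaining are assumptions, and the real code also unwinds partially built chains on error:

    static struct dma_async_tx_descriptor *
    pl330_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t dma_addr,
                          size_t len, size_t period_len,
                          enum dma_transfer_direction direction,
                          unsigned long flags)
    {
        struct dma_pl330_desc *desc = NULL, *first = NULL;
        struct dma_pl330_chan *pch = to_pchan(chan);
        dma_addr_t dst, src;
        unsigned int i;

        if (direction != DMA_MEM_TO_DEV && direction != DMA_DEV_TO_MEM) {
            dev_err(pch->dmac->ddma.dev, "%s:%d Invalid dma direction\n",
                    __func__, __LINE__);
            return NULL;
        }

        for (i = 0; i < len / period_len; i++) {
            desc = pl330_get_desc(pch);
            if (!desc) {
                dev_err(pch->dmac->ddma.dev,
                        "%s:%d Unable to fetch desc\n",
                        __func__, __LINE__);
                return NULL;
            }

            if (direction == DMA_MEM_TO_DEV) {
                src = dma_addr + i * period_len;   /* memory side advances */
                dst = pch->fifo_addr;              /* FIFO side is fixed */
            } else {
                src = pch->fifo_addr;
                dst = dma_addr + i * period_len;
            }

            desc->rqcfg.brst_size = pch->burst_sz;
            fill_px(&desc->px, dst, src, period_len);   /* assumed helper */

            /* ... chain desc behind 'first' so tx_submit sees one chain ... */
            if (!first)
                first = desc;
        }

        pch->cyclic = true;

        return desc ? &desc->txd : NULL;
    }
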
2586 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_dma_memcpy() local
2587 struct pl330_dmac *pl330 = pch->dmac; in pl330_prep_dma_memcpy()
2590 if (unlikely(!pch || !len)) in pl330_prep_dma_memcpy()
2593 desc = __pl330_prep_dma_memcpy(pch, dst, src, len); in pl330_prep_dma_memcpy()
2659 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_slave_sg() local
2664 if (unlikely(!pch || !sgl || !sg_len)) in pl330_prep_slave_sg()
2667 addr = pch->fifo_addr; in pl330_prep_slave_sg()
2673 desc = pl330_get_desc(pch); in pl330_prep_slave_sg()
2675 struct pl330_dmac *pl330 = pch->dmac; in pl330_prep_slave_sg()
2677 dev_err(pch->dmac->ddma.dev, in pl330_prep_slave_sg()
2702 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_slave_sg()
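
pl330_prep_slave_sg() does the same per scatterlist entry: one descriptor per sg element, with pch->fifo_addr on the peripheral side and sg_dma_address() on the memory side, again using pch->burst_sz. A condensed sketch; the fill_px() helper, the chaining, and the error unwinding back into the desc_pool are assumptions:

    static struct dma_async_tx_descriptor *
    pl330_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                        unsigned int sg_len,
                        enum dma_transfer_direction direction,
                        unsigned long flg, void *context)
    {
        struct dma_pl330_desc *first = NULL, *desc = NULL;
        struct dma_pl330_chan *pch = to_pchan(chan);
        struct scatterlist *sg;
        dma_addr_t addr;
        int i;

        if (unlikely(!pch || !sgl || !sg_len))
            return NULL;

        addr = pch->fifo_addr;

        for_each_sg(sgl, sg, sg_len, i) {
            desc = pl330_get_desc(pch);
            if (!desc) {
                dev_err(pch->dmac->ddma.dev,
                        "%s:%d Unable to fetch desc\n",
                        __func__, __LINE__);
                /* real code returns already-built descs to the pool here */
                return NULL;
            }

            if (direction == DMA_MEM_TO_DEV)
                fill_px(&desc->px, addr, sg_dma_address(sg),
                        sg_dma_len(sg));        /* assumed helper */
            else
                fill_px(&desc->px, sg_dma_address(sg), addr,
                        sg_dma_len(sg));

            desc->rqcfg.brst_size = pch->burst_sz;

            /* ... chain desc behind 'first' ... */
            if (!first)
                first = desc;
        }

        return desc ? &desc->txd : NULL;
    }
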
2774 struct dma_pl330_chan *pch, *_p; in pl330_probe() local
2842 pl330->peripherals = kzalloc(num_chan * sizeof(*pch), GFP_KERNEL); in pl330_probe()
2850 pch = &pl330->peripherals[i]; in pl330_probe()
2852 pch->chan.private = pdat ? &pdat->peri_id[i] : NULL; in pl330_probe()
2854 pch->chan.private = adev->dev.of_node; in pl330_probe()
2856 INIT_LIST_HEAD(&pch->submitted_list); in pl330_probe()
2857 INIT_LIST_HEAD(&pch->work_list); in pl330_probe()
2858 INIT_LIST_HEAD(&pch->completed_list); in pl330_probe()
2859 spin_lock_init(&pch->lock); in pl330_probe()
2860 pch->thread = NULL; in pl330_probe()
2861 pch->chan.device = pd; in pl330_probe()
2862 pch->dmac = pl330; in pl330_probe()
2865 list_add_tail(&pch->chan.device_node, &pd->channels); in pl330_probe()
2936 list_for_each_entry_safe(pch, _p, &pl330->ddma.channels, in pl330_probe()
2940 list_del(&pch->chan.device_node); in pl330_probe()
2943 if (pch->thread) { in pl330_probe()
2944 pl330_terminate_all(&pch->chan); in pl330_probe()
2945 pl330_free_chan_resources(&pch->chan); in pl330_probe()
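
In pl330_probe(), the per-channel dma_pl330_chan structures are carved out of a single kzalloc()'d array and initialized before being added to the dmaengine device's channel list; the error path (and pl330_remove() below) then walks that list and terminates and frees any channel that was given a thread. A sketch of the init loop; pd, pdat, num_chan, the of_node check and the error label are assumptions from context:

    /* Allocate and initialize one dma_pl330_chan per channel/peripheral */
    pl330->peripherals = kzalloc(num_chan * sizeof(*pch), GFP_KERNEL);
    if (!pl330->peripherals) {
        ret = -ENOMEM;
        goto probe_err2;            /* assumed error label */
    }

    for (i = 0; i < num_chan; i++) {
        pch = &pl330->peripherals[i];

        if (!adev->dev.of_node)
            pch->chan.private = pdat ? &pdat->peri_id[i] : NULL;
        else
            pch->chan.private = adev->dev.of_node;

        INIT_LIST_HEAD(&pch->submitted_list);
        INIT_LIST_HEAD(&pch->work_list);
        INIT_LIST_HEAD(&pch->completed_list);
        spin_lock_init(&pch->lock);
        pch->thread = NULL;
        pch->chan.device = pd;
        pch->dmac = pl330;

        /* Expose the channel through the dmaengine device */
        list_add_tail(&pch->chan.device_node, &pd->channels);
    }
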
2957 struct dma_pl330_chan *pch, *_p; in pl330_remove() local
2967 list_for_each_entry_safe(pch, _p, &pl330->ddma.channels, in pl330_remove()
2971 list_del(&pch->chan.device_node); in pl330_remove()
2974 if (pch->thread) { in pl330_remove()
2975 pl330_terminate_all(&pch->chan); in pl330_remove()
2976 pl330_free_chan_resources(&pch->chan); in pl330_remove()