pd_chan           164 drivers/dma/pch_dma.c struct pch_dma_desc *pdc_first_active(struct pch_dma_chan *pd_chan)
pd_chan           166 drivers/dma/pch_dma.c 	return list_first_entry(&pd_chan->active_list,
pd_chan           171 drivers/dma/pch_dma.c struct pch_dma_desc *pdc_first_queued(struct pch_dma_chan *pd_chan)
pd_chan           173 drivers/dma/pch_dma.c 	return list_first_entry(&pd_chan->queue,
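The two helpers above are thin wrappers around list_first_entry(), which is container_of() applied to the list head's first node. Below is a minimal userspace sketch of the idiom; the struct pch_dma_desc stand-in is simplified (the real one also carries a register shadow and a dma_async_tx_descriptor), and first_active() is a hypothetical rename of pdc_first_active().

    #include <stddef.h>

    /* Intrusive doubly-linked list node, as in the kernel's struct list_head. */
    struct list_head {
            struct list_head *next, *prev;
    };

    /* Simplified stand-in for struct pch_dma_desc. */
    struct pch_dma_desc {
            struct list_head desc_node;  /* links this desc into a channel list */
            struct list_head tx_list;    /* extra descs chained to this one */
    };

    /* container_of(): recover the enclosing struct from a member pointer. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    /* list_first_entry(): the container of head->next.  Only meaningful
     * when the list is known to be non-empty. */
    #define list_first_entry(head, type, member) \
            container_of((head)->next, type, member)

    static struct pch_dma_desc *first_active(struct list_head *active_list)
    {
            return list_first_entry(active_list, struct pch_dma_desc, desc_node);
    }

Callers such as pdc_advance_work() guard these accessors with list_empty() checks, since list_first_entry() on an empty list hands back a bogus pointer derived from the head itself.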
pd_chan           203 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
pd_chan           217 drivers/dma/pch_dma.c 		if (pd_chan->dir == DMA_MEM_TO_DEV)
pd_chan           235 drivers/dma/pch_dma.c 		if (pd_chan->dir == DMA_MEM_TO_DEV)
pd_chan           283 drivers/dma/pch_dma.c static u32 pdc_get_status0(struct pch_dma_chan *pd_chan)
pd_chan           285 drivers/dma/pch_dma.c 	struct pch_dma *pd = to_pd(pd_chan->chan.device);
pd_chan           290 drivers/dma/pch_dma.c 			DMA_STATUS_BITS_PER_CH * pd_chan->chan.chan_id));
pd_chan           293 drivers/dma/pch_dma.c static u32 pdc_get_status2(struct pch_dma_chan *pd_chan)
pd_chan           295 drivers/dma/pch_dma.c 	struct pch_dma *pd = to_pd(pd_chan->chan.device);
pd_chan           300 drivers/dma/pch_dma.c 			DMA_STATUS_BITS_PER_CH * (pd_chan->chan.chan_id - 8)));
pd_chan           303 drivers/dma/pch_dma.c static bool pdc_is_idle(struct pch_dma_chan *pd_chan)
pd_chan           307 drivers/dma/pch_dma.c 	if (pd_chan->chan.chan_id < 8)
pd_chan           308 drivers/dma/pch_dma.c 		sts = pdc_get_status0(pd_chan);
pd_chan           310 drivers/dma/pch_dma.c 		sts = pdc_get_status2(pd_chan);
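The controller packs one small status field per channel into shared registers: channels 0-7 are decoded from STS0 and channels 8 and up from STS2 with the id rebased by 8, and pdc_is_idle() compares the extracted field against the idle value. A sketch of the extraction follows; the listing names DMA_STATUS_BITS_PER_CH but not its value, so the width, shift, mask, and idle constants below are assumptions.

    #include <stdint.h>
    #include <stdbool.h>

    /* Assumed field layout; illustrative values, not from the datasheet. */
    #define DMA_STATUS_BITS_PER_CH  2
    #define DMA_STATUS_MASK_BITS    0x3
    #define DMA_STATUS_SHIFT_BITS   16
    #define DMA_STATUS_IDLE         0x0

    /* Extract channel chan_id's field from its status register.  For
     * channels >= 8 the caller passes STS2 and chan_id - 8, mirroring
     * pdc_get_status2() above. */
    static uint32_t pdc_status_field(uint32_t sts, unsigned int chan_id)
    {
            return DMA_STATUS_MASK_BITS &
                   (sts >> (DMA_STATUS_SHIFT_BITS +
                            DMA_STATUS_BITS_PER_CH * chan_id));
    }

    static bool pdc_field_is_idle(uint32_t sts, unsigned int chan_id)
    {
            return pdc_status_field(sts, chan_id) == DMA_STATUS_IDLE;
    }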
pd_chan           319 drivers/dma/pch_dma.c static void pdc_dostart(struct pch_dma_chan *pd_chan, struct pch_dma_desc *desc)
pd_chan           321 drivers/dma/pch_dma.c 	if (!pdc_is_idle(pd_chan)) {
pd_chan           322 drivers/dma/pch_dma.c 		dev_err(chan2dev(&pd_chan->chan),
pd_chan           327 drivers/dma/pch_dma.c 	dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> dev_addr: %x\n",
pd_chan           328 drivers/dma/pch_dma.c 		pd_chan->chan.chan_id, desc->regs.dev_addr);
pd_chan           329 drivers/dma/pch_dma.c 	dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> mem_addr: %x\n",
pd_chan           330 drivers/dma/pch_dma.c 		pd_chan->chan.chan_id, desc->regs.mem_addr);
pd_chan           331 drivers/dma/pch_dma.c 	dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> size: %x\n",
pd_chan           332 drivers/dma/pch_dma.c 		pd_chan->chan.chan_id, desc->regs.size);
pd_chan           333 drivers/dma/pch_dma.c 	dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> next: %x\n",
pd_chan           334 drivers/dma/pch_dma.c 		pd_chan->chan.chan_id, desc->regs.next);
pd_chan           337 drivers/dma/pch_dma.c 		channel_writel(pd_chan, DEV_ADDR, desc->regs.dev_addr);
pd_chan           338 drivers/dma/pch_dma.c 		channel_writel(pd_chan, MEM_ADDR, desc->regs.mem_addr);
pd_chan           339 drivers/dma/pch_dma.c 		channel_writel(pd_chan, SIZE, desc->regs.size);
pd_chan           340 drivers/dma/pch_dma.c 		channel_writel(pd_chan, NEXT, desc->regs.next);
pd_chan           341 drivers/dma/pch_dma.c 		pdc_set_mode(&pd_chan->chan, DMA_CTL0_ONESHOT);
pd_chan           343 drivers/dma/pch_dma.c 		channel_writel(pd_chan, NEXT, desc->txd.phys);
pd_chan           344 drivers/dma/pch_dma.c 		pdc_set_mode(&pd_chan->chan, DMA_CTL0_SG);
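pdc_dostart() refuses to reprogram a busy channel (the dev_err() above fires and the function returns), then dumps the descriptor via dev_dbg() and picks one of two hardware modes: with no chained siblings it programs all four per-channel registers directly and selects DMA_CTL0_ONESHOT; otherwise it writes only NEXT with the chain's DMA address (desc->txd.phys) and selects DMA_CTL0_SG so the engine fetches descriptors itself. The branch condition is elided from the listing; a list_empty(&desc->tx_list) test is assumed in the sketch below, and chan_writel()/set_mode() are stand-ins for channel_writel()/pdc_set_mode().

    #include <stdint.h>
    #include <stdbool.h>

    struct desc_regs { uint32_t dev_addr, mem_addr, size, next; };

    enum ctl_mode { CTL_ONESHOT, CTL_SG };

    /* Stubs; these are MMIO accesses in the real driver. */
    static void chan_writel(const char *reg, uint32_t val) { (void)reg; (void)val; }
    static void set_mode(enum ctl_mode m) { (void)m; }

    static void dostart_sketch(const struct desc_regs *regs, uint32_t chain_phys,
                               bool single_desc)
    {
            if (single_desc) {
                    /* One-shot: program every register by hand. */
                    chan_writel("DEV_ADDR", regs->dev_addr);
                    chan_writel("MEM_ADDR", regs->mem_addr);
                    chan_writel("SIZE", regs->size);
                    chan_writel("NEXT", regs->next);
                    set_mode(CTL_ONESHOT);
            } else {
                    /* Scatter-gather: point the engine at the first
                     * descriptor's DMA address and let it walk the chain. */
                    chan_writel("NEXT", chain_phys);
                    set_mode(CTL_SG);
            }
    }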
pd_chan           348 drivers/dma/pch_dma.c static void pdc_chain_complete(struct pch_dma_chan *pd_chan,
pd_chan           355 drivers/dma/pch_dma.c 	list_splice_init(&desc->tx_list, &pd_chan->free_list);
pd_chan           356 drivers/dma/pch_dma.c 	list_move(&desc->desc_node, &pd_chan->free_list);
pd_chan           361 drivers/dma/pch_dma.c static void pdc_complete_all(struct pch_dma_chan *pd_chan)
pd_chan           366 drivers/dma/pch_dma.c 	BUG_ON(!pdc_is_idle(pd_chan));
pd_chan           368 drivers/dma/pch_dma.c 	if (!list_empty(&pd_chan->queue))
pd_chan           369 drivers/dma/pch_dma.c 		pdc_dostart(pd_chan, pdc_first_queued(pd_chan));
pd_chan           371 drivers/dma/pch_dma.c 	list_splice_init(&pd_chan->active_list, &list);
pd_chan           372 drivers/dma/pch_dma.c 	list_splice_init(&pd_chan->queue, &pd_chan->active_list);
pd_chan           375 drivers/dma/pch_dma.c 		pdc_chain_complete(pd_chan, desc);
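pdc_complete_all() uses the list_splice_init() idiom: drain active_list onto a stack-local list, promote everything queued into active_list (after restarting the engine if the queue is non-empty), and only then walk the private list completing each descriptor. Completions thus run off the shared lists and cannot race with new submissions. A sketch of the drain-then-walk shape, reusing struct list_head and container_of from the first sketch:

    /* list_splice_init(): move all of src to the head of dst, leave src empty. */
    static void list_splice_init_sketch(struct list_head *src,
                                        struct list_head *dst)
    {
            if (src->next != src) {                 /* non-empty */
                    struct list_head *first = src->next, *last = src->prev;

                    last->next = dst->next;
                    dst->next->prev = last;
                    dst->next = first;
                    first->prev = dst;

                    src->next = src->prev = src;    /* reinitialize src */
            }
    }

    static void complete_all_sketch(struct list_head *active_list,
                                    struct list_head *queue,
                                    void (*complete)(struct pch_dma_desc *))
    {
            struct list_head list = { &list, &list };       /* local LIST_HEAD */

            list_splice_init_sketch(active_list, &list);    /* drain active */
            list_splice_init_sketch(queue, active_list);    /* promote queue */

            /* Safe walk: save next first, complete() may recycle the entry. */
            for (struct list_head *p = list.next, *n = p->next;
                 p != &list; p = n, n = p->next)
                    complete(container_of(p, struct pch_dma_desc, desc_node));
    }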
pd_chan           378 drivers/dma/pch_dma.c static void pdc_handle_error(struct pch_dma_chan *pd_chan)
pd_chan           382 drivers/dma/pch_dma.c 	bad_desc = pdc_first_active(pd_chan);
pd_chan           385 drivers/dma/pch_dma.c 	list_splice_init(&pd_chan->queue, pd_chan->active_list.prev);
pd_chan           387 drivers/dma/pch_dma.c 	if (!list_empty(&pd_chan->active_list))
pd_chan           388 drivers/dma/pch_dma.c 		pdc_dostart(pd_chan, pdc_first_active(pd_chan));
pd_chan           390 drivers/dma/pch_dma.c 	dev_crit(chan2dev(&pd_chan->chan), "Bad descriptor submitted\n");
pd_chan           391 drivers/dma/pch_dma.c 	dev_crit(chan2dev(&pd_chan->chan), "descriptor cookie: %d\n",
pd_chan           394 drivers/dma/pch_dma.c 	pdc_chain_complete(pd_chan, bad_desc);
pd_chan           397 drivers/dma/pch_dma.c static void pdc_advance_work(struct pch_dma_chan *pd_chan)
pd_chan           399 drivers/dma/pch_dma.c 	if (list_empty(&pd_chan->active_list) ||
pd_chan           400 drivers/dma/pch_dma.c 		list_is_singular(&pd_chan->active_list)) {
pd_chan           401 drivers/dma/pch_dma.c 		pdc_complete_all(pd_chan);
pd_chan           403 drivers/dma/pch_dma.c 		pdc_chain_complete(pd_chan, pdc_first_active(pd_chan));
pd_chan           404 drivers/dma/pch_dma.c 		pdc_dostart(pd_chan, pdc_first_active(pd_chan));
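pdc_advance_work() is the scheduling step run from the tasklet: with zero or one active descriptor it lets pdc_complete_all() retire everything and promote the queue; with more it retires only the finished head and restarts the engine on the new head. A compact sketch using the helpers above; complete() must unlink its argument from active_list, as pdc_chain_complete() does via list_move() to free_list.

    static int list_is_singular_sketch(const struct list_head *head)
    {
            /* exactly one entry: non-empty and first == last */
            return head->next != head && head->next == head->prev;
    }

    static void advance_work_sketch(struct list_head *active_list,
                                    struct list_head *queue,
                                    void (*complete)(struct pch_dma_desc *),
                                    void (*start)(struct pch_dma_desc *))
    {
            if (active_list->next == active_list ||         /* list_empty */
                list_is_singular_sketch(active_list)) {
                    complete_all_sketch(active_list, queue, complete);
            } else {
                    complete(first_active(active_list));    /* retire head */
                    start(first_active(active_list));       /* kick next */
            }
    }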
pd_chan           411 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan = to_pd_chan(txd->chan);
pd_chan           413 drivers/dma/pch_dma.c 	spin_lock(&pd_chan->lock);
pd_chan           415 drivers/dma/pch_dma.c 	if (list_empty(&pd_chan->active_list)) {
pd_chan           416 drivers/dma/pch_dma.c 		list_add_tail(&desc->desc_node, &pd_chan->active_list);
pd_chan           417 drivers/dma/pch_dma.c 		pdc_dostart(pd_chan, desc);
pd_chan           419 drivers/dma/pch_dma.c 		list_add_tail(&desc->desc_node, &pd_chan->queue);
pd_chan           422 drivers/dma/pch_dma.c 	spin_unlock(&pd_chan->lock);
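pd_tx_submit() is the classic start-or-queue submit: under the channel spinlock, an idle channel (empty active_list) gets the descriptor started immediately, while a busy one has it appended to queue for pdc_advance_work() to pick up later. A sketch with the same assumed list helpers; the locking is shown as comments.

    static void list_add_tail_sketch(struct list_head *node,
                                     struct list_head *head)
    {
            node->prev = head->prev;
            node->next = head;
            head->prev->next = node;
            head->prev = node;
    }

    static void tx_submit_sketch(struct pch_dma_desc *desc,
                                 struct list_head *active_list,
                                 struct list_head *queue,
                                 void (*start)(struct pch_dma_desc *))
    {
            /* spin_lock(&pd_chan->lock); */
            if (active_list->next == active_list) {         /* idle: run now */
                    list_add_tail_sketch(&desc->desc_node, active_list);
                    start(desc);
            } else {                                        /* busy: defer */
                    list_add_tail_sketch(&desc->desc_node, queue);
            }
            /* spin_unlock(&pd_chan->lock); */
    }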
pd_chan           444 drivers/dma/pch_dma.c static struct pch_dma_desc *pdc_desc_get(struct pch_dma_chan *pd_chan)
pd_chan           450 drivers/dma/pch_dma.c 	spin_lock(&pd_chan->lock);
pd_chan           451 drivers/dma/pch_dma.c 	list_for_each_entry_safe(desc, _d, &pd_chan->free_list, desc_node) {
pd_chan           458 drivers/dma/pch_dma.c 		dev_dbg(chan2dev(&pd_chan->chan), "desc %p not ACKed\n", desc);
pd_chan           460 drivers/dma/pch_dma.c 	spin_unlock(&pd_chan->lock);
pd_chan           461 drivers/dma/pch_dma.c 	dev_dbg(chan2dev(&pd_chan->chan), "scanned %d descriptors\n", i);
pd_chan           464 drivers/dma/pch_dma.c 		ret = pdc_alloc_desc(&pd_chan->chan, GFP_ATOMIC);
pd_chan           466 drivers/dma/pch_dma.c 			spin_lock(&pd_chan->lock);
pd_chan           467 drivers/dma/pch_dma.c 			pd_chan->descs_allocated++;
pd_chan           468 drivers/dma/pch_dma.c 			spin_unlock(&pd_chan->lock);
pd_chan           470 drivers/dma/pch_dma.c 			dev_err(chan2dev(&pd_chan->chan),
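pdc_desc_get() scans free_list for a descriptor whose previous user has ACKed it, unlinking and returning the first match and skipping anything still unACKed with the debug message above. An empty scan falls back to allocating a fresh descriptor with GFP_ATOMIC (this path can run in atomic context) and bumps descs_allocated. A sketch of the scan-else-allocate shape; test_ack() and alloc_desc() are hypothetical stand-ins for async_tx_test_ack() and pdc_alloc_desc().

    static struct pch_dma_desc *
    desc_get_sketch(struct list_head *free_list,
                    int (*test_ack)(struct pch_dma_desc *),
                    struct pch_dma_desc *(*alloc_desc)(void),
                    int *descs_allocated)
    {
            struct pch_dma_desc *ret = NULL;

            /* Safe scan: save next so unlinking the winner is harmless. */
            for (struct list_head *p = free_list->next, *n = p->next;
                 p != free_list; p = n, n = p->next) {
                    struct pch_dma_desc *d =
                            container_of(p, struct pch_dma_desc, desc_node);

                    if (test_ack(d)) {
                            p->prev->next = p->next;        /* list_del */
                            p->next->prev = p->prev;
                            ret = d;
                            break;
                    }
                    /* else: a previous owner still holds it; skip it */
            }

            if (!ret) {
                    ret = alloc_desc();             /* GFP_ATOMIC in the driver */
                    if (ret)
                            (*descs_allocated)++;   /* grow the pool */
            }
            return ret;
    }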
pd_chan           478 drivers/dma/pch_dma.c static void pdc_desc_put(struct pch_dma_chan *pd_chan,
pd_chan           482 drivers/dma/pch_dma.c 		spin_lock(&pd_chan->lock);
pd_chan           483 drivers/dma/pch_dma.c 		list_splice_init(&desc->tx_list, &pd_chan->free_list);
pd_chan           484 drivers/dma/pch_dma.c 		list_add(&desc->desc_node, &pd_chan->free_list);
pd_chan           485 drivers/dma/pch_dma.c 		spin_unlock(&pd_chan->lock);
pd_chan           491 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
pd_chan           496 drivers/dma/pch_dma.c 	if (!pdc_is_idle(pd_chan)) {
pd_chan           501 drivers/dma/pch_dma.c 	if (!list_empty(&pd_chan->free_list))
pd_chan           502 drivers/dma/pch_dma.c 		return pd_chan->descs_allocated;
pd_chan           516 drivers/dma/pch_dma.c 	spin_lock_irq(&pd_chan->lock);
pd_chan           517 drivers/dma/pch_dma.c 	list_splice(&tmp_list, &pd_chan->free_list);
pd_chan           518 drivers/dma/pch_dma.c 	pd_chan->descs_allocated = i;
pd_chan           520 drivers/dma/pch_dma.c 	spin_unlock_irq(&pd_chan->lock);
pd_chan           524 drivers/dma/pch_dma.c 	return pd_chan->descs_allocated;
pd_chan           529 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
pd_chan           534 drivers/dma/pch_dma.c 	BUG_ON(!pdc_is_idle(pd_chan));
pd_chan           535 drivers/dma/pch_dma.c 	BUG_ON(!list_empty(&pd_chan->active_list));
pd_chan           536 drivers/dma/pch_dma.c 	BUG_ON(!list_empty(&pd_chan->queue));
pd_chan           538 drivers/dma/pch_dma.c 	spin_lock_irq(&pd_chan->lock);
pd_chan           539 drivers/dma/pch_dma.c 	list_splice_init(&pd_chan->free_list, &tmp_list);
pd_chan           540 drivers/dma/pch_dma.c 	pd_chan->descs_allocated = 0;
pd_chan           541 drivers/dma/pch_dma.c 	spin_unlock_irq(&pd_chan->lock);
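pd_alloc_chan_resources() and pd_free_chan_resources() bracket the descriptor pool's lifetime. Allocation returns early with the existing count if free_list is already populated (and refuses to run at all while the channel is busy), otherwise it builds a batch on a temporary list without the lock and splices it in under spin_lock_irq(); teardown asserts via BUG_ON() that the channel is idle with nothing active or queued, then drains free_list. A sketch of the allocation side; INIT_NR_DESC is an assumed batch size standing in for the driver's per-channel initial count, and the list helpers come from the sketches above.

    #define INIT_NR_DESC    64      /* assumed initial pool size */

    static int alloc_resources_sketch(struct list_head *free_list,
                                      int *descs_allocated,
                                      struct pch_dma_desc *(*alloc_desc)(void))
    {
            struct list_head tmp_list = { &tmp_list, &tmp_list };
            int i;

            /* Pool already populated: just report its size. */
            if (free_list->next != free_list)
                    return *descs_allocated;

            /* Build the batch lock-free on a private list... */
            for (i = 0; i < INIT_NR_DESC; i++) {
                    struct pch_dma_desc *desc = alloc_desc();

                    if (!desc)
                            break;                  /* keep what we got */
                    list_add_tail_sketch(&desc->desc_node, &tmp_list);
            }

            /* ...then publish it in one short critical section. */
            /* spin_lock_irq(&pd_chan->lock); */
            list_splice_init_sketch(&tmp_list, free_list);
            *descs_allocated = i;
            /* spin_unlock_irq(&pd_chan->lock); */

            return i;
    }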
pd_chan           557 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
pd_chan           559 drivers/dma/pch_dma.c 	if (pdc_is_idle(pd_chan)) {
pd_chan           560 drivers/dma/pch_dma.c 		spin_lock(&pd_chan->lock);
pd_chan           561 drivers/dma/pch_dma.c 		pdc_advance_work(pd_chan);
pd_chan           562 drivers/dma/pch_dma.c 		spin_unlock(&pd_chan->lock);
pd_chan           571 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
pd_chan           592 drivers/dma/pch_dma.c 	pd_chan->dir = direction;
pd_chan           596 drivers/dma/pch_dma.c 		desc = pdc_desc_get(pd_chan);
pd_chan           648 drivers/dma/pch_dma.c 	pdc_desc_put(pd_chan, first);
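The prep_slave_sg path stores the transfer direction on the channel, then pulls one descriptor per scatterlist segment from pdc_desc_get(); if any allocation fails mid-build, the partial chain is handed back through pdc_desc_put(), which splices desc->tx_list plus the descriptor itself onto free_list under the lock. A sketch of that recycle step, reusing the helpers above:

    /* Recycle a descriptor and everything chained to it (pdc_desc_put()). */
    static void desc_put_sketch(struct pch_dma_desc *desc,
                                struct list_head *free_list)
    {
            if (!desc)
                    return;
            /* spin_lock(&pd_chan->lock); */
            list_splice_init_sketch(&desc->tx_list, free_list);
            /* list_add() in the driver; head vs tail is immaterial
             * for a free pool. */
            list_add_tail_sketch(&desc->desc_node, free_list);
            /* spin_unlock(&pd_chan->lock); */
    }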
pd_chan           654 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan = to_pd_chan(chan);
pd_chan           658 drivers/dma/pch_dma.c 	spin_lock_irq(&pd_chan->lock);
pd_chan           660 drivers/dma/pch_dma.c 	pdc_set_mode(&pd_chan->chan, DMA_CTL0_DISABLE);
pd_chan           662 drivers/dma/pch_dma.c 	list_splice_init(&pd_chan->active_list, &list);
pd_chan           663 drivers/dma/pch_dma.c 	list_splice_init(&pd_chan->queue, &list);
pd_chan           666 drivers/dma/pch_dma.c 		pdc_chain_complete(pd_chan, desc);
pd_chan           668 drivers/dma/pch_dma.c 	spin_unlock_irq(&pd_chan->lock);
pd_chan           675 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan = (struct pch_dma_chan *)data;
pd_chan           678 drivers/dma/pch_dma.c 	if (!pdc_is_idle(pd_chan)) {
pd_chan           679 drivers/dma/pch_dma.c 		dev_err(chan2dev(&pd_chan->chan),
pd_chan           684 drivers/dma/pch_dma.c 	spin_lock_irqsave(&pd_chan->lock, flags);
pd_chan           685 drivers/dma/pch_dma.c 	if (test_and_clear_bit(0, &pd_chan->err_status))
pd_chan           686 drivers/dma/pch_dma.c 		pdc_handle_error(pd_chan);
pd_chan           688 drivers/dma/pch_dma.c 		pdc_advance_work(pd_chan);
pd_chan           689 drivers/dma/pch_dma.c 	spin_unlock_irqrestore(&pd_chan->lock, flags);
pd_chan           695 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan;
pd_chan           708 drivers/dma/pch_dma.c 		pd_chan = &pd->channels[i];
pd_chan           713 drivers/dma/pch_dma.c 					set_bit(0, &pd_chan->err_status);
pd_chan           715 drivers/dma/pch_dma.c 				tasklet_schedule(&pd_chan->tasklet);
pd_chan           721 drivers/dma/pch_dma.c 					set_bit(0, &pd_chan->err_status);
pd_chan           723 drivers/dma/pch_dma.c 				tasklet_schedule(&pd_chan->tasklet);
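pd_irq() and pdc_tasklet() split the work top-half/bottom-half: the hard IRQ only records an error per channel with set_bit(0, &pd_chan->err_status) and schedules that channel's tasklet, while the tasklet takes the lock with spin_lock_irqsave() and dispatches on test_and_clear_bit(), so each error is consumed exactly once (pdc_handle_error() on error, pdc_advance_work() otherwise). A sketch using C11 atomics in place of the kernel bitops:

    #include <stdatomic.h>

    struct chan_state {
            atomic_uint err_status;         /* bit 0: error seen by the IRQ */
            /* ... lock, descriptor lists ... */
    };

    /* Top half: cheap and lock-free; defer real work to the bottom half. */
    static void irq_top_half(struct chan_state *c, int error,
                             void (*schedule_bh)(struct chan_state *))
    {
            if (error)
                    atomic_fetch_or(&c->err_status, 1u);    /* set_bit(0, ...) */
            schedule_bh(c);                 /* tasklet_schedule() */
    }

    /* Bottom half: the driver holds the channel lock (irqsave) here. */
    static void bottom_half(struct chan_state *c,
                            void (*handle_error)(struct chan_state *),
                            void (*advance_work)(struct chan_state *))
    {
            /* test_and_clear_bit(0, ...): consume the error exactly once. */
            if (atomic_fetch_and(&c->err_status, ~1u) & 1u)
                    handle_error(c);
            else
                    advance_work(c);
    }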
pd_chan           741 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan;
pd_chan           751 drivers/dma/pch_dma.c 		pd_chan = to_pd_chan(chan);
pd_chan           753 drivers/dma/pch_dma.c 		pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR);
pd_chan           754 drivers/dma/pch_dma.c 		pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR);
pd_chan           755 drivers/dma/pch_dma.c 		pd->ch_regs[i].size = channel_readl(pd_chan, SIZE);
pd_chan           756 drivers/dma/pch_dma.c 		pd->ch_regs[i].next = channel_readl(pd_chan, NEXT);
pd_chan           764 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan;
pd_chan           774 drivers/dma/pch_dma.c 		pd_chan = to_pd_chan(chan);
pd_chan           776 drivers/dma/pch_dma.c 		channel_writel(pd_chan, DEV_ADDR, pd->ch_regs[i].dev_addr);
pd_chan           777 drivers/dma/pch_dma.c 		channel_writel(pd_chan, MEM_ADDR, pd->ch_regs[i].mem_addr);
pd_chan           778 drivers/dma/pch_dma.c 		channel_writel(pd_chan, SIZE, pd->ch_regs[i].size);
pd_chan           779 drivers/dma/pch_dma.c 		channel_writel(pd_chan, NEXT, pd->ch_regs[i].next);
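The save/restore pair walks pd->dma.channels, shadowing the four live per-channel registers into pd->ch_regs[] on suspend and writing them back on resume; the standard pattern for IP blocks that lose register state across a power transition. A self-contained sketch with the MMIO window modeled as an array; NR_CHANNELS is an assumed count.

    #include <stdint.h>

    #define NR_CHANNELS     8               /* assumed channel count */

    enum { R_DEV_ADDR, R_MEM_ADDR, R_SIZE, R_NEXT, R_MAX };

    struct ch_regs { uint32_t dev_addr, mem_addr, size, next; };

    static uint32_t mmio[NR_CHANNELS][R_MAX];   /* stand-in for the MMIO window */
    static uint32_t ch_readl(int ch, int r)           { return mmio[ch][r]; }
    static void ch_writel(int ch, int r, uint32_t v)  { mmio[ch][r] = v; }

    static struct ch_regs saved[NR_CHANNELS];

    static void save_regs(void)             /* suspend */
    {
            for (int i = 0; i < NR_CHANNELS; i++) {
                    saved[i].dev_addr = ch_readl(i, R_DEV_ADDR);
                    saved[i].mem_addr = ch_readl(i, R_MEM_ADDR);
                    saved[i].size     = ch_readl(i, R_SIZE);
                    saved[i].next     = ch_readl(i, R_NEXT);
            }
    }

    static void restore_regs(void)          /* resume */
    {
            for (int i = 0; i < NR_CHANNELS; i++) {
                    ch_writel(i, R_DEV_ADDR, saved[i].dev_addr);
                    ch_writel(i, R_MEM_ADDR, saved[i].mem_addr);
                    ch_writel(i, R_SIZE,     saved[i].size);
                    ch_writel(i, R_NEXT,     saved[i].next);
            }
    }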
pd_chan           888 drivers/dma/pch_dma.c 		struct pch_dma_chan *pd_chan = &pd->channels[i];
pd_chan           890 drivers/dma/pch_dma.c 		pd_chan->chan.device = &pd->dma;
pd_chan           891 drivers/dma/pch_dma.c 		dma_cookie_init(&pd_chan->chan);
pd_chan           893 drivers/dma/pch_dma.c 		pd_chan->membase = &regs->desc[i];
pd_chan           895 drivers/dma/pch_dma.c 		spin_lock_init(&pd_chan->lock);
pd_chan           897 drivers/dma/pch_dma.c 		INIT_LIST_HEAD(&pd_chan->active_list);
pd_chan           898 drivers/dma/pch_dma.c 		INIT_LIST_HEAD(&pd_chan->queue);
pd_chan           899 drivers/dma/pch_dma.c 		INIT_LIST_HEAD(&pd_chan->free_list);
pd_chan           901 drivers/dma/pch_dma.c 		tasklet_init(&pd_chan->tasklet, pdc_tasklet,
pd_chan           902 drivers/dma/pch_dma.c 			     (unsigned long)pd_chan);
pd_chan           903 drivers/dma/pch_dma.c 		list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels);
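Probe wires each channel into the dmaengine framework: chan.device points at the shared struct dma_device, dma_cookie_init() seeds the cookie counter, membase maps the channel's slice of the descriptor register block, and the lock, the three lists, and the tasklet are initialized before the channel is linked into pd->dma.channels. The (unsigned long)pd_chan cast is the old-style tasklet API; newer kernels would use tasklet_setup() with a from_tasklet() lookup in the handler. A sketch of the same init shape, reusing the list helpers above:

    struct chan_init_sketch {
            struct list_head active_list, queue, free_list;
            struct list_head device_node;   /* link into the device's channels */
            void *membase;                  /* this channel's register slice */
    };

    static void init_list_head(struct list_head *h) { h->next = h->prev = h; }

    static void init_channel_sketch(struct chan_init_sketch *c,
                                    void *regs_slice,
                                    struct list_head *device_channels)
    {
            c->membase = regs_slice;

            init_list_head(&c->active_list);
            init_list_head(&c->queue);
            init_list_head(&c->free_list);

            /* tasklet_init(&c->tasklet, pdc_tasklet, (unsigned long)c)
             * goes here in the driver; omitted in this userspace sketch. */

            list_add_tail_sketch(&c->device_node, device_channels);
    }

pch_dma_remove() unwinds this in reverse; the tasklet_kill() in the last lines of the listing guarantees no handler is still running when the channel memory goes away.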
pd_chan           943 drivers/dma/pch_dma.c 	struct pch_dma_chan *pd_chan;
pd_chan           953 drivers/dma/pch_dma.c 			pd_chan = to_pd_chan(chan);
pd_chan           955 drivers/dma/pch_dma.c 			tasklet_kill(&pd_chan->tasklet);