td_chan           103 drivers/dma/timb_dma.c static struct timb_dma *tdchantotd(struct timb_dma_chan *td_chan)
td_chan           105 drivers/dma/timb_dma.c 	int id = td_chan->chan.chan_id;
td_chan           106 drivers/dma/timb_dma.c 	return (struct timb_dma *)((u8 *)td_chan -
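
The pointer arithmetic begun at line 106 only works because the driver lays its channels out as a trailing array directly behind the struct timb_dma allocation. A minimal, self-contained sketch of that layout assumption follows; the struct bodies are reduced stand-ins, not the driver's real fields.

/* Reduced stand-ins: the only property the arithmetic relies on is that
 * the per-channel structs sit in a trailing array right behind the
 * device-wide struct, i.e.  | struct timb_dma | chan 0 | chan 1 | ... |
 */
struct timb_dma_chan {
	int placeholder_chan_state;
};

struct timb_dma {
	int placeholder_device_state;
	struct timb_dma_chan channels[];	/* trailing flexible array */
};

/* Same computation as tdchantotd(): step back over 'id' channel structs
 * plus the device header to recover the containing struct timb_dma.
 */
static struct timb_dma *parent_of(struct timb_dma_chan *td_chan, int id)
{
	return (struct timb_dma *)((unsigned char *)td_chan
				   - id * sizeof(struct timb_dma_chan)
				   - sizeof(struct timb_dma));
}
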
td_chan           111 drivers/dma/timb_dma.c static void __td_enable_chan_irq(struct timb_dma_chan *td_chan)
td_chan           113 drivers/dma/timb_dma.c 	int id = td_chan->chan.chan_id;
td_chan           114 drivers/dma/timb_dma.c 	struct timb_dma *td = tdchantotd(td_chan);
td_chan           120 drivers/dma/timb_dma.c 	dev_dbg(chan2dev(&td_chan->chan), "Enabling irq: %d, IER: 0x%x\n", id,
td_chan           126 drivers/dma/timb_dma.c static bool __td_dma_done_ack(struct timb_dma_chan *td_chan)
td_chan           128 drivers/dma/timb_dma.c 	int id = td_chan->chan.chan_id;
td_chan           129 drivers/dma/timb_dma.c 	struct timb_dma *td = (struct timb_dma *)((u8 *)td_chan -
td_chan           134 drivers/dma/timb_dma.c 	dev_dbg(chan2dev(&td_chan->chan), "Checking irq: %d, td: %p\n", id, td);
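
Note that __td_dma_done_ack() at line 129 open-codes the same back-pointer arithmetic instead of calling tdchantotd(). The "Enabling irq: %d, IER: 0x%x" message at line 120 suggests __td_enable_chan_irq() does a read-modify-write of a shared interrupt-enable register keyed by the channel id; a hedged sketch of that pattern is below (TIMBDMA_IER, its offset and the one-bit-per-channel layout are placeholders, not taken from the listing).

#include <linux/io.h>

#define TIMBDMA_IER	0x08	/* placeholder offset, not from the listing */

/* Sketch only: read the shared interrupt-enable register, set the bit
 * for this channel, write it back.
 */
static void enable_chan_irq_sketch(void __iomem *membase, int id)
{
	u32 ier = ioread32(membase + TIMBDMA_IER);

	ier |= 1 << id;
	iowrite32(ier, membase + TIMBDMA_IER);
}
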
td_chan           145 drivers/dma/timb_dma.c static int td_fill_desc(struct timb_dma_chan *td_chan, u8 *dma_desc,
td_chan           149 drivers/dma/timb_dma.c 		dev_err(chan2dev(&td_chan->chan), "Too big sg element\n");
td_chan           155 drivers/dma/timb_dma.c 		dev_err(chan2dev(&td_chan->chan), "Incorrect length: %d\n",
td_chan           160 drivers/dma/timb_dma.c 	dev_dbg(chan2dev(&td_chan->chan), "desc: %p, addr: 0x%llx\n",
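
The two dev_err() paths above imply per-element validation before an sg entry is packed into a hardware descriptor: the element must fit the descriptor's length field, and its length must satisfy some granularity constraint. A hedged sketch of those checks follows; the 0xffff limit and the word-alignment rule are assumptions, not visible in the listing.

#include <linux/device.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

#define TIMB_DMA_MAX_ELEM_LEN	0xffff	/* placeholder limit */

/* Sketch only: reject sg elements the (assumed) descriptor format
 * cannot express.
 */
static int check_sg_elem_sketch(struct device *dev, struct scatterlist *sg)
{
	if (sg_dma_len(sg) > TIMB_DMA_MAX_ELEM_LEN) {
		dev_err(dev, "Too big sg element\n");
		return -EINVAL;
	}

	if (sg_dma_len(sg) % sizeof(u32)) {	/* assumed word granularity */
		dev_err(dev, "Incorrect length: %d\n", sg_dma_len(sg));
		return -EINVAL;
	}

	return 0;
}
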
td_chan           178 drivers/dma/timb_dma.c static void __td_start_dma(struct timb_dma_chan *td_chan)
td_chan           182 drivers/dma/timb_dma.c 	if (td_chan->ongoing) {
td_chan           183 drivers/dma/timb_dma.c 		dev_err(chan2dev(&td_chan->chan),
td_chan           188 drivers/dma/timb_dma.c 	td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc,
td_chan           191 drivers/dma/timb_dma.c 	dev_dbg(chan2dev(&td_chan->chan),
td_chan           193 drivers/dma/timb_dma.c 		td_chan, td_chan->chan.chan_id, td_chan->membase);
td_chan           195 drivers/dma/timb_dma.c 	if (td_chan->direction == DMA_DEV_TO_MEM) {
td_chan           198 drivers/dma/timb_dma.c 		iowrite32(0, td_chan->membase + TIMBDMA_OFFS_RX_DHAR);
td_chan           199 drivers/dma/timb_dma.c 		iowrite32(td_desc->txd.phys, td_chan->membase +
td_chan           202 drivers/dma/timb_dma.c 		iowrite32(td_chan->bytes_per_line, td_chan->membase +
td_chan           205 drivers/dma/timb_dma.c 		iowrite32(TIMBDMA_RX_EN, td_chan->membase + TIMBDMA_OFFS_RX_ER);
td_chan           208 drivers/dma/timb_dma.c 		iowrite32(0, td_chan->membase + TIMBDMA_OFFS_TX_DHAR);
td_chan           209 drivers/dma/timb_dma.c 		iowrite32(td_desc->txd.phys, td_chan->membase +
td_chan           213 drivers/dma/timb_dma.c 	td_chan->ongoing = true;
td_chan           216 drivers/dma/timb_dma.c 		__td_enable_chan_irq(td_chan);
td_chan           219 drivers/dma/timb_dma.c static void __td_finish(struct timb_dma_chan *td_chan)
td_chan           226 drivers/dma/timb_dma.c 	if (list_empty(&td_chan->active_list))
td_chan           229 drivers/dma/timb_dma.c 	td_desc = list_entry(td_chan->active_list.next, struct timb_dma_desc,
td_chan           233 drivers/dma/timb_dma.c 	dev_dbg(chan2dev(&td_chan->chan), "descriptor %u complete\n",
td_chan           237 drivers/dma/timb_dma.c 	if (td_chan->direction == DMA_DEV_TO_MEM)
td_chan           238 drivers/dma/timb_dma.c 		iowrite32(0, td_chan->membase + TIMBDMA_OFFS_RX_ER);
td_chan           244 drivers/dma/timb_dma.c 	td_chan->ongoing = false;
td_chan           248 drivers/dma/timb_dma.c 	list_move(&td_desc->desc_node, &td_chan->free_list);
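
Pieced together, the __td_finish() fragments follow the usual dmaengine provider completion pattern: take the head of active_list, stop the hardware if needed (the RX enable write at line 238), complete the cookie, recycle the descriptor onto free_list and run the client callback. A hedged reconstruction of the list/callback half; the cookie handling and the legacy single-callback form are assumptions, not visible above.

/* Sketch only: uses the driver's own struct timb_dma_chan/timb_dma_desc
 * (defined earlier in timb_dma.c) plus the cookie helper from
 * drivers/dma/dmaengine.h.
 */
static void finish_desc_sketch(struct timb_dma_chan *td_chan,
			       struct timb_dma_desc *td_desc)
{
	struct dma_async_tx_descriptor *txd = &td_desc->txd;

	dma_cookie_complete(txd);			/* mark cookie done */
	list_move(&td_desc->desc_node, &td_chan->free_list);

	if (txd->callback)				/* legacy callback form */
		txd->callback(txd->callback_param);
}
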
td_chan           264 drivers/dma/timb_dma.c 		struct timb_dma_chan *td_chan = td->channels + i;
td_chan           265 drivers/dma/timb_dma.c 		if (td_chan->ongoing) {
td_chan           267 drivers/dma/timb_dma.c 				list_entry(td_chan->active_list.next,
td_chan           277 drivers/dma/timb_dma.c static void __td_start_next(struct timb_dma_chan *td_chan)
td_chan           281 drivers/dma/timb_dma.c 	BUG_ON(list_empty(&td_chan->queue));
td_chan           282 drivers/dma/timb_dma.c 	BUG_ON(td_chan->ongoing);
td_chan           284 drivers/dma/timb_dma.c 	td_desc = list_entry(td_chan->queue.next, struct timb_dma_desc,
td_chan           287 drivers/dma/timb_dma.c 	dev_dbg(chan2dev(&td_chan->chan), "%s: started %u\n",
td_chan           290 drivers/dma/timb_dma.c 	list_move(&td_desc->desc_node, &td_chan->active_list);
td_chan           291 drivers/dma/timb_dma.c 	__td_start_dma(td_chan);
td_chan           298 drivers/dma/timb_dma.c 	struct timb_dma_chan *td_chan = container_of(txd->chan,
td_chan           302 drivers/dma/timb_dma.c 	spin_lock_bh(&td_chan->lock);
td_chan           305 drivers/dma/timb_dma.c 	if (list_empty(&td_chan->active_list)) {
td_chan           308 drivers/dma/timb_dma.c 		list_add_tail(&td_desc->desc_node, &td_chan->active_list);
td_chan           309 drivers/dma/timb_dma.c 		__td_start_dma(td_chan);
td_chan           314 drivers/dma/timb_dma.c 		list_add_tail(&td_desc->desc_node, &td_chan->queue);
td_chan           317 drivers/dma/timb_dma.c 	spin_unlock_bh(&td_chan->lock);
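
The td_tx_submit() fragments show the start-or-queue decision a dmaengine provider makes at submit time: an idle channel is kicked immediately, a busy one just queues the descriptor for __td_start_next() to pick up later. A hedged reconstruction of the whole callback; the dma_cookie_assign() step is not visible in the listing and is assumed from the standard provider pattern.

/* Sketch only: assign a cookie under the channel lock, then either start
 * the hardware (idle channel) or park the descriptor on the software
 * queue for later.
 */
static dma_cookie_t tx_submit_sketch(struct dma_async_tx_descriptor *txd)
{
	struct timb_dma_chan *td_chan = container_of(txd->chan,
		struct timb_dma_chan, chan);
	struct timb_dma_desc *td_desc = container_of(txd,
		struct timb_dma_desc, txd);
	dma_cookie_t cookie;

	spin_lock_bh(&td_chan->lock);
	cookie = dma_cookie_assign(txd);

	if (list_empty(&td_chan->active_list)) {
		list_add_tail(&td_desc->desc_node, &td_chan->active_list);
		__td_start_dma(td_chan);	/* idle: kick hardware now */
	} else {
		list_add_tail(&td_desc->desc_node, &td_chan->queue);
	}

	spin_unlock_bh(&td_chan->lock);

	return cookie;
}
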
td_chan           322 drivers/dma/timb_dma.c static struct timb_dma_desc *td_alloc_init_desc(struct timb_dma_chan *td_chan)
td_chan           324 drivers/dma/timb_dma.c 	struct dma_chan *chan = &td_chan->chan;
td_chan           332 drivers/dma/timb_dma.c 	td_desc->desc_list_len = td_chan->desc_elems * TIMB_DMA_DESC_SIZE;
td_chan           370 drivers/dma/timb_dma.c static void td_desc_put(struct timb_dma_chan *td_chan,
td_chan           373 drivers/dma/timb_dma.c 	dev_dbg(chan2dev(&td_chan->chan), "Putting desc: %p\n", td_desc);
td_chan           375 drivers/dma/timb_dma.c 	spin_lock_bh(&td_chan->lock);
td_chan           376 drivers/dma/timb_dma.c 	list_add(&td_desc->desc_node, &td_chan->free_list);
td_chan           377 drivers/dma/timb_dma.c 	spin_unlock_bh(&td_chan->lock);
td_chan           380 drivers/dma/timb_dma.c static struct timb_dma_desc *td_desc_get(struct timb_dma_chan *td_chan)
td_chan           385 drivers/dma/timb_dma.c 	spin_lock_bh(&td_chan->lock);
td_chan           386 drivers/dma/timb_dma.c 	list_for_each_entry_safe(td_desc, _td_desc, &td_chan->free_list,
td_chan           393 drivers/dma/timb_dma.c 		dev_dbg(chan2dev(&td_chan->chan), "desc %p not ACKed\n",
td_chan           396 drivers/dma/timb_dma.c 	spin_unlock_bh(&td_chan->lock);
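
td_desc_put() and td_desc_get() implement a simple free-list cache guarded by the channel lock. The "desc %p not ACKed" message at line 393 implies td_desc_get() skips descriptors whose previous use the client has not acknowledged yet; below is a hedged reconstruction using the standard async_tx_test_ack() helper (the exact test used by the driver is an assumption).

/* Sketch only: take the first ACKed descriptor off free_list, or return
 * NULL if none is reusable yet.
 */
static struct timb_dma_desc *desc_get_sketch(struct timb_dma_chan *td_chan)
{
	struct timb_dma_desc *td_desc, *_td_desc, *ret = NULL;

	spin_lock_bh(&td_chan->lock);
	list_for_each_entry_safe(td_desc, _td_desc, &td_chan->free_list,
				 desc_node) {
		if (async_tx_test_ack(&td_desc->txd)) {
			list_del(&td_desc->desc_node);	/* claim it */
			ret = td_desc;
			break;
		}
		dev_dbg(chan2dev(&td_chan->chan), "desc %p not ACKed\n",
			td_desc);
	}
	spin_unlock_bh(&td_chan->lock);

	return ret;
}
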
td_chan           403 drivers/dma/timb_dma.c 	struct timb_dma_chan *td_chan =
td_chan           409 drivers/dma/timb_dma.c 	BUG_ON(!list_empty(&td_chan->free_list));
td_chan           410 drivers/dma/timb_dma.c 	for (i = 0; i < td_chan->descs; i++) {
td_chan           411 drivers/dma/timb_dma.c 		struct timb_dma_desc *td_desc = td_alloc_init_desc(td_chan);
td_chan           422 drivers/dma/timb_dma.c 		td_desc_put(td_chan, td_desc);
td_chan           425 drivers/dma/timb_dma.c 	spin_lock_bh(&td_chan->lock);
td_chan           427 drivers/dma/timb_dma.c 	spin_unlock_bh(&td_chan->lock);
td_chan           434 drivers/dma/timb_dma.c 	struct timb_dma_chan *td_chan =
td_chan           442 drivers/dma/timb_dma.c 	BUG_ON(!list_empty(&td_chan->active_list));
td_chan           443 drivers/dma/timb_dma.c 	BUG_ON(!list_empty(&td_chan->queue));
td_chan           445 drivers/dma/timb_dma.c 	spin_lock_bh(&td_chan->lock);
td_chan           446 drivers/dma/timb_dma.c 	list_splice_init(&td_chan->free_list, &list);
td_chan           447 drivers/dma/timb_dma.c 	spin_unlock_bh(&td_chan->lock);
td_chan           472 drivers/dma/timb_dma.c 	struct timb_dma_chan *td_chan =
td_chan           476 drivers/dma/timb_dma.c 	spin_lock_bh(&td_chan->lock);
td_chan           478 drivers/dma/timb_dma.c 	if (!list_empty(&td_chan->active_list))
td_chan           480 drivers/dma/timb_dma.c 		if (__td_dma_done_ack(td_chan))
td_chan           481 drivers/dma/timb_dma.c 			__td_finish(td_chan);
td_chan           483 drivers/dma/timb_dma.c 	if (list_empty(&td_chan->active_list) && !list_empty(&td_chan->queue))
td_chan           484 drivers/dma/timb_dma.c 		__td_start_next(td_chan);
td_chan           486 drivers/dma/timb_dma.c 	spin_unlock_bh(&td_chan->lock);
td_chan           494 drivers/dma/timb_dma.c 	struct timb_dma_chan *td_chan =
td_chan           507 drivers/dma/timb_dma.c 	if (td_chan->direction != direction) {
td_chan           513 drivers/dma/timb_dma.c 	td_desc = td_desc_get(td_chan);
td_chan           528 drivers/dma/timb_dma.c 		err = td_fill_desc(td_chan, td_desc->desc_list + desc_usage, sg,
td_chan           533 drivers/dma/timb_dma.c 			td_desc_put(td_chan, td_desc);
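
For context, this is how a dmaengine client would exercise the prep/submit path shown above through the generic API (the buffer setup, DMA_DEV_TO_MEM direction and error handling here are illustrative assumptions, not taken from any particular timberdale client).

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Sketch only: map one buffer, build a slave_sg descriptor (which ends
 * up in the prep routine shown above), submit it and kick the engine.
 */
static int start_rx_sketch(struct device *dev, struct dma_chan *chan,
			   void *buf, size_t len)
{
	struct dma_async_tx_descriptor *txd;
	struct scatterlist sg;
	dma_cookie_t cookie;

	sg_init_one(&sg, buf, len);
	if (dma_map_sg(dev, &sg, 1, DMA_FROM_DEVICE) != 1)
		return -ENOMEM;

	txd = dmaengine_prep_slave_sg(chan, &sg, 1, DMA_DEV_TO_MEM,
				      DMA_PREP_INTERRUPT);
	if (!txd) {
		dma_unmap_sg(dev, &sg, 1, DMA_FROM_DEVICE);
		return -EBUSY;
	}

	cookie = dmaengine_submit(txd);		/* ends up in td_tx_submit() */
	if (dma_submit_error(cookie)) {
		dma_unmap_sg(dev, &sg, 1, DMA_FROM_DEVICE);
		return -EIO;
	}

	dma_async_issue_pending(chan);		/* let the transfer run */
	return 0;
}
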
td_chan           547 drivers/dma/timb_dma.c 	struct timb_dma_chan *td_chan =
td_chan           554 drivers/dma/timb_dma.c 	spin_lock_bh(&td_chan->lock);
td_chan           555 drivers/dma/timb_dma.c 	list_for_each_entry_safe(td_desc, _td_desc, &td_chan->queue,
td_chan           557 drivers/dma/timb_dma.c 		list_move(&td_desc->desc_node, &td_chan->free_list);
td_chan           560 drivers/dma/timb_dma.c 	__td_finish(td_chan);
td_chan           561 drivers/dma/timb_dma.c 	spin_unlock_bh(&td_chan->lock);
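
The fragments at lines 547-561 flush the software queue back to free_list and finish the in-flight descriptor, which is the shape of a device_terminate_all implementation. On the client side this path is reached through the generic teardown helpers; a minimal hedged sketch:

#include <linux/dmaengine.h>

/* Sketch only: terminate whatever is queued/active on the channel and
 * give the channel back.  dmaengine_terminate_sync() also waits for any
 * in-flight completion callbacks before returning.
 */
static void stop_and_release_sketch(struct dma_chan *chan)
{
	dmaengine_terminate_sync(chan);
	dma_release_channel(chan);
}
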
td_chan           582 drivers/dma/timb_dma.c 			struct timb_dma_chan *td_chan = td->channels + i;
td_chan           583 drivers/dma/timb_dma.c 			spin_lock(&td_chan->lock);
td_chan           584 drivers/dma/timb_dma.c 			__td_finish(td_chan);
td_chan           585 drivers/dma/timb_dma.c 			if (!list_empty(&td_chan->queue))
td_chan           586 drivers/dma/timb_dma.c 				__td_start_next(td_chan);
td_chan           587 drivers/dma/timb_dma.c 			spin_unlock(&td_chan->lock);
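
The tasklet fragments at lines 582-587 walk the channels, complete the active descriptor and start the next queued one, taking the per-channel lock with plain spin_lock() because the tasklet already runs in softirq context. The hard-irq side is not part of this listing; below is a heavily hedged sketch of the usual split (the context struct, register name and ack behaviour are all placeholders).

#include <linux/interrupt.h>
#include <linux/io.h>

#define DMA_ISR_SKETCH	0x00	/* placeholder status-register offset */

/* Minimal stand-in context; the real driver keeps equivalent state in
 * struct timb_dma.
 */
struct dma_irq_ctx_sketch {
	void __iomem *membase;
	struct tasklet_struct tasklet;
};

/* Sketch only: the hard irq handler just acknowledges the hardware and
 * defers descriptor completion (which takes locks and runs callbacks)
 * to the tasklet above.
 */
static irqreturn_t dma_irq_sketch(int irq, void *data)
{
	struct dma_irq_ctx_sketch *ctx = data;
	u32 isr = ioread32(ctx->membase + DMA_ISR_SKETCH);

	if (!isr)
		return IRQ_NONE;			/* not our interrupt */

	iowrite32(isr, ctx->membase + DMA_ISR_SKETCH);	/* ack */
	tasklet_schedule(&ctx->tasklet);		/* finish later */
	return IRQ_HANDLED;
}
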
td_chan           684 drivers/dma/timb_dma.c 		struct timb_dma_chan *td_chan = &td->channels[i];
td_chan           695 drivers/dma/timb_dma.c 		td_chan->chan.device = &td->dma;
td_chan           696 drivers/dma/timb_dma.c 		dma_cookie_init(&td_chan->chan);
td_chan           697 drivers/dma/timb_dma.c 		spin_lock_init(&td_chan->lock);
td_chan           698 drivers/dma/timb_dma.c 		INIT_LIST_HEAD(&td_chan->active_list);
td_chan           699 drivers/dma/timb_dma.c 		INIT_LIST_HEAD(&td_chan->queue);
td_chan           700 drivers/dma/timb_dma.c 		INIT_LIST_HEAD(&td_chan->free_list);
td_chan           702 drivers/dma/timb_dma.c 		td_chan->descs = pchan->descriptors;
td_chan           703 drivers/dma/timb_dma.c 		td_chan->desc_elems = pchan->descriptor_elements;
td_chan           704 drivers/dma/timb_dma.c 		td_chan->bytes_per_line = pchan->bytes_per_line;
td_chan           705 drivers/dma/timb_dma.c 		td_chan->direction = pchan->rx ? DMA_DEV_TO_MEM :
td_chan           708 drivers/dma/timb_dma.c 		td_chan->membase = td->membase +
td_chan           713 drivers/dma/timb_dma.c 			i, td_chan->membase);
td_chan           715 drivers/dma/timb_dma.c 		list_add_tail(&td_chan->chan.device_node, &td->dma.channels);
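
After the per-channel loop at lines 684-715, a dmaengine probe normally advertises capabilities, wires up the dma_device callbacks and registers the device so clients can request the channels just initialised. A hedged sketch of that closing step (the capability mask is an assumption; only td->dma and td->dma.channels are visible in the listing).

#include <linux/dmaengine.h>

/* Sketch only: declare slave capability and register the dma_device.
 * Callback assignment (device_prep_slave_sg, device_issue_pending, ...)
 * is omitted because the driver's function names are not shown above.
 */
static int register_dma_device_sketch(struct timb_dma *td)
{
	dma_cap_set(DMA_SLAVE, td->dma.cap_mask);
	dma_cap_set(DMA_PRIVATE, td->dma.cap_mask);

	return dma_async_device_register(&td->dma);
}
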