Lines matching refs:txd

401 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx); in pl08x_start_next_txd() local
404 list_del(&txd->vd.node); in pl08x_start_next_txd()
406 plchan->at = txd; in pl08x_start_next_txd()
412 pl08x_write_lli(pl08x, phychan, &txd->llis_va[0], txd->ccfg); in pl08x_start_next_txd()
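For orientation, a hedged reconstruction of how the four pl08x_start_next_txd() hits above fit together: take the next virtual descriptor, mark it as the channel's active txd, and program its first LLI into the physical channel. Only the quoted lines are from the driver; the locals pl08x, phychan and vd and the vchan_next_desc() call are assumptions about typical virt-dma usage.

static void pl08x_start_next_txd(struct pl08x_dma_chan *plchan)
{
        struct pl08x_driver_data *pl08x = plchan->host;           /* assumed */
        struct pl08x_phy_chan *phychan = plchan->phychan;         /* assumed */
        struct virt_dma_desc *vd = vchan_next_desc(&plchan->vc);  /* assumed */
        struct pl08x_txd *txd = to_pl08x_txd(&vd->tx);            /* line 401 */

        /* Take the descriptor off the issued list and mark it active */
        list_del(&txd->vd.node);                                  /* line 404 */
        plchan->at = txd;                                         /* line 406 */

        /* Program the first LLI of the chain into the physical channel */
        pl08x_write_lli(pl08x, phychan, &txd->llis_va[0], txd->ccfg);  /* line 412 */
}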
534 struct pl08x_txd *txd; in pl08x_getbytes_chan() local
540 txd = plchan->at; in pl08x_getbytes_chan()
542 if (!ch || !txd) in pl08x_getbytes_chan()
562 llis_va = txd->llis_va; in pl08x_getbytes_chan()
563 llis_bus = txd->llis_bus; in pl08x_getbytes_chan()
806 struct pl08x_txd *txd; member
850 u32 *llis_va = bd->txd->llis_va + offset; in pl08x_fill_lli_for_desc()
851 dma_addr_t llis_bus = bd->txd->llis_bus; in pl08x_fill_lli_for_desc()
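Lines 806 and 850/851 show an LLI build-state structure (bd) that keeps a back-pointer to the owning txd, and that each LLI is written at a word offset into the descriptor's single pool allocation. A hedged sketch of one entry being laid down; the struct name, the simplified signature, the offset arithmetic and the next-pointer computation are assumptions, while PL080_LLI_LLI as an index is confirmed by line 1143 further down.

struct pl08x_lli_build_data {
        struct pl08x_txd *txd;  /* line 806: back-pointer to the descriptor being built */
        u32 lli_bus;            /* assumed: AHB master bit OR'd into next-LLI pointers */
};

static void pl08x_fill_lli_for_desc(struct pl08x_driver_data *pl08x,
                                    struct pl08x_lli_build_data *bd,
                                    int num_llis)
{
        u32 offset = num_llis * pl08x->lli_words;        /* assumed offset arithmetic */
        u32 *llis_va = bd->txd->llis_va + offset;        /* line 850 */
        dma_addr_t llis_bus = bd->txd->llis_bus;         /* line 851 */

        /* Assumed: the LLI's next pointer is the bus address of the following
         * entry in the same pool block, so the controller chains through the
         * list without CPU involvement. Source, destination and control words
         * are written at their own indices (not shown in the listing). */
        llis_va[PL080_LLI_LLI] = (llis_bus + sizeof(u32) * pl08x->lli_words *
                                  (num_llis + 1)) | bd->lli_bus;
}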
929 struct pl08x_txd *txd) in pl08x_fill_llis_for_desc() argument
939 txd->llis_va = dma_pool_alloc(pl08x->pool, GFP_NOWAIT, &txd->llis_bus); in pl08x_fill_llis_for_desc()
940 if (!txd->llis_va) { in pl08x_fill_llis_for_desc()
945 bd.txd = txd; in pl08x_fill_llis_for_desc()
947 cctl = txd->cctl; in pl08x_fill_llis_for_desc()
959 list_for_each_entry(dsg, &txd->dsg_list, node) { in pl08x_fill_llis_for_desc()
961 cctl = txd->cctl; in pl08x_fill_llis_for_desc()
1006 u32 fc = (txd->ccfg & PL080_CONFIG_FLOW_CONTROL_MASK) >> in pl08x_fill_llis_for_desc()
1138 llis_va = txd->llis_va; in pl08x_fill_llis_for_desc()
1141 if (txd->cyclic) { in pl08x_fill_llis_for_desc()
1143 last_lli[PL080_LLI_LLI] = txd->llis_bus | bd.lli_bus; in pl08x_fill_llis_for_desc()
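Pulling the pl08x_fill_llis_for_desc() hits together: the whole LLI chain for a descriptor lives in one dma_pool allocation, LLIs are emitted per scatter entry on dsg_list, and the last LLI either loops back to the first (cyclic) or terminates the chain. A hedged outline; pl08x_lli_emit_sg() is a hypothetical stand-in for the per-segment work of lines 961-1006, and the num_llis bookkeeping, the non-cyclic termination and the return convention are assumptions.

/* hypothetical helper, not in the driver */
static int pl08x_lli_emit_sg(struct pl08x_driver_data *pl08x,
                             struct pl08x_lli_build_data *bd,
                             struct pl08x_sg *dsg, u32 cctl, int num_llis);

static int pl08x_fill_llis_for_desc(struct pl08x_driver_data *pl08x,
                                    struct pl08x_txd *txd)                /* line 929 */
{
        struct pl08x_lli_build_data bd;
        struct pl08x_sg *dsg;
        int num_llis = 0;                                 /* assumed bookkeeping */
        u32 *llis_va, *last_lli;
        u32 cctl;

        /* One pool allocation holds every LLI for this descriptor */
        txd->llis_va = dma_pool_alloc(pl08x->pool, GFP_NOWAIT, &txd->llis_bus); /* 939 */
        if (!txd->llis_va)                                                /* line 940 */
                return 0;

        bd.txd = txd;                                                     /* line 945 */
        bd.lli_bus = 0;                /* assumed: AHB master selection for LLI fetches */
        cctl = txd->cctl;                                                 /* line 947 */

        /* Emit LLIs for each scatter entry queued on the descriptor */
        list_for_each_entry(dsg, &txd->dsg_list, node) {                  /* line 959 */
                cctl = txd->cctl;                                         /* line 961 */
                num_llis = pl08x_lli_emit_sg(pl08x, &bd, dsg, cctl, num_llis);
        }

        llis_va = txd->llis_va;                                           /* line 1138 */
        last_lli = llis_va + (num_llis - 1) * pl08x->lli_words;           /* assumed */

        if (txd->cyclic)                                                  /* line 1141 */
                /* Close the ring: the last LLI points back at the first */
                last_lli[PL080_LLI_LLI] = txd->llis_bus | bd.lli_bus;     /* line 1143 */
        else
                last_lli[PL080_LLI_LLI] = 0;  /* assumed: NULL next ends a one-shot chain */

        return num_llis;                                                  /* assumed */
}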
1157 struct pl08x_txd *txd) in pl08x_free_txd() argument
1161 if (txd->llis_va) in pl08x_free_txd()
1162 dma_pool_free(pl08x->pool, txd->llis_va, txd->llis_bus); in pl08x_free_txd()
1164 list_for_each_entry_safe(dsg, _dsg, &txd->dsg_list, node) { in pl08x_free_txd()
1169 kfree(txd); in pl08x_free_txd()
1174 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx); in pl08x_desc_free() local
1178 if (!txd->done) in pl08x_desc_free()
1181 pl08x_free_txd(plchan->host, txd); in pl08x_desc_free()
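The teardown pair, reconstructed from the pl08x_free_txd() and pl08x_desc_free() hits: the LLI block goes back to the DMA pool, every scatter entry on dsg_list is freed, then the descriptor itself. The list_del()/kfree() bodies, the to_pl08x_chan() lookup and the not-done cleanup call are assumptions; the rest is quoted.

static void pl08x_free_txd(struct pl08x_driver_data *pl08x,
                           struct pl08x_txd *txd)                 /* line 1157 */
{
        struct pl08x_sg *dsg, *_dsg;

        /* Return the LLI block to the DMA pool if it was ever allocated */
        if (txd->llis_va)                                         /* line 1161 */
                dma_pool_free(pl08x->pool, txd->llis_va, txd->llis_bus);  /* line 1162 */

        /* Free every scatter entry hanging off the descriptor */
        list_for_each_entry_safe(dsg, _dsg, &txd->dsg_list, node) {  /* line 1164 */
                list_del(&dsg->node);                             /* assumed */
                kfree(dsg);                                       /* assumed */
        }

        kfree(txd);                                               /* line 1169 */
}

/* virt-dma callback: runs once the descriptor is retired */
static void pl08x_desc_free(struct virt_dma_desc *vd)
{
        struct pl08x_txd *txd = to_pl08x_txd(&vd->tx);            /* line 1174 */
        struct pl08x_dma_chan *plchan = to_pl08x_chan(vd->tx.chan);  /* assumed */

        if (!txd->done)                                           /* line 1178 */
                pl08x_release_mux(plchan);  /* assumed: undo slave setup for an aborted txd */

        pl08x_free_txd(plchan->host, txd);                        /* line 1181 */
}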
1244 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx); in pl08x_dma_tx_status() local
1247 list_for_each_entry(dsg, &txd->dsg_list, node) in pl08x_dma_tx_status()
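The tx_status hits suggest residue reporting: for a descriptor still queued on the vchan, the remaining byte count is the sum of its scatter entries. A minimal hedged sketch of that summation; the helper name and the dsg->len field name are assumptions.

static size_t pl08x_txd_size(struct pl08x_txd *txd)    /* hypothetical helper */
{
        struct pl08x_sg *dsg;
        size_t bytes = 0;

        /* Lines 1244/1247: walk the descriptor's dsg_list and total the lengths */
        list_for_each_entry(dsg, &txd->dsg_list, node)
                bytes += dsg->len;                      /* field name assumed */

        return bytes;
}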
1407 struct pl08x_txd *txd = kzalloc(sizeof(*txd), GFP_NOWAIT); in pl08x_get_txd() local
1409 if (txd) { in pl08x_get_txd()
1410 INIT_LIST_HEAD(&txd->dsg_list); in pl08x_get_txd()
1413 txd->ccfg = PL080_CONFIG_ERR_IRQ_MASK | in pl08x_get_txd()
1416 return txd; in pl08x_get_txd()
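Descriptor allocation is the simplest piece; the hits above almost spell out the whole function. Reconstructed here with the second ccfg flag assumed, since the listing shows only the error-IRQ mask on line 1413.

static struct pl08x_txd *pl08x_get_txd(struct pl08x_dma_chan *plchan)
{
        struct pl08x_txd *txd = kzalloc(sizeof(*txd), GFP_NOWAIT);   /* line 1407 */

        if (txd) {                                                   /* line 1409 */
                INIT_LIST_HEAD(&txd->dsg_list);                      /* line 1410 */

                /* Line 1413 shows the error-IRQ mask; the terminal-count
                 * mask is assumed to be OR'd in on the continuation line */
                txd->ccfg = PL080_CONFIG_ERR_IRQ_MASK |
                            PL080_CONFIG_TC_IRQ_MASK;
        }
        return txd;                                                  /* line 1416 */
}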
1428 struct pl08x_txd *txd; in pl08x_prep_dma_memcpy() local
1432 txd = pl08x_get_txd(plchan); in pl08x_prep_dma_memcpy()
1433 if (!txd) { in pl08x_prep_dma_memcpy()
1441 pl08x_free_txd(pl08x, txd); in pl08x_prep_dma_memcpy()
1446 list_add_tail(&dsg->node, &txd->dsg_list); in pl08x_prep_dma_memcpy()
1453 txd->ccfg |= PL080_FLOW_MEM2MEM << PL080_CONFIG_FLOW_CONTROL_SHIFT; in pl08x_prep_dma_memcpy()
1454 txd->cctl = pl08x->pd->memcpy_channel.cctl_memcpy & in pl08x_prep_dma_memcpy()
1458 txd->cctl |= PL080_CONTROL_SRC_INCR | PL080_CONTROL_DST_INCR; in pl08x_prep_dma_memcpy()
1461 txd->cctl |= pl08x_select_bus(pl08x->mem_buses, in pl08x_prep_dma_memcpy()
1464 ret = pl08x_fill_llis_for_desc(plchan->host, txd); in pl08x_prep_dma_memcpy()
1466 pl08x_free_txd(pl08x, txd); in pl08x_prep_dma_memcpy()
1470 return vchan_tx_prep(&plchan->vc, &txd->vd, flags); in pl08x_prep_dma_memcpy()
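The memcpy prep path, reconstructed from the hits above: allocate a txd, attach a single scatter entry describing the copy, set mem-to-mem flow control and the platform cctl template, build the LLIs, and hand the descriptor to virt-dma. The single pl08x_sg with src/dst/len fields and the error unwinding are assumptions; the ccfg/cctl lines and the vchan_tx_prep() return are quoted.

static struct dma_async_tx_descriptor *pl08x_prep_dma_memcpy(
                struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
                size_t len, unsigned long flags)
{
        struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);      /* assumed */
        struct pl08x_driver_data *pl08x = plchan->host;           /* assumed */
        struct pl08x_sg *dsg;
        struct pl08x_txd *txd;
        int ret;

        txd = pl08x_get_txd(plchan);                              /* line 1432 */
        if (!txd)                                                 /* line 1433 */
                return NULL;

        /* One scatter entry describes the whole copy (fields assumed) */
        dsg = kzalloc(sizeof(*dsg), GFP_NOWAIT);
        if (!dsg) {
                pl08x_free_txd(pl08x, txd);                       /* line 1441 */
                return NULL;
        }
        dsg->src_addr = src;                                      /* assumed */
        dsg->dst_addr = dest;                                     /* assumed */
        dsg->len = len;                                           /* assumed */
        list_add_tail(&dsg->node, &txd->dsg_list);                /* line 1446 */

        txd->ccfg |= PL080_FLOW_MEM2MEM << PL080_CONFIG_FLOW_CONTROL_SHIFT;  /* 1453 */
        /* Line 1454 starts from the platform's memcpy cctl template; the mask
         * applied on its continuation lines is not in the listing */
        txd->cctl = pl08x->pd->memcpy_channel.cctl_memcpy;
        txd->cctl |= PL080_CONTROL_SRC_INCR | PL080_CONTROL_DST_INCR;        /* 1458 */
        txd->cctl |= pl08x_select_bus(pl08x->mem_buses, pl08x->mem_buses);   /* args assumed */

        ret = pl08x_fill_llis_for_desc(plchan->host, txd);        /* line 1464 */
        if (!ret) {                                               /* error convention assumed */
                pl08x_free_txd(pl08x, txd);                       /* line 1466 */
                return NULL;
        }

        return vchan_tx_prep(&plchan->vc, &txd->vd, flags);       /* line 1470 */
}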
1480 struct pl08x_txd *txd; in pl08x_init_txd() local
1486 txd = pl08x_get_txd(plchan); in pl08x_init_txd()
1487 if (!txd) { in pl08x_init_txd()
1512 pl08x_free_txd(pl08x, txd); in pl08x_init_txd()
1520 pl08x_free_txd(pl08x, txd); in pl08x_init_txd()
1526 txd->cctl = cctl | pl08x_select_bus(src_buses, dst_buses); in pl08x_init_txd()
1535 txd->ccfg |= tmp << PL080_CONFIG_FLOW_CONTROL_SHIFT; in pl08x_init_txd()
1539 pl08x_free_txd(pl08x, txd); in pl08x_init_txd()
1551 txd->ccfg |= plchan->signal << PL080_CONFIG_DST_SEL_SHIFT; in pl08x_init_txd()
1553 txd->ccfg |= plchan->signal << PL080_CONFIG_SRC_SEL_SHIFT; in pl08x_init_txd()
1555 return txd; in pl08x_init_txd()
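pl08x_init_txd() does the direction-dependent half of slave setup: the hits show bus selection merged into cctl (line 1526), the flow-control value placed in ccfg (line 1535), and the channel's request signal routed into either the DST or SRC select field (lines 1551/1553). A tiny hypothetical helper illustrating just that routing decision, with the direction test assumed from standard dmaengine semantics.

/* Hypothetical helper, not in the driver: for MEM_TO_DEV the peripheral is
 * the destination of the transfer, for DEV_TO_MEM it is the source. */
static u32 pl08x_signal_ccfg(unsigned int signal, enum dma_transfer_direction dir)
{
        if (dir == DMA_MEM_TO_DEV)
                return signal << PL080_CONFIG_DST_SEL_SHIFT;    /* cf. line 1551 */
        else
                return signal << PL080_CONFIG_SRC_SEL_SHIFT;    /* cf. line 1553 */
}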
1558 static int pl08x_tx_add_sg(struct pl08x_txd *txd, in pl08x_tx_add_sg() argument
1570 list_add_tail(&dsg->node, &txd->dsg_list); in pl08x_tx_add_sg()
1591 struct pl08x_txd *txd; in pl08x_prep_slave_sg() local
1599 txd = pl08x_init_txd(chan, direction, &slave_addr); in pl08x_prep_slave_sg()
1600 if (!txd) in pl08x_prep_slave_sg()
1604 ret = pl08x_tx_add_sg(txd, direction, slave_addr, in pl08x_prep_slave_sg()
1609 pl08x_free_txd(pl08x, txd); in pl08x_prep_slave_sg()
1616 ret = pl08x_fill_llis_for_desc(plchan->host, txd); in pl08x_prep_slave_sg()
1619 pl08x_free_txd(pl08x, txd); in pl08x_prep_slave_sg()
1623 return vchan_tx_prep(&plchan->vc, &txd->vd, flags); in pl08x_prep_slave_sg()
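The slave scatter/gather prep, reconstructed from the hits: direction-specific setup via pl08x_init_txd(), one pl08x_tx_add_sg() call per sg entry, then the common LLI build and vchan_tx_prep() wrap-up. The for_each_sg() loop and the pl08x_tx_add_sg() argument order are assumptions.

static struct dma_async_tx_descriptor *pl08x_prep_slave_sg(
                struct dma_chan *chan, struct scatterlist *sgl,
                unsigned int sg_len, enum dma_transfer_direction direction,
                unsigned long flags, void *context)
{
        struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);      /* assumed */
        struct pl08x_driver_data *pl08x = plchan->host;           /* assumed */
        struct scatterlist *sg;
        dma_addr_t slave_addr;
        struct pl08x_txd *txd;
        int ret, i;

        txd = pl08x_init_txd(chan, direction, &slave_addr);       /* line 1599 */
        if (!txd)                                                 /* line 1600 */
                return NULL;

        for_each_sg(sgl, sg, sg_len, i) {                         /* assumed loop */
                ret = pl08x_tx_add_sg(txd, direction, slave_addr,
                                      sg_dma_address(sg), sg_dma_len(sg));  /* args assumed */
                if (ret) {
                        pl08x_free_txd(pl08x, txd);               /* line 1609 */
                        return NULL;
                }
        }

        ret = pl08x_fill_llis_for_desc(plchan->host, txd);        /* line 1616 */
        if (!ret) {
                pl08x_free_txd(pl08x, txd);                       /* line 1619 */
                return NULL;
        }

        return vchan_tx_prep(&plchan->vc, &txd->vd, flags);       /* line 1623 */
}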
1633 struct pl08x_txd *txd; in pl08x_prep_dma_cyclic() local
1643 txd = pl08x_init_txd(chan, direction, &slave_addr); in pl08x_prep_dma_cyclic()
1644 if (!txd) in pl08x_prep_dma_cyclic()
1647 txd->cyclic = true; in pl08x_prep_dma_cyclic()
1648 txd->cctl |= PL080_CONTROL_TC_IRQ_EN; in pl08x_prep_dma_cyclic()
1650 ret = pl08x_tx_add_sg(txd, direction, slave_addr, in pl08x_prep_dma_cyclic()
1654 pl08x_free_txd(pl08x, txd); in pl08x_prep_dma_cyclic()
1659 ret = pl08x_fill_llis_for_desc(plchan->host, txd); in pl08x_prep_dma_cyclic()
1662 pl08x_free_txd(pl08x, txd); in pl08x_prep_dma_cyclic()
1666 return vchan_tx_prep(&plchan->vc, &txd->vd, flags); in pl08x_prep_dma_cyclic()
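Cyclic prep follows the same shape, with two quoted differences: txd->cyclic is set so the last LLI loops back to the first (see line 1143 above), and TC interrupts are enabled so each period completion is signalled. The per-period loop and the pl08x_tx_add_sg() arguments are assumptions.

static struct dma_async_tx_descriptor *pl08x_prep_dma_cyclic(
                struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
                size_t period_len, enum dma_transfer_direction direction,
                unsigned long flags)
{
        struct pl08x_dma_chan *plchan = to_pl08x_chan(chan);      /* assumed */
        struct pl08x_driver_data *pl08x = plchan->host;           /* assumed */
        dma_addr_t slave_addr;
        struct pl08x_txd *txd;
        size_t offset;
        int ret;

        txd = pl08x_init_txd(chan, direction, &slave_addr);        /* line 1643 */
        if (!txd)                                                  /* line 1644 */
                return NULL;

        txd->cyclic = true;                                        /* line 1647 */
        txd->cctl |= PL080_CONTROL_TC_IRQ_EN;   /* interrupt per period (line 1648) */

        /* Assumed: one scatter entry per period of the ring buffer */
        for (offset = 0; offset < buf_len; offset += period_len) {
                ret = pl08x_tx_add_sg(txd, direction, slave_addr,
                                      buf_addr + offset, period_len);  /* args assumed */
                if (ret) {
                        pl08x_free_txd(pl08x, txd);                /* line 1654 */
                        return NULL;
                }
        }

        ret = pl08x_fill_llis_for_desc(plchan->host, txd);         /* line 1659 */
        if (!ret) {
                pl08x_free_txd(pl08x, txd);                        /* line 1662 */
                return NULL;
        }

        return vchan_tx_prep(&plchan->vc, &txd->vd, flags);        /* line 1666 */
}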