tdma               60 drivers/crypto/marvell/tdma.c 	struct mv_cesa_tdma_desc *tdma;
tdma               62 drivers/crypto/marvell/tdma.c 	for (tdma = dreq->chain.first; tdma;) {
tdma               63 drivers/crypto/marvell/tdma.c 		struct mv_cesa_tdma_desc *old_tdma = tdma;
tdma               64 drivers/crypto/marvell/tdma.c 		u32 type = tdma->flags & CESA_TDMA_TYPE_MSK;
tdma               67 drivers/crypto/marvell/tdma.c 			dma_pool_free(cesa_dev->dma->op_pool, tdma->op,
tdma               68 drivers/crypto/marvell/tdma.c 				      le32_to_cpu(tdma->src));
tdma               70 drivers/crypto/marvell/tdma.c 		tdma = tdma->next;
tdma               82 drivers/crypto/marvell/tdma.c 	struct mv_cesa_tdma_desc *tdma;
tdma               84 drivers/crypto/marvell/tdma.c 	for (tdma = dreq->chain.first; tdma; tdma = tdma->next) {
tdma               85 drivers/crypto/marvell/tdma.c 		if (tdma->flags & CESA_TDMA_DST_IN_SRAM)
tdma               86 drivers/crypto/marvell/tdma.c 			tdma->dst = cpu_to_le32(tdma->dst + engine->sram_dma);
tdma               88 drivers/crypto/marvell/tdma.c 		if (tdma->flags & CESA_TDMA_SRC_IN_SRAM)
tdma               89 drivers/crypto/marvell/tdma.c 			tdma->src = cpu_to_le32(tdma->src + engine->sram_dma);
tdma               91 drivers/crypto/marvell/tdma.c 		if ((tdma->flags & CESA_TDMA_TYPE_MSK) == CESA_TDMA_OP)
tdma               92 drivers/crypto/marvell/tdma.c 			mv_cesa_adjust_op(engine, tdma->op);
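The Marvell CESA lines just above come from the pass that rebases descriptors built with SRAM-relative offsets onto the engine that will actually run the request: any src/dst flagged as "in SRAM" gets engine->sram_dma added and, as the listing shows, is converted to little-endian for the hardware. A reduced sketch of that pass, with the CESA_TDMA_* flag names taken from the listing (they live in the driver's cesa.h) and the descriptor struct a simplified assumption:

/* Sketch of the SRAM rebasing pass above. Simplified: offsets kept as
 * plain u32 here; the real descriptors store them little-endian. */
struct sketch_prep_desc {
	u32 flags;
	u32 src, dst;
	struct sketch_prep_desc *next;
};

static void sketch_dma_prepare(struct sketch_prep_desc *first, u32 sram_dma)
{
	struct sketch_prep_desc *tdma;

	for (tdma = first; tdma; tdma = tdma->next) {
		if (tdma->flags & CESA_TDMA_DST_IN_SRAM)
			tdma->dst += sram_dma;	/* SRAM offset -> bus address */
		if (tdma->flags & CESA_TDMA_SRC_IN_SRAM)
			tdma->src += sram_dma;
	}
}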
tdma              124 drivers/crypto/marvell/tdma.c 	struct mv_cesa_tdma_desc *tdma = NULL, *next = NULL;
tdma              130 drivers/crypto/marvell/tdma.c 	for (tdma = engine->chain.first; tdma; tdma = next) {
tdma              132 drivers/crypto/marvell/tdma.c 		next = tdma->next;
tdma              135 drivers/crypto/marvell/tdma.c 		if (tdma->flags & CESA_TDMA_END_OF_REQ) {
tdma              152 drivers/crypto/marvell/tdma.c 			engine->chain.first = tdma->next;
tdma              153 drivers/crypto/marvell/tdma.c 			tdma->next = NULL;
tdma              161 drivers/crypto/marvell/tdma.c 			current_status = (tdma->cur_dma == tdma_cur) ?
tdma              174 drivers/crypto/marvell/tdma.c 		if (res || tdma->cur_dma == tdma_cur)
tdma              216 drivers/crypto/marvell/tdma.c 	struct mv_cesa_tdma_desc *tdma, *op_desc;
tdma              218 drivers/crypto/marvell/tdma.c 	tdma = mv_cesa_dma_add_desc(chain, gfp_flags);
tdma              219 drivers/crypto/marvell/tdma.c 	if (IS_ERR(tdma))
tdma              220 drivers/crypto/marvell/tdma.c 		return PTR_ERR(tdma);
tdma              237 drivers/crypto/marvell/tdma.c 	tdma->byte_cnt = cpu_to_le32(size | BIT(31));
tdma              238 drivers/crypto/marvell/tdma.c 	tdma->src = src;
tdma              239 drivers/crypto/marvell/tdma.c 	tdma->dst = op_desc->src;
tdma              240 drivers/crypto/marvell/tdma.c 	tdma->op = op_desc->op;
tdma              243 drivers/crypto/marvell/tdma.c 	tdma->flags = flags | CESA_TDMA_RESULT;
tdma              252 drivers/crypto/marvell/tdma.c 	struct mv_cesa_tdma_desc *tdma;
tdma              257 drivers/crypto/marvell/tdma.c 	tdma = mv_cesa_dma_add_desc(chain, flags);
tdma              258 drivers/crypto/marvell/tdma.c 	if (IS_ERR(tdma))
tdma              259 drivers/crypto/marvell/tdma.c 		return ERR_CAST(tdma);
tdma              269 drivers/crypto/marvell/tdma.c 	tdma = chain->last;
tdma              270 drivers/crypto/marvell/tdma.c 	tdma->op = op;
tdma              271 drivers/crypto/marvell/tdma.c 	tdma->byte_cnt = cpu_to_le32(size | BIT(31));
tdma              272 drivers/crypto/marvell/tdma.c 	tdma->src = cpu_to_le32(dma_handle);
tdma              273 drivers/crypto/marvell/tdma.c 	tdma->dst = CESA_SA_CFG_SRAM_OFFSET;
tdma              274 drivers/crypto/marvell/tdma.c 	tdma->flags = CESA_TDMA_DST_IN_SRAM | CESA_TDMA_OP;
tdma              283 drivers/crypto/marvell/tdma.c 	struct mv_cesa_tdma_desc *tdma;
tdma              285 drivers/crypto/marvell/tdma.c 	tdma = mv_cesa_dma_add_desc(chain, gfp_flags);
tdma              286 drivers/crypto/marvell/tdma.c 	if (IS_ERR(tdma))
tdma              287 drivers/crypto/marvell/tdma.c 		return PTR_ERR(tdma);
tdma              289 drivers/crypto/marvell/tdma.c 	tdma->byte_cnt = cpu_to_le32(size | BIT(31));
tdma              290 drivers/crypto/marvell/tdma.c 	tdma->src = src;
tdma              291 drivers/crypto/marvell/tdma.c 	tdma->dst = dst;
tdma              294 drivers/crypto/marvell/tdma.c 	tdma->flags = flags | CESA_TDMA_DATA;
tdma              301 drivers/crypto/marvell/tdma.c 	struct mv_cesa_tdma_desc *tdma;
tdma              303 drivers/crypto/marvell/tdma.c 	tdma = mv_cesa_dma_add_desc(chain, flags);
tdma              304 drivers/crypto/marvell/tdma.c 	return PTR_ERR_OR_ZERO(tdma);
tdma              309 drivers/crypto/marvell/tdma.c 	struct mv_cesa_tdma_desc *tdma;
tdma              311 drivers/crypto/marvell/tdma.c 	tdma = mv_cesa_dma_add_desc(chain, flags);
tdma              312 drivers/crypto/marvell/tdma.c 	if (IS_ERR(tdma))
tdma              313 drivers/crypto/marvell/tdma.c 		return PTR_ERR(tdma);
tdma              315 drivers/crypto/marvell/tdma.c 	tdma->byte_cnt = cpu_to_le32(BIT(31));
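The rest of the CESA block builds requests as a linked chain of TDMA descriptors: the cleanup path walks the chain and returns "op" descriptors to their dma_pool, while the later mv_cesa_dma_add_*() helpers append op, data-transfer and dummy descriptors. A hedged sketch of the walk-and-free pattern; the struct layout and pool handle are simplified assumptions, the CESA_TDMA_* flags and the trick of stashing the op's DMA handle in ->src are taken from the listing:

#include <linux/dmapool.h>

/* Sketch only: simplified stand-in for the CESA TDMA chain cleanup. */
struct sketch_tdma_desc {
	u32 flags;
	__le32 src;			/* DMA handle of the op descriptor   */
	void *op;			/* CPU pointer to the op descriptor  */
	struct sketch_tdma_desc *next;
};

static void sketch_chain_cleanup(struct sketch_tdma_desc *first,
				 struct dma_pool *op_pool)
{
	struct sketch_tdma_desc *tdma = first;

	while (tdma) {
		struct sketch_tdma_desc *next = tdma->next;

		/* Only "op" descriptors own a dma_pool entry; its DMA
		 * address was stored in ->src when it was allocated. */
		if ((tdma->flags & CESA_TDMA_TYPE_MSK) == CESA_TDMA_OP)
			dma_pool_free(op_pool, tdma->op,
				      le32_to_cpu(tdma->src));

		tdma = next;
	}
}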
tdma              189 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma	*tdma;
tdma              232 drivers/dma/tegra20-apb-dma.c static inline void tdma_write(struct tegra_dma *tdma, u32 reg, u32 val)
tdma              234 drivers/dma/tegra20-apb-dma.c 	writel(val, tdma->base_addr + reg);
tdma              237 drivers/dma/tegra20-apb-dma.c static inline u32 tdma_read(struct tegra_dma *tdma, u32 reg)
tdma              239 drivers/dma/tegra20-apb-dma.c 	return readl(tdma->base_addr + reg);
tdma              362 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = tdc->tdma;
tdma              364 drivers/dma/tegra20-apb-dma.c 	spin_lock(&tdma->global_lock);
tdma              366 drivers/dma/tegra20-apb-dma.c 	if (tdc->tdma->global_pause_count == 0) {
tdma              367 drivers/dma/tegra20-apb-dma.c 		tdma_write(tdma, TEGRA_APBDMA_GENERAL, 0);
tdma              372 drivers/dma/tegra20-apb-dma.c 	tdc->tdma->global_pause_count++;
tdma              374 drivers/dma/tegra20-apb-dma.c 	spin_unlock(&tdma->global_lock);
tdma              379 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = tdc->tdma;
tdma              381 drivers/dma/tegra20-apb-dma.c 	spin_lock(&tdma->global_lock);
tdma              383 drivers/dma/tegra20-apb-dma.c 	if (WARN_ON(tdc->tdma->global_pause_count == 0))
tdma              386 drivers/dma/tegra20-apb-dma.c 	if (--tdc->tdma->global_pause_count == 0)
tdma              387 drivers/dma/tegra20-apb-dma.c 		tdma_write(tdma, TEGRA_APBDMA_GENERAL,
tdma              391 drivers/dma/tegra20-apb-dma.c 	spin_unlock(&tdma->global_lock);
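In tegra20-apb-dma.c, the global pause/resume pair above shares one controller-wide enable register, so a global_pause_count protected by tdma->global_lock lets nested pause requests from several channels balance out: only the first pauser stops the hardware and only the last resumer re-enables it. A self-contained sketch of that refcounting, with the register write replaced by a stub flag:

#include <linux/spinlock.h>
#include <linux/bug.h>

/* Sketch of the refcounted global pause shown above; "enabled" stands in
 * for the TEGRA_APBDMA_GENERAL enable bit. */
struct sketch_dma {
	spinlock_t	global_lock;
	unsigned int	global_pause_count;
	bool		enabled;
};

static void sketch_global_pause(struct sketch_dma *tdma)
{
	spin_lock(&tdma->global_lock);
	if (tdma->global_pause_count == 0)
		tdma->enabled = false;		/* first pauser stops the HW */
	tdma->global_pause_count++;
	spin_unlock(&tdma->global_lock);
}

static void sketch_global_resume(struct sketch_dma *tdma)
{
	spin_lock(&tdma->global_lock);
	if (WARN_ON(tdma->global_pause_count == 0))
		goto out;
	if (--tdma->global_pause_count == 0)
		tdma->enabled = true;		/* last resumer restarts it */
out:
	spin_unlock(&tdma->global_lock);
}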
tdma              397 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = tdc->tdma;
tdma              399 drivers/dma/tegra20-apb-dma.c 	if (tdma->chip_data->support_channel_pause) {
tdma              411 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = tdc->tdma;
tdma              413 drivers/dma/tegra20-apb-dma.c 	if (tdma->chip_data->support_channel_pause) {
tdma              453 drivers/dma/tegra20-apb-dma.c 	if (tdc->tdma->chip_data->support_separate_wcount_reg)
tdma              494 drivers/dma/tegra20-apb-dma.c 	if (tdc->tdma->chip_data->support_separate_wcount_reg)
tdma              772 drivers/dma/tegra20-apb-dma.c 	if (tdc->tdma->chip_data->support_separate_wcount_reg)
tdma              809 drivers/dma/tegra20-apb-dma.c 	if (tdc->tdma->chip_data->support_separate_wcount_reg)
tdma              814 drivers/dma/tegra20-apb-dma.c 	if (!tdc->tdma->chip_data->support_separate_wcount_reg)
tdma              994 drivers/dma/tegra20-apb-dma.c 	if (tdc->tdma->chip_data->support_separate_wcount_reg)
tdma             1071 drivers/dma/tegra20-apb-dma.c 				(len > tdc->tdma->chip_data->max_dma_count)) {
tdma             1170 drivers/dma/tegra20-apb-dma.c 			(len > tdc->tdma->chip_data->max_dma_count)) {
tdma             1262 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = tdc->tdma;
tdma             1268 drivers/dma/tegra20-apb-dma.c 	ret = pm_runtime_get_sync(tdma->dev);
tdma             1278 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = tdc->tdma;
tdma             1314 drivers/dma/tegra20-apb-dma.c 	pm_runtime_put(tdma->dev);
tdma             1322 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = ofdma->of_dma_data;
tdma             1327 drivers/dma/tegra20-apb-dma.c 		dev_err(tdma->dev, "Invalid slave id: %d\n", dma_spec->args[0]);
tdma             1331 drivers/dma/tegra20-apb-dma.c 	chan = dma_get_any_slave_channel(&tdma->dma_dev);
tdma             1380 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma;
tdma             1391 drivers/dma/tegra20-apb-dma.c 	tdma = devm_kzalloc(&pdev->dev,
tdma             1392 drivers/dma/tegra20-apb-dma.c 			    struct_size(tdma, channels, cdata->nr_channels),
tdma             1394 drivers/dma/tegra20-apb-dma.c 	if (!tdma)
tdma             1397 drivers/dma/tegra20-apb-dma.c 	tdma->dev = &pdev->dev;
tdma             1398 drivers/dma/tegra20-apb-dma.c 	tdma->chip_data = cdata;
tdma             1399 drivers/dma/tegra20-apb-dma.c 	platform_set_drvdata(pdev, tdma);
tdma             1402 drivers/dma/tegra20-apb-dma.c 	tdma->base_addr = devm_ioremap_resource(&pdev->dev, res);
tdma             1403 drivers/dma/tegra20-apb-dma.c 	if (IS_ERR(tdma->base_addr))
tdma             1404 drivers/dma/tegra20-apb-dma.c 		return PTR_ERR(tdma->base_addr);
tdma             1406 drivers/dma/tegra20-apb-dma.c 	tdma->dma_clk = devm_clk_get(&pdev->dev, NULL);
tdma             1407 drivers/dma/tegra20-apb-dma.c 	if (IS_ERR(tdma->dma_clk)) {
tdma             1409 drivers/dma/tegra20-apb-dma.c 		return PTR_ERR(tdma->dma_clk);
tdma             1412 drivers/dma/tegra20-apb-dma.c 	tdma->rst = devm_reset_control_get(&pdev->dev, "dma");
tdma             1413 drivers/dma/tegra20-apb-dma.c 	if (IS_ERR(tdma->rst)) {
tdma             1415 drivers/dma/tegra20-apb-dma.c 		return PTR_ERR(tdma->rst);
tdma             1418 drivers/dma/tegra20-apb-dma.c 	spin_lock_init(&tdma->global_lock);
tdma             1432 drivers/dma/tegra20-apb-dma.c 	reset_control_assert(tdma->rst);
tdma             1434 drivers/dma/tegra20-apb-dma.c 	reset_control_deassert(tdma->rst);
tdma             1437 drivers/dma/tegra20-apb-dma.c 	tdma_write(tdma, TEGRA_APBDMA_GENERAL, TEGRA_APBDMA_GENERAL_ENABLE);
tdma             1438 drivers/dma/tegra20-apb-dma.c 	tdma_write(tdma, TEGRA_APBDMA_CONTROL, 0);
tdma             1439 drivers/dma/tegra20-apb-dma.c 	tdma_write(tdma, TEGRA_APBDMA_IRQ_MASK_SET, 0xFFFFFFFFul);
tdma             1443 drivers/dma/tegra20-apb-dma.c 	INIT_LIST_HEAD(&tdma->dma_dev.channels);
tdma             1445 drivers/dma/tegra20-apb-dma.c 		struct tegra_dma_channel *tdc = &tdma->channels[i];
tdma             1447 drivers/dma/tegra20-apb-dma.c 		tdc->chan_addr = tdma->base_addr +
tdma             1467 drivers/dma/tegra20-apb-dma.c 		tdc->dma_chan.device = &tdma->dma_dev;
tdma             1470 drivers/dma/tegra20-apb-dma.c 				&tdma->dma_dev.channels);
tdma             1471 drivers/dma/tegra20-apb-dma.c 		tdc->tdma = tdma;
tdma             1485 drivers/dma/tegra20-apb-dma.c 	dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask);
tdma             1486 drivers/dma/tegra20-apb-dma.c 	dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask);
tdma             1487 drivers/dma/tegra20-apb-dma.c 	dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask);
tdma             1489 drivers/dma/tegra20-apb-dma.c 	tdma->global_pause_count = 0;
tdma             1490 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.dev = &pdev->dev;
tdma             1491 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.device_alloc_chan_resources =
tdma             1493 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.device_free_chan_resources =
tdma             1495 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.device_prep_slave_sg = tegra_dma_prep_slave_sg;
tdma             1496 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.device_prep_dma_cyclic = tegra_dma_prep_dma_cyclic;
tdma             1497 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
tdma             1501 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
tdma             1505 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
tdma             1506 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
tdma             1507 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.device_config = tegra_dma_slave_config;
tdma             1508 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.device_terminate_all = tegra_dma_terminate_all;
tdma             1509 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.device_tx_status = tegra_dma_tx_status;
tdma             1510 drivers/dma/tegra20-apb-dma.c 	tdma->dma_dev.device_issue_pending = tegra_dma_issue_pending;
tdma             1512 drivers/dma/tegra20-apb-dma.c 	ret = dma_async_device_register(&tdma->dma_dev);
tdma             1520 drivers/dma/tegra20-apb-dma.c 					 tegra_dma_of_xlate, tdma);
tdma             1532 drivers/dma/tegra20-apb-dma.c 	dma_async_device_unregister(&tdma->dma_dev);
tdma             1535 drivers/dma/tegra20-apb-dma.c 		struct tegra_dma_channel *tdc = &tdma->channels[i];
tdma             1549 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = platform_get_drvdata(pdev);
tdma             1553 drivers/dma/tegra20-apb-dma.c 	dma_async_device_unregister(&tdma->dma_dev);
tdma             1555 drivers/dma/tegra20-apb-dma.c 	for (i = 0; i < tdma->chip_data->nr_channels; ++i) {
tdma             1556 drivers/dma/tegra20-apb-dma.c 		tdc = &tdma->channels[i];
tdma             1570 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = dev_get_drvdata(dev);
tdma             1573 drivers/dma/tegra20-apb-dma.c 	tdma->reg_gen = tdma_read(tdma, TEGRA_APBDMA_GENERAL);
tdma             1574 drivers/dma/tegra20-apb-dma.c 	for (i = 0; i < tdma->chip_data->nr_channels; i++) {
tdma             1575 drivers/dma/tegra20-apb-dma.c 		struct tegra_dma_channel *tdc = &tdma->channels[i];
tdma             1587 drivers/dma/tegra20-apb-dma.c 		if (tdma->chip_data->support_separate_wcount_reg)
tdma             1592 drivers/dma/tegra20-apb-dma.c 	clk_disable_unprepare(tdma->dma_clk);
tdma             1599 drivers/dma/tegra20-apb-dma.c 	struct tegra_dma *tdma = dev_get_drvdata(dev);
tdma             1602 drivers/dma/tegra20-apb-dma.c 	ret = clk_prepare_enable(tdma->dma_clk);
tdma             1608 drivers/dma/tegra20-apb-dma.c 	tdma_write(tdma, TEGRA_APBDMA_GENERAL, tdma->reg_gen);
tdma             1609 drivers/dma/tegra20-apb-dma.c 	tdma_write(tdma, TEGRA_APBDMA_CONTROL, 0);
tdma             1610 drivers/dma/tegra20-apb-dma.c 	tdma_write(tdma, TEGRA_APBDMA_IRQ_MASK_SET, 0xFFFFFFFFul);
tdma             1612 drivers/dma/tegra20-apb-dma.c 	for (i = 0; i < tdma->chip_data->nr_channels; i++) {
tdma             1613 drivers/dma/tegra20-apb-dma.c 		struct tegra_dma_channel *tdc = &tdma->channels[i];
tdma             1620 drivers/dma/tegra20-apb-dma.c 		if (tdma->chip_data->support_separate_wcount_reg)
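Both Tegra probes in this listing allocate the controller struct together with its per-channel array in a single devm_kzalloc() sized with struct_size(), so the flexible array of channels trails the parent struct and the size arithmetic is overflow-checked. A self-contained sketch of that allocation pattern; the struct and function names here are placeholders, not the drivers' own:

#include <linux/device.h>
#include <linux/overflow.h>
#include <linux/slab.h>

struct sketch_chan { int id; };

struct sketch_ctrl {
	struct device *dev;
	unsigned int nr_channels;
	struct sketch_chan channels[];	/* flexible array member */
};

static struct sketch_ctrl *sketch_alloc(struct device *dev, unsigned int nr)
{
	struct sketch_ctrl *ctrl;

	/* One device-managed allocation covers the controller and all
	 * channel slots; freed automatically on driver detach. */
	ctrl = devm_kzalloc(dev, struct_size(ctrl, channels, nr), GFP_KERNEL);
	if (!ctrl)
		return NULL;

	ctrl->dev = dev;
	ctrl->nr_channels = nr;
	return ctrl;
}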
tdma              133 drivers/dma/tegra210-adma.c 	struct tegra_adma		*tdma;
tdma              170 drivers/dma/tegra210-adma.c static inline void tdma_write(struct tegra_adma *tdma, u32 reg, u32 val)
tdma              172 drivers/dma/tegra210-adma.c 	writel(val, tdma->base_addr + tdma->cdata->global_reg_offset + reg);
tdma              175 drivers/dma/tegra210-adma.c static inline u32 tdma_read(struct tegra_adma *tdma, u32 reg)
tdma              177 drivers/dma/tegra210-adma.c 	return readl(tdma->base_addr + tdma->cdata->global_reg_offset + reg);
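Unlike the flat tdma_write()/tdma_read() accessors in tegra20-apb-dma.c earlier in the listing, the ADMA variants add a per-chip global_reg_offset from the chip data before the register offset, so the same helpers work across SoC generations whose global register block sits at different offsets. A sketch of that indirection under simplified struct names:

#include <linux/io.h>

/* Sketch: per-chip data selects where the global register block lives. */
struct sketch_chip_data {
	unsigned int global_reg_offset;
};

struct sketch_adma {
	void __iomem *base_addr;		/* from devm_ioremap_resource() */
	const struct sketch_chip_data *cdata;
};

static inline void sketch_global_write(struct sketch_adma *tdma, u32 reg, u32 val)
{
	writel(val, tdma->base_addr + tdma->cdata->global_reg_offset + reg);
}

static inline u32 sketch_global_read(struct sketch_adma *tdma, u32 reg)
{
	return readl(tdma->base_addr + tdma->cdata->global_reg_offset + reg);
}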
tdma              203 drivers/dma/tegra210-adma.c 	return tdc->tdma->dev;
tdma              221 drivers/dma/tegra210-adma.c static int tegra_adma_init(struct tegra_adma *tdma)
tdma              227 drivers/dma/tegra210-adma.c 	tdma_write(tdma, tdma->cdata->global_int_clear, 0x1);
tdma              230 drivers/dma/tegra210-adma.c 	tdma_write(tdma, ADMA_GLOBAL_SOFT_RESET, 0x1);
tdma              234 drivers/dma/tegra210-adma.c 				 tdma->base_addr +
tdma              235 drivers/dma/tegra210-adma.c 				 tdma->cdata->global_reg_offset +
tdma              242 drivers/dma/tegra210-adma.c 	tdma_write(tdma, ADMA_GLOBAL_CMD, 1);
tdma              250 drivers/dma/tegra210-adma.c 	struct tegra_adma *tdma = tdc->tdma;
tdma              256 drivers/dma/tegra210-adma.c 	if (sreq_index > tdma->cdata->ch_req_max) {
tdma              257 drivers/dma/tegra210-adma.c 		dev_err(tdma->dev, "invalid DMA request\n");
tdma              263 drivers/dma/tegra210-adma.c 		if (test_and_set_bit(sreq_index, &tdma->tx_requests_reserved)) {
tdma              264 drivers/dma/tegra210-adma.c 			dev_err(tdma->dev, "DMA request reserved\n");
tdma              270 drivers/dma/tegra210-adma.c 		if (test_and_set_bit(sreq_index, &tdma->rx_requests_reserved)) {
tdma              271 drivers/dma/tegra210-adma.c 			dev_err(tdma->dev, "DMA request reserved\n");
tdma              277 drivers/dma/tegra210-adma.c 		dev_WARN(tdma->dev, "channel %s has invalid transfer type\n",
tdma              290 drivers/dma/tegra210-adma.c 	struct tegra_adma *tdma = tdc->tdma;
tdma              297 drivers/dma/tegra210-adma.c 		clear_bit(tdc->sreq_index, &tdma->tx_requests_reserved);
tdma              301 drivers/dma/tegra210-adma.c 		clear_bit(tdc->sreq_index, &tdma->rx_requests_reserved);
tdma              305 drivers/dma/tegra210-adma.c 		dev_WARN(tdma->dev, "channel %s has invalid transfer type\n",
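The ADMA request alloc/free lines above guard each hardware request line with a per-direction bitmap: test_and_set_bit() makes a double claim fail loudly instead of silently programming the same request twice, and clear_bit() releases it. A compact sketch of that reservation pattern (the real driver picks the tx or rx bitmap from the transfer direction):

#include <linux/bitops.h>
#include <linux/errno.h>

static int sketch_reserve_req(unsigned long *requests_reserved,
			      unsigned int sreq_index, unsigned int req_max)
{
	if (sreq_index > req_max)
		return -EINVAL;			/* request line out of range */

	if (test_and_set_bit(sreq_index, requests_reserved))
		return -EINVAL;			/* already claimed elsewhere */

	return 0;
}

static void sketch_release_req(unsigned long *requests_reserved,
			       unsigned int sreq_index)
{
	clear_bit(sreq_index, requests_reserved);
}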
tdma              563 drivers/dma/tegra210-adma.c 	const struct tegra_adma_chip_data *cdata = tdc->tdma->cdata;
tdma              687 drivers/dma/tegra210-adma.c 	struct tegra_adma *tdma = ofdma->of_dma_data;
tdma              698 drivers/dma/tegra210-adma.c 		dev_err(tdma->dev, "DMA request must not be 0\n");
tdma              702 drivers/dma/tegra210-adma.c 	chan = dma_get_any_slave_channel(&tdma->dma_dev);
tdma              714 drivers/dma/tegra210-adma.c 	struct tegra_adma *tdma = dev_get_drvdata(dev);
tdma              719 drivers/dma/tegra210-adma.c 	tdma->global_cmd = tdma_read(tdma, ADMA_GLOBAL_CMD);
tdma              720 drivers/dma/tegra210-adma.c 	if (!tdma->global_cmd)
tdma              723 drivers/dma/tegra210-adma.c 	for (i = 0; i < tdma->nr_channels; i++) {
tdma              724 drivers/dma/tegra210-adma.c 		tdc = &tdma->channels[i];
tdma              739 drivers/dma/tegra210-adma.c 	clk_disable_unprepare(tdma->ahub_clk);
tdma              746 drivers/dma/tegra210-adma.c 	struct tegra_adma *tdma = dev_get_drvdata(dev);
tdma              751 drivers/dma/tegra210-adma.c 	ret = clk_prepare_enable(tdma->ahub_clk);
tdma              756 drivers/dma/tegra210-adma.c 	tdma_write(tdma, ADMA_GLOBAL_CMD, tdma->global_cmd);
tdma              758 drivers/dma/tegra210-adma.c 	if (!tdma->global_cmd)
tdma              761 drivers/dma/tegra210-adma.c 	for (i = 0; i < tdma->nr_channels; i++) {
tdma              762 drivers/dma/tegra210-adma.c 		tdc = &tdma->channels[i];
tdma              819 drivers/dma/tegra210-adma.c 	struct tegra_adma *tdma;
tdma              829 drivers/dma/tegra210-adma.c 	tdma = devm_kzalloc(&pdev->dev,
tdma              830 drivers/dma/tegra210-adma.c 			    struct_size(tdma, channels, cdata->nr_channels),
tdma              832 drivers/dma/tegra210-adma.c 	if (!tdma)
tdma              835 drivers/dma/tegra210-adma.c 	tdma->dev = &pdev->dev;
tdma              836 drivers/dma/tegra210-adma.c 	tdma->cdata = cdata;
tdma              837 drivers/dma/tegra210-adma.c 	tdma->nr_channels = cdata->nr_channels;
tdma              838 drivers/dma/tegra210-adma.c 	platform_set_drvdata(pdev, tdma);
tdma              841 drivers/dma/tegra210-adma.c 	tdma->base_addr = devm_ioremap_resource(&pdev->dev, res);
tdma              842 drivers/dma/tegra210-adma.c 	if (IS_ERR(tdma->base_addr))
tdma              843 drivers/dma/tegra210-adma.c 		return PTR_ERR(tdma->base_addr);
tdma              845 drivers/dma/tegra210-adma.c 	tdma->ahub_clk = devm_clk_get(&pdev->dev, "d_audio");
tdma              846 drivers/dma/tegra210-adma.c 	if (IS_ERR(tdma->ahub_clk)) {
tdma              848 drivers/dma/tegra210-adma.c 		return PTR_ERR(tdma->ahub_clk);
tdma              851 drivers/dma/tegra210-adma.c 	INIT_LIST_HEAD(&tdma->dma_dev.channels);
tdma              852 drivers/dma/tegra210-adma.c 	for (i = 0; i < tdma->nr_channels; i++) {
tdma              853 drivers/dma/tegra210-adma.c 		struct tegra_adma_chan *tdc = &tdma->channels[i];
tdma              855 drivers/dma/tegra210-adma.c 		tdc->chan_addr = tdma->base_addr + cdata->ch_base_offset
tdma              864 drivers/dma/tegra210-adma.c 		vchan_init(&tdc->vc, &tdma->dma_dev);
tdma              866 drivers/dma/tegra210-adma.c 		tdc->tdma = tdma;
tdma              875 drivers/dma/tegra210-adma.c 	ret = tegra_adma_init(tdma);
tdma              879 drivers/dma/tegra210-adma.c 	dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask);
tdma              880 drivers/dma/tegra210-adma.c 	dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask);
tdma              881 drivers/dma/tegra210-adma.c 	dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask);
tdma              883 drivers/dma/tegra210-adma.c 	tdma->dma_dev.dev = &pdev->dev;
tdma              884 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_alloc_chan_resources =
tdma              886 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_free_chan_resources =
tdma              888 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_issue_pending = tegra_adma_issue_pending;
tdma              889 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_prep_dma_cyclic = tegra_adma_prep_dma_cyclic;
tdma              890 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_config = tegra_adma_slave_config;
tdma              891 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_tx_status = tegra_adma_tx_status;
tdma              892 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_terminate_all = tegra_adma_terminate_all;
tdma              893 drivers/dma/tegra210-adma.c 	tdma->dma_dev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
tdma              894 drivers/dma/tegra210-adma.c 	tdma->dma_dev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
tdma              895 drivers/dma/tegra210-adma.c 	tdma->dma_dev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
tdma              896 drivers/dma/tegra210-adma.c 	tdma->dma_dev.residue_granularity = DMA_RESIDUE_GRANULARITY_SEGMENT;
tdma              897 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_pause = tegra_adma_pause;
tdma              898 drivers/dma/tegra210-adma.c 	tdma->dma_dev.device_resume = tegra_adma_resume;
tdma              900 drivers/dma/tegra210-adma.c 	ret = dma_async_device_register(&tdma->dma_dev);
tdma              907 drivers/dma/tegra210-adma.c 					 tegra_dma_of_xlate, tdma);
tdma              916 drivers/dma/tegra210-adma.c 		 tdma->nr_channels);
tdma              921 drivers/dma/tegra210-adma.c 	dma_async_device_unregister(&tdma->dma_dev);
tdma              928 drivers/dma/tegra210-adma.c 		irq_dispose_mapping(tdma->channels[i].irq);
tdma              935 drivers/dma/tegra210-adma.c 	struct tegra_adma *tdma = platform_get_drvdata(pdev);
tdma              939 drivers/dma/tegra210-adma.c 	dma_async_device_unregister(&tdma->dma_dev);
tdma              941 drivers/dma/tegra210-adma.c 	for (i = 0; i < tdma->nr_channels; ++i)
tdma              942 drivers/dma/tegra210-adma.c 		irq_dispose_mapping(tdma->channels[i].irq);
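Both Tegra drivers finish probe by registering their dma_device and then an OF translation hook (tegra_dma_of_xlate in the listing) so consumers can request a channel by phandle plus one cell carrying the slave/request id. A sketch of that xlate shape under stated assumptions: the channel container, the args_count check and where the request id is stored are simplifications, not the drivers' exact code.

#include <linux/dmaengine.h>
#include <linux/of_dma.h>

struct sketch_xlate_chan {
	struct dma_chan	dma_chan;
	unsigned int	sreq_index;
};

static struct dma_chan *sketch_of_xlate(struct of_phandle_args *dma_spec,
					struct of_dma *ofdma)
{
	struct dma_device *dma_dev = ofdma->of_dma_data;
	struct sketch_xlate_chan *tdc;
	struct dma_chan *chan;

	if (dma_spec->args_count != 1 || dma_spec->args[0] == 0)
		return NULL;		/* request id 0 is rejected, as above */

	chan = dma_get_any_slave_channel(dma_dev);
	if (!chan)
		return NULL;

	tdc = container_of(chan, struct sketch_xlate_chan, dma_chan);
	tdc->sreq_index = dma_spec->args[0];	/* remember the request line */
	return chan;
}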
tdma               13 drivers/media/pci/b2c2/flexcop-dma.c 	dma_addr_t tdma = 0;
tdma               20 drivers/media/pci/b2c2/flexcop-dma.c 	tcpu = pci_alloc_consistent(pdev, size, &tdma);
tdma               24 drivers/media/pci/b2c2/flexcop-dma.c 		dma->dma_addr0 = tdma;
tdma               26 drivers/media/pci/b2c2/flexcop-dma.c 		dma->dma_addr1 = tdma + size/2;
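The flexcop lines show one consistent DMA buffer being allocated and split in half, so both ring halves (dma_addr0 and dma_addr1) come from a single allocation. The file uses the legacy pci_alloc_consistent() wrapper; below is a sketch of the same split using the generic DMA API as a substitution, with struct fields partly taken from the listing and partly assumed:

#include <linux/dma-mapping.h>
#include <linux/errno.h>

struct sketch_fc_dma {
	void		*cpu_addr0, *cpu_addr1;
	dma_addr_t	dma_addr0, dma_addr1;
	u32		size;
};

static int sketch_fc_dma_alloc(struct device *dev, struct sketch_fc_dma *dma,
			       u32 size)
{
	dma_addr_t tdma = 0;
	void *tcpu;

	/* One allocation, split into two equally sized halves. */
	tcpu = dma_alloc_coherent(dev, size, &tdma, GFP_KERNEL);
	if (!tcpu)
		return -ENOMEM;

	dma->cpu_addr0 = tcpu;
	dma->dma_addr0 = tdma;
	dma->cpu_addr1 = tcpu + size / 2;
	dma->dma_addr1 = tdma + size / 2;
	dma->size = size / 2;
	return 0;
}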
tdma               45 drivers/net/ethernet/broadcom/bcmsysport.c BCM_SYSPORT_IO_MACRO(tdma, SYS_PORT_TDMA_OFFSET);
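The single bcmsysport line stamps out register accessors for the TDMA block from one macro invocation. The generated names and bodies below are an illustrative guess at this common pattern only; the real macro is defined in bcmsysport.h and may differ.

#include <linux/io.h>

struct sketch_sysport_priv {
	void __iomem *base;
};

/* Illustrative block-accessor macro: one invocation per register block. */
#define SKETCH_IO_MACRO(name, offset)					\
static inline u32 name##_readl(struct sketch_sysport_priv *priv, u32 off) \
{									\
	return readl(priv->base + (offset) + off);			\
}									\
static inline void name##_writel(struct sketch_sysport_priv *priv,	\
				 u32 val, u32 off)			\
{									\
	writel(val, priv->base + (offset) + off);			\
}

/* SKETCH_IO_MACRO(tdma, SYS_PORT_TDMA_OFFSET) would then provide
 * tdma_readl()/tdma_writel() scoped to the TDMA register window. */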
tdma               75 drivers/net/ethernet/seeq/sgiseeq.c 	volatile struct hpc_dma_desc tdma;
tdma              192 drivers/net/ethernet/seeq/sgiseeq.c 		sp->tx_desc[i].tdma.cntinfo = TCNTINFO_INIT;
tdma              268 drivers/net/ethernet/seeq/sgiseeq.c 		       i, (&t[i]), t[i].tdma.pbuf, t[i].tdma.cntinfo,
tdma              269 drivers/net/ethernet/seeq/sgiseeq.c 		       t[i].tdma.pnext);
tdma              272 drivers/net/ethernet/seeq/sgiseeq.c 		       i, (&t[i]), t[i].tdma.pbuf, t[i].tdma.cntinfo,
tdma              273 drivers/net/ethernet/seeq/sgiseeq.c 		       t[i].tdma.pnext);
tdma              439 drivers/net/ethernet/seeq/sgiseeq.c 	while ((td->tdma.cntinfo & (HPCDMA_XIU | HPCDMA_ETXD)) ==
tdma              445 drivers/net/ethernet/seeq/sgiseeq.c 	if (td->tdma.cntinfo & HPCDMA_XIU) {
tdma              476 drivers/net/ethernet/seeq/sgiseeq.c 		if (!(td->tdma.cntinfo & (HPCDMA_XIU)))
tdma              478 drivers/net/ethernet/seeq/sgiseeq.c 		if (!(td->tdma.cntinfo & (HPCDMA_ETXD))) {
tdma              487 drivers/net/ethernet/seeq/sgiseeq.c 		td->tdma.cntinfo &= ~(HPCDMA_XIU | HPCDMA_XIE);
tdma              488 drivers/net/ethernet/seeq/sgiseeq.c 		td->tdma.cntinfo |= HPCDMA_EOX;
tdma              622 drivers/net/ethernet/seeq/sgiseeq.c 	td->tdma.pbuf = dma_map_single(dev->dev.parent, skb->data,
tdma              624 drivers/net/ethernet/seeq/sgiseeq.c 	td->tdma.cntinfo = (len & HPCDMA_BCNT) |
tdma              632 drivers/net/ethernet/seeq/sgiseeq.c 		backend->tdma.cntinfo &= ~HPCDMA_EOX;
tdma              685 drivers/net/ethernet/seeq/sgiseeq.c 		buf[i].tdma.pnext = VIRT_TO_DMA(sp, buf + i + 1);
tdma              686 drivers/net/ethernet/seeq/sgiseeq.c 		buf[i].tdma.pbuf = 0;
tdma              690 drivers/net/ethernet/seeq/sgiseeq.c 	buf[i].tdma.pnext = VIRT_TO_DMA(sp, buf);
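The last sgiseeq lines are the transmit-ring setup: each descriptor's tdma.pnext is pointed at the DMA address of the next descriptor (via the driver's VIRT_TO_DMA() helper) and the final entry wraps back to the first, closing the ring. A compact sketch of that initialisation, with the descriptor reduced to the fields visible in the listing and the address arithmetic standing in for VIRT_TO_DMA():

#include <linux/types.h>

/* Sketch of the seeq transmit-ring initialisation shown above. */
struct sketch_hpc_dma_desc {
	u32 pbuf;	/* DMA address of the packet buffer   */
	u32 cntinfo;	/* byte count plus control/status bits */
	u32 pnext;	/* DMA address of the next descriptor */
};

static void sketch_init_tx_ring(struct sketch_hpc_dma_desc *buf,
				unsigned int nr, u32 ring_dma_base)
{
	unsigned int i;

	for (i = 0; i < nr - 1; i++) {
		/* Each entry points at its successor... */
		buf[i].pnext = ring_dma_base + (i + 1) * sizeof(*buf);
		buf[i].pbuf = 0;
	}
	/* ...and the last entry wraps back to the first, closing the ring. */
	buf[nr - 1].pnext = ring_dma_base;
	buf[nr - 1].pbuf = 0;
}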
tdma              985 drivers/pinctrl/meson/pinctrl-meson-axg.c 	FUNCTION(tdma),