d40c              603 drivers/dma/ste_dma40.c static struct device *chan2dev(struct d40_chan *d40c)
d40c              605 drivers/dma/ste_dma40.c 	return &d40c->chan.dev->device;
d40c              627 drivers/dma/ste_dma40.c #define chan_err(d40c, format, arg...)		\
d40c              628 drivers/dma/ste_dma40.c 	d40_err(chan2dev(d40c), format, ## arg)
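chan_err() above is the per-channel error wrapper used by most of the hits that follow; chan2dev() digs the struct device out of the embedded dma_chan. A minimal sketch of the expansion, assuming d40_err() prefixes dev_err() with the caller's function name as it does elsewhere in this file:

    /* chan_err(d40c, "stop failed\n") resolves roughly to: */
    dev_err(&d40c->chan.dev->device, "[%s] stop failed\n", __func__);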
d40c              634 drivers/dma/ste_dma40.c static int d40_pool_lli_alloc(struct d40_chan *d40c, struct d40_desc *d40d,
d40c              637 drivers/dma/ste_dma40.c 	bool is_log = chan_is_logical(d40c);
d40c              669 drivers/dma/ste_dma40.c 		d40d->lli_pool.dma_addr = dma_map_single(d40c->base->dev,
d40c              674 drivers/dma/ste_dma40.c 		if (dma_mapping_error(d40c->base->dev,
d40c              686 drivers/dma/ste_dma40.c static void d40_pool_lli_free(struct d40_chan *d40c, struct d40_desc *d40d)
d40c              689 drivers/dma/ste_dma40.c 		dma_unmap_single(d40c->base->dev, d40d->lli_pool.dma_addr,
d40c              701 drivers/dma/ste_dma40.c static int d40_lcla_alloc_one(struct d40_chan *d40c,
d40c              708 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags);
d40c              715 drivers/dma/ste_dma40.c 		int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i;
d40c              717 drivers/dma/ste_dma40.c 		if (!d40c->base->lcla_pool.alloc_map[idx]) {
d40c              718 drivers/dma/ste_dma40.c 			d40c->base->lcla_pool.alloc_map[idx] = d40d;
d40c              725 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags);
d40c              730 drivers/dma/ste_dma40.c static int d40_lcla_free_all(struct d40_chan *d40c,
d40c              737 drivers/dma/ste_dma40.c 	if (chan_is_physical(d40c))
d40c              740 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags);
d40c              743 drivers/dma/ste_dma40.c 		int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i;
d40c              745 drivers/dma/ste_dma40.c 		if (d40c->base->lcla_pool.alloc_map[idx] == d40d) {
d40c              746 drivers/dma/ste_dma40.c 			d40c->base->lcla_pool.alloc_map[idx] = NULL;
d40c              755 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags);
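The two LCLA helpers above share one flat alloc_map[]: indexing by phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i gives each physical channel a private window of link slots, so channels cannot collide. A standalone sketch of that bookkeeping; GRP = 128 mirrors the driver's D40_LCLA_LINK_PER_EVENT_GRP, and the loop bounds (start at 1, stop at GRP/2) follow the driver's convention that link index 0 is an end-of-chain marker — treat both as assumptions:

    #include <stddef.h>

    #define GRP   128     /* stands in for D40_LCLA_LINK_PER_EVENT_GRP */
    #define NCHAN 8       /* illustrative physical-channel count */

    static void *alloc_map[NCHAN * GRP];

    /* Claim one free link slot for desc, as d40_lcla_alloc_one() does. */
    static int lcla_alloc_one(int phy, void *desc)
    {
            for (int i = 1; i < GRP / 2; i++) {
                    int idx = phy * GRP + i;

                    if (!alloc_map[idx]) {
                            alloc_map[idx] = desc;
                            return i;
                    }
            }
            return -1;    /* pool exhausted: the driver falls back to LCPA */
    }

    /* Release every slot owned by desc, mirroring d40_lcla_free_all(). */
    static void lcla_free_all(int phy, void *desc)
    {
            for (int i = 1; i < GRP / 2; i++) {
                    int idx = phy * GRP + i;

                    if (alloc_map[idx] == desc)
                            alloc_map[idx] = NULL;
            }
    }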
d40c              766 drivers/dma/ste_dma40.c static struct d40_desc *d40_desc_get(struct d40_chan *d40c)
d40c              770 drivers/dma/ste_dma40.c 	if (!list_empty(&d40c->client)) {
d40c              774 drivers/dma/ste_dma40.c 		list_for_each_entry_safe(d, _d, &d40c->client, node) {
d40c              785 drivers/dma/ste_dma40.c 		desc = kmem_cache_zalloc(d40c->base->desc_slab, GFP_NOWAIT);
d40c              793 drivers/dma/ste_dma40.c static void d40_desc_free(struct d40_chan *d40c, struct d40_desc *d40d)
d40c              796 drivers/dma/ste_dma40.c 	d40_pool_lli_free(d40c, d40d);
d40c              797 drivers/dma/ste_dma40.c 	d40_lcla_free_all(d40c, d40d);
d40c              798 drivers/dma/ste_dma40.c 	kmem_cache_free(d40c->base->desc_slab, d40d);
d40c              801 drivers/dma/ste_dma40.c static void d40_desc_submit(struct d40_chan *d40c, struct d40_desc *desc)
d40c              803 drivers/dma/ste_dma40.c 	list_add_tail(&desc->node, &d40c->active);
d40c              823 drivers/dma/ste_dma40.c static void d40_desc_done(struct d40_chan *d40c, struct d40_desc *desc)
d40c              825 drivers/dma/ste_dma40.c 	list_add_tail(&desc->node, &d40c->done);
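Taken together, d40_desc_get/free/submit/done implement the descriptor lifecycle: a d40_desc migrates through the channel's lists in a fixed order. A sketch of the stations, using the d40_chan list names that appear throughout these hits (prepare_queue and client show up below in d40_term_all and the tasklet):

    /* Stations a d40_desc passes through, in order (sketch): */
    enum d40_desc_station {
            DESC_PREPARE_QUEUE,   /* built by a prep call, not yet submitted */
            DESC_PENDING_QUEUE,   /* tx_submit() -> d40_desc_queue() */
            DESC_QUEUE,           /* issue_pending() splices pending here */
            DESC_ACTIVE,          /* d40_desc_submit() once started on HW */
            DESC_DONE,            /* d40_desc_done() from the TC interrupt */
            DESC_CLIENT,          /* done but unacked; d40_desc_get() may reuse */
    };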
d40c              940 drivers/dma/ste_dma40.c static void d40_desc_load(struct d40_chan *d40c, struct d40_desc *d40d)
d40c              942 drivers/dma/ste_dma40.c 	if (chan_is_physical(d40c)) {
d40c              943 drivers/dma/ste_dma40.c 		d40_phy_lli_load(d40c, d40d);
d40c              946 drivers/dma/ste_dma40.c 		d40_log_lli_to_lcxa(d40c, d40d);
d40c              949 drivers/dma/ste_dma40.c static struct d40_desc *d40_first_active_get(struct d40_chan *d40c)
d40c              951 drivers/dma/ste_dma40.c 	return list_first_entry_or_null(&d40c->active, struct d40_desc, node);
d40c              955 drivers/dma/ste_dma40.c static void d40_desc_queue(struct d40_chan *d40c, struct d40_desc *desc)
d40c              959 drivers/dma/ste_dma40.c 	list_add_tail(&desc->node, &d40c->pending_queue);
d40c              962 drivers/dma/ste_dma40.c static struct d40_desc *d40_first_pending(struct d40_chan *d40c)
d40c              964 drivers/dma/ste_dma40.c 	return list_first_entry_or_null(&d40c->pending_queue, struct d40_desc,
d40c              968 drivers/dma/ste_dma40.c static struct d40_desc *d40_first_queued(struct d40_chan *d40c)
d40c              970 drivers/dma/ste_dma40.c 	return list_first_entry_or_null(&d40c->queue, struct d40_desc, node);
d40c              973 drivers/dma/ste_dma40.c static struct d40_desc *d40_first_done(struct d40_chan *d40c)
d40c              975 drivers/dma/ste_dma40.c 	return list_first_entry_or_null(&d40c->done, struct d40_desc, node);
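All four accessors above are the same peek-without-removing idiom. A self-contained demo of the list_first_entry_or_null() semantics they rely on (minimal re-implementation for illustration only; the kernel's <linux/list.h> provides the real macros):

    #include <stddef.h>
    #include <stdio.h>

    struct list_head { struct list_head *next, *prev; };

    #define LIST_HEAD_INIT(name) { &(name), &(name) }

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    #define list_first_entry_or_null(head, type, member) \
            ((head)->next != (head) ? \
                    container_of((head)->next, type, member) : NULL)

    struct demo_desc {
            int cookie;
            struct list_head node;
    };

    int main(void)
    {
            struct list_head active = LIST_HEAD_INIT(active);
            struct demo_desc d = { .cookie = 42 };

            /* empty list: NULL, exactly like d40_first_active_get() */
            printf("%p\n", (void *)list_first_entry_or_null(&active,
                                            struct demo_desc, node));

            /* tail-insert one entry, as d40_desc_submit() does */
            d.node.prev = active.prev;
            d.node.next = &active;
            active.prev->next = &d.node;
            active.prev = &d.node;

            printf("%d\n", list_first_entry_or_null(&active,
                                            struct demo_desc, node)->cookie);
            return 0;
    }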
d40c             1038 drivers/dma/ste_dma40.c static int __d40_execute_command_phy(struct d40_chan *d40c,
d40c             1049 drivers/dma/ste_dma40.c 		ret = __d40_execute_command_phy(d40c, D40_DMA_SUSPEND_REQ);
d40c             1054 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->base->execmd_lock, flags);
d40c             1056 drivers/dma/ste_dma40.c 	if (d40c->phy_chan->num % 2 == 0)
d40c             1057 drivers/dma/ste_dma40.c 		active_reg = d40c->base->virtbase + D40_DREG_ACTIVE;
d40c             1059 drivers/dma/ste_dma40.c 		active_reg = d40c->base->virtbase + D40_DREG_ACTIVO;
d40c             1063 drivers/dma/ste_dma40.c 			  D40_CHAN_POS_MASK(d40c->phy_chan->num)) >>
d40c             1064 drivers/dma/ste_dma40.c 			D40_CHAN_POS(d40c->phy_chan->num);
d40c             1070 drivers/dma/ste_dma40.c 	wmask = 0xffffffff & ~(D40_CHAN_POS_MASK(d40c->phy_chan->num));
d40c             1071 drivers/dma/ste_dma40.c 	writel(wmask | (command << D40_CHAN_POS(d40c->phy_chan->num)),
d40c             1078 drivers/dma/ste_dma40.c 				  D40_CHAN_POS_MASK(d40c->phy_chan->num)) >>
d40c             1079 drivers/dma/ste_dma40.c 				D40_CHAN_POS(d40c->phy_chan->num);
d40c             1094 drivers/dma/ste_dma40.c 			chan_err(d40c,
d40c             1096 drivers/dma/ste_dma40.c 				d40c->phy_chan->num, d40c->log_num,
d40c             1104 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->base->execmd_lock, flags);
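The fragments above show how a command reaches a physical channel: each channel owns a 2-bit field, even channels in D40_DREG_ACTIVE and odd ones in D40_DREG_ACTIVO, and the write fills every other field with ones so that only the target channel is commanded. A standalone rendering of that bit math; the D40_CHAN_POS definitions are mirrored from the driver headers and should be treated as assumptions:

    #include <stdio.h>
    #include <stdint.h>

    /* Channels n and n+1 share a bit offset because they live in
     * different registers (ACTIVE vs ACTIVO). */
    #define D40_CHAN_POS(chan)      (2 * ((chan) / 2))
    #define D40_CHAN_POS_MASK(chan) (0x3 << D40_CHAN_POS(chan))

    /* Word written by __d40_execute_command_phy(): all-ones in the
     * other channels' fields (which the driver relies on the
     * controller ignoring), the command in this channel's field. */
    static uint32_t command_word(int chan, uint32_t command)
    {
            uint32_t wmask = 0xffffffffu & ~(uint32_t)D40_CHAN_POS_MASK(chan);

            return wmask | (command << D40_CHAN_POS(chan));
    }

    int main(void)
    {
            /* channel 5 is odd: this word targets D40_DREG_ACTIVO */
            printf("0x%08x\n", command_word(5, 0x1 /* D40_DMA_RUN */));
            return 0;
    }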
d40c             1108 drivers/dma/ste_dma40.c static void d40_term_all(struct d40_chan *d40c)
d40c             1114 drivers/dma/ste_dma40.c 	while ((d40d = d40_first_done(d40c))) {
d40c             1116 drivers/dma/ste_dma40.c 		d40_desc_free(d40c, d40d);
d40c             1120 drivers/dma/ste_dma40.c 	while ((d40d = d40_first_active_get(d40c))) {
d40c             1122 drivers/dma/ste_dma40.c 		d40_desc_free(d40c, d40d);
d40c             1126 drivers/dma/ste_dma40.c 	while ((d40d = d40_first_queued(d40c))) {
d40c             1128 drivers/dma/ste_dma40.c 		d40_desc_free(d40c, d40d);
d40c             1132 drivers/dma/ste_dma40.c 	while ((d40d = d40_first_pending(d40c))) {
d40c             1134 drivers/dma/ste_dma40.c 		d40_desc_free(d40c, d40d);
d40c             1138 drivers/dma/ste_dma40.c 	if (!list_empty(&d40c->client))
d40c             1139 drivers/dma/ste_dma40.c 		list_for_each_entry_safe(d40d, _d, &d40c->client, node) {
d40c             1141 drivers/dma/ste_dma40.c 			d40_desc_free(d40c, d40d);
d40c             1145 drivers/dma/ste_dma40.c 	if (!list_empty(&d40c->prepare_queue))
d40c             1147 drivers/dma/ste_dma40.c 					 &d40c->prepare_queue, node) {
d40c             1149 drivers/dma/ste_dma40.c 			d40_desc_free(d40c, d40d);
d40c             1152 drivers/dma/ste_dma40.c 	d40c->pending_tx = 0;
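d40_term_all() drains six lists with one idiom: peek at the head, unlink, free, until the accessor returns NULL. The listing above elides the unlink line because it does not mention d40c; the full loop body looks like this (sketch, assuming the driver's d40_desc_remove() helper, a plain list_del()):

    while ((d40d = d40_first_queued(d40c)) != NULL) {
            d40_desc_remove(d40d);        /* unlink from the queue list */
            d40_desc_free(d40c, d40d);    /* return LLIs and slab object */
    }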
d40c             1155 drivers/dma/ste_dma40.c static void __d40_config_set_event(struct d40_chan *d40c,
d40c             1159 drivers/dma/ste_dma40.c 	void __iomem *addr = chan_base(d40c) + reg;
d40c             1199 drivers/dma/ste_dma40.c 			chan_err(d40c,
d40c             1201 drivers/dma/ste_dma40.c 				"status %x\n", d40c->phy_chan->num,
d40c             1202 drivers/dma/ste_dma40.c 				 d40c->log_num, status);
d40c             1223 drivers/dma/ste_dma40.c 			dev_dbg(chan2dev(d40c),
d40c             1238 drivers/dma/ste_dma40.c static void d40_config_set_event(struct d40_chan *d40c,
d40c             1241 drivers/dma/ste_dma40.c 	u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type);
d40c             1244 drivers/dma/ste_dma40.c 	if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) ||
d40c             1245 drivers/dma/ste_dma40.c 	    (d40c->dma_cfg.dir == DMA_DEV_TO_DEV))
d40c             1246 drivers/dma/ste_dma40.c 		__d40_config_set_event(d40c, event_type, event,
d40c             1249 drivers/dma/ste_dma40.c 	if (d40c->dma_cfg.dir !=  DMA_DEV_TO_MEM)
d40c             1250 drivers/dma/ste_dma40.c 		__d40_config_set_event(d40c, event_type, event,
d40c             1254 drivers/dma/ste_dma40.c static u32 d40_chan_has_events(struct d40_chan *d40c)
d40c             1256 drivers/dma/ste_dma40.c 	void __iomem *chanbase = chan_base(d40c);
d40c             1266 drivers/dma/ste_dma40.c __d40_execute_command_log(struct d40_chan *d40c, enum d40_command command)
d40c             1273 drivers/dma/ste_dma40.c 	if (d40c->phy_chan->num % 2 == 0)
d40c             1274 drivers/dma/ste_dma40.c 		active_reg = d40c->base->virtbase + D40_DREG_ACTIVE;
d40c             1276 drivers/dma/ste_dma40.c 		active_reg = d40c->base->virtbase + D40_DREG_ACTIVO;
d40c             1279 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->phy_chan->lock, flags);
d40c             1286 drivers/dma/ste_dma40.c 				 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >>
d40c             1287 drivers/dma/ste_dma40.c 				 D40_CHAN_POS(d40c->phy_chan->num);
d40c             1290 drivers/dma/ste_dma40.c 			d40_config_set_event(d40c, D40_SUSPEND_REQ_EVENTLINE);
d40c             1292 drivers/dma/ste_dma40.c 			d40_config_set_event(d40c, D40_DEACTIVATE_EVENTLINE);
d40c             1294 drivers/dma/ste_dma40.c 		if (!d40_chan_has_events(d40c) && (command == D40_DMA_STOP))
d40c             1295 drivers/dma/ste_dma40.c 			ret = __d40_execute_command_phy(d40c, command);
d40c             1301 drivers/dma/ste_dma40.c 		d40_config_set_event(d40c, D40_ACTIVATE_EVENTLINE);
d40c             1302 drivers/dma/ste_dma40.c 		ret = __d40_execute_command_phy(d40c, command);
d40c             1310 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->phy_chan->lock, flags);
d40c             1314 drivers/dma/ste_dma40.c static int d40_channel_execute_command(struct d40_chan *d40c,
d40c             1317 drivers/dma/ste_dma40.c 	if (chan_is_logical(d40c))
d40c             1318 drivers/dma/ste_dma40.c 		return __d40_execute_command_log(d40c, command);
d40c             1320 drivers/dma/ste_dma40.c 		return __d40_execute_command_phy(d40c, command);
d40c             1323 drivers/dma/ste_dma40.c static u32 d40_get_prmo(struct d40_chan *d40c)
d40c             1342 drivers/dma/ste_dma40.c 	if (chan_is_physical(d40c))
d40c             1343 drivers/dma/ste_dma40.c 		return phy_map[d40c->dma_cfg.mode_opt];
d40c             1345 drivers/dma/ste_dma40.c 		return log_map[d40c->dma_cfg.mode_opt];
d40c             1348 drivers/dma/ste_dma40.c static void d40_config_write(struct d40_chan *d40c)
d40c             1354 drivers/dma/ste_dma40.c 	addr_base = (d40c->phy_chan->num % 2) * 4;
d40c             1356 drivers/dma/ste_dma40.c 	var = ((u32)(chan_is_logical(d40c)) + 1) <<
d40c             1357 drivers/dma/ste_dma40.c 		D40_CHAN_POS(d40c->phy_chan->num);
d40c             1358 drivers/dma/ste_dma40.c 	writel(var, d40c->base->virtbase + D40_DREG_PRMSE + addr_base);
d40c             1361 drivers/dma/ste_dma40.c 	var = d40_get_prmo(d40c) << D40_CHAN_POS(d40c->phy_chan->num);
d40c             1363 drivers/dma/ste_dma40.c 	writel(var, d40c->base->virtbase + D40_DREG_PRMOE + addr_base);
d40c             1365 drivers/dma/ste_dma40.c 	if (chan_is_logical(d40c)) {
d40c             1366 drivers/dma/ste_dma40.c 		int lidx = (d40c->phy_chan->num << D40_SREG_ELEM_LOG_LIDX_POS)
d40c             1368 drivers/dma/ste_dma40.c 		void __iomem *chanbase = chan_base(d40c);
d40c             1371 drivers/dma/ste_dma40.c 		writel(d40c->src_def_cfg, chanbase + D40_CHAN_REG_SSCFG);
d40c             1372 drivers/dma/ste_dma40.c 		writel(d40c->dst_def_cfg, chanbase + D40_CHAN_REG_SDCFG);
d40c             1384 drivers/dma/ste_dma40.c static u32 d40_residue(struct d40_chan *d40c)
d40c             1388 drivers/dma/ste_dma40.c 	if (chan_is_logical(d40c))
d40c             1389 drivers/dma/ste_dma40.c 		num_elt = (readl(&d40c->lcpa->lcsp2) & D40_MEM_LCSP2_ECNT_MASK)
d40c             1392 drivers/dma/ste_dma40.c 		u32 val = readl(chan_base(d40c) + D40_CHAN_REG_SDELT);
d40c             1397 drivers/dma/ste_dma40.c 	return num_elt * d40c->dma_cfg.dst_info.data_width;
d40c             1400 drivers/dma/ste_dma40.c static bool d40_tx_is_linked(struct d40_chan *d40c)
d40c             1404 drivers/dma/ste_dma40.c 	if (chan_is_logical(d40c))
d40c             1405 drivers/dma/ste_dma40.c 		is_link = readl(&d40c->lcpa->lcsp3) &  D40_MEM_LCSP3_DLOS_MASK;
d40c             1407 drivers/dma/ste_dma40.c 		is_link = readl(chan_base(d40c) + D40_CHAN_REG_SDLNK)
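d40_residue() converts what the hardware reports, a remaining element count (ECNT), into bytes. In this version of the driver data_width is already a byte count (enum dma_slave_buswidth), so the conversion is a single multiply:

    /* bytes left = remaining elements * bytes per element (sketch) */
    static unsigned int residue_bytes(unsigned int num_elt,
                                      unsigned int data_width)
    {
            return num_elt * data_width;
    }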
d40c             1415 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = container_of(chan, struct d40_chan, chan);
d40c             1419 drivers/dma/ste_dma40.c 	if (d40c->phy_chan == NULL) {
d40c             1420 drivers/dma/ste_dma40.c 		chan_err(d40c, "Channel is not allocated!\n");
d40c             1424 drivers/dma/ste_dma40.c 	if (!d40c->busy)
d40c             1427 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             1428 drivers/dma/ste_dma40.c 	pm_runtime_get_sync(d40c->base->dev);
d40c             1430 drivers/dma/ste_dma40.c 	res = d40_channel_execute_command(d40c, D40_DMA_SUSPEND_REQ);
d40c             1432 drivers/dma/ste_dma40.c 	pm_runtime_mark_last_busy(d40c->base->dev);
d40c             1433 drivers/dma/ste_dma40.c 	pm_runtime_put_autosuspend(d40c->base->dev);
d40c             1434 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             1440 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = container_of(chan, struct d40_chan, chan);
d40c             1444 drivers/dma/ste_dma40.c 	if (d40c->phy_chan == NULL) {
d40c             1445 drivers/dma/ste_dma40.c 		chan_err(d40c, "Channel is not allocated!\n");
d40c             1449 drivers/dma/ste_dma40.c 	if (!d40c->busy)
d40c             1452 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             1453 drivers/dma/ste_dma40.c 	pm_runtime_get_sync(d40c->base->dev);
d40c             1456 drivers/dma/ste_dma40.c 	if (d40_residue(d40c) || d40_tx_is_linked(d40c))
d40c             1457 drivers/dma/ste_dma40.c 		res = d40_channel_execute_command(d40c, D40_DMA_RUN);
d40c             1459 drivers/dma/ste_dma40.c 	pm_runtime_mark_last_busy(d40c->base->dev);
d40c             1460 drivers/dma/ste_dma40.c 	pm_runtime_put_autosuspend(d40c->base->dev);
d40c             1461 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             1467 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = container_of(tx->chan,
d40c             1474 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             1476 drivers/dma/ste_dma40.c 	d40_desc_queue(d40c, d40d);
d40c             1477 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             1482 drivers/dma/ste_dma40.c static int d40_start(struct d40_chan *d40c)
d40c             1484 drivers/dma/ste_dma40.c 	return d40_channel_execute_command(d40c, D40_DMA_RUN);
d40c             1487 drivers/dma/ste_dma40.c static struct d40_desc *d40_queue_start(struct d40_chan *d40c)
d40c             1493 drivers/dma/ste_dma40.c 	d40d = d40_first_queued(d40c);
d40c             1496 drivers/dma/ste_dma40.c 		if (!d40c->busy) {
d40c             1497 drivers/dma/ste_dma40.c 			d40c->busy = true;
d40c             1498 drivers/dma/ste_dma40.c 			pm_runtime_get_sync(d40c->base->dev);
d40c             1505 drivers/dma/ste_dma40.c 		d40_desc_submit(d40c, d40d);
d40c             1508 drivers/dma/ste_dma40.c 		d40_desc_load(d40c, d40d);
d40c             1511 drivers/dma/ste_dma40.c 		err = d40_start(d40c);
d40c             1521 drivers/dma/ste_dma40.c static void dma_tc_handle(struct d40_chan *d40c)
d40c             1526 drivers/dma/ste_dma40.c 	d40d = d40_first_active_get(d40c);
d40c             1539 drivers/dma/ste_dma40.c 		    && !d40_tx_is_linked(d40c)
d40c             1540 drivers/dma/ste_dma40.c 		    && !d40_residue(d40c)) {
d40c             1541 drivers/dma/ste_dma40.c 			d40_lcla_free_all(d40c, d40d);
d40c             1542 drivers/dma/ste_dma40.c 			d40_desc_load(d40c, d40d);
d40c             1543 drivers/dma/ste_dma40.c 			(void) d40_start(d40c);
d40c             1549 drivers/dma/ste_dma40.c 		d40_lcla_free_all(d40c, d40d);
d40c             1552 drivers/dma/ste_dma40.c 			d40_desc_load(d40c, d40d);
d40c             1554 drivers/dma/ste_dma40.c 			(void) d40_start(d40c);
d40c             1558 drivers/dma/ste_dma40.c 		if (d40_queue_start(d40c) == NULL) {
d40c             1559 drivers/dma/ste_dma40.c 			d40c->busy = false;
d40c             1561 drivers/dma/ste_dma40.c 			pm_runtime_mark_last_busy(d40c->base->dev);
d40c             1562 drivers/dma/ste_dma40.c 			pm_runtime_put_autosuspend(d40c->base->dev);
d40c             1566 drivers/dma/ste_dma40.c 		d40_desc_done(d40c, d40d);
d40c             1569 drivers/dma/ste_dma40.c 	d40c->pending_tx++;
d40c             1570 drivers/dma/ste_dma40.c 	tasklet_schedule(&d40c->tasklet);
d40c             1576 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = (struct d40_chan *) data;
d40c             1582 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             1585 drivers/dma/ste_dma40.c 	d40d = d40_first_done(d40c);
d40c             1588 drivers/dma/ste_dma40.c 		d40d = d40_first_active_get(d40c);
d40c             1600 drivers/dma/ste_dma40.c 	if (d40c->pending_tx == 0) {
d40c             1601 drivers/dma/ste_dma40.c 		spin_unlock_irqrestore(&d40c->lock, flags);
d40c             1612 drivers/dma/ste_dma40.c 			d40_desc_free(d40c, d40d);
d40c             1615 drivers/dma/ste_dma40.c 			d40_lcla_free_all(d40c, d40d);
d40c             1616 drivers/dma/ste_dma40.c 			list_add_tail(&d40d->node, &d40c->client);
d40c             1621 drivers/dma/ste_dma40.c 	d40c->pending_tx--;
d40c             1623 drivers/dma/ste_dma40.c 	if (d40c->pending_tx)
d40c             1624 drivers/dma/ste_dma40.c 		tasklet_schedule(&d40c->tasklet);
d40c             1626 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             1634 drivers/dma/ste_dma40.c 	if (d40c->pending_tx > 0)
d40c             1635 drivers/dma/ste_dma40.c 		d40c->pending_tx--;
d40c             1636 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
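The completion path above is split in two: dma_tc_handle() runs in interrupt context, only counts work (pending_tx++) and schedules the tasklet; dma_tasklet() completes one descriptor per run and re-arms itself while pending_tx is nonzero. A miniature of that deferral pattern, with plain C stand-ins for the tasklet API:

    #include <stdio.h>

    static int pending_tx;
    static int tasklet_scheduled;

    static void tasklet_schedule_demo(void) { tasklet_scheduled = 1; }

    /* irq half: count the work, defer it (mirrors dma_tc_handle()) */
    static void tc_handle_demo(void)
    {
            pending_tx++;
            tasklet_schedule_demo();
    }

    /* deferred half: one completion per run, re-arm while work remains
     * (mirrors the tail of dma_tasklet()) */
    static void tasklet_demo(void)
    {
            printf("complete one descriptor\n");
            pending_tx--;
            if (pending_tx)
                    tasklet_schedule_demo();
    }

    int main(void)
    {
            tc_handle_demo();
            tc_handle_demo();
            while (tasklet_scheduled) {
                    tasklet_scheduled = 0;
                    tasklet_demo();
            }
            return 0;
    }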
d40c             1645 drivers/dma/ste_dma40.c 	struct d40_chan *d40c;
d40c             1671 drivers/dma/ste_dma40.c 			d40c = base->lookup_phy_chans[idx];
d40c             1673 drivers/dma/ste_dma40.c 			d40c = base->lookup_log_chans[il[row].offset + idx];
d40c             1675 drivers/dma/ste_dma40.c 		if (!d40c) {
d40c             1686 drivers/dma/ste_dma40.c 		spin_lock(&d40c->lock);
d40c             1689 drivers/dma/ste_dma40.c 			dma_tc_handle(d40c);
d40c             1694 drivers/dma/ste_dma40.c 		spin_unlock(&d40c->lock);
d40c             1702 drivers/dma/ste_dma40.c static int d40_validate_conf(struct d40_chan *d40c,
d40c             1709 drivers/dma/ste_dma40.c 		chan_err(d40c, "Invalid direction.\n");
d40c             1713 drivers/dma/ste_dma40.c 	if ((is_log && conf->dev_type > d40c->base->num_log_chans)  ||
d40c             1714 drivers/dma/ste_dma40.c 	    (!is_log && conf->dev_type > d40c->base->num_phy_chans) ||
d40c             1716 drivers/dma/ste_dma40.c 		chan_err(d40c, "Invalid device type (%d)\n", conf->dev_type);
d40c             1725 drivers/dma/ste_dma40.c 		chan_err(d40c, "periph to periph not supported\n");
d40c             1738 drivers/dma/ste_dma40.c 		chan_err(d40c, "src (burst x width) != dst (burst x width)\n");
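The last check above enforces that both halves of a transfer move the same number of bytes per data phase. The invariant in isolation:

    /* src and dst must agree on bytes per transfer (sketch) */
    static int psize_matches(unsigned int src_burst, unsigned int src_width,
                             unsigned int dst_burst, unsigned int dst_width)
    {
            return src_burst * src_width == dst_burst * dst_width;
    }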
d40c             1832 drivers/dma/ste_dma40.c static int d40_allocate_channel(struct d40_chan *d40c, bool *first_phy_user)
d40c             1834 drivers/dma/ste_dma40.c 	int dev_type = d40c->dma_cfg.dev_type;
d40c             1843 drivers/dma/ste_dma40.c 	bool is_log = d40c->dma_cfg.mode == STEDMA40_MODE_LOGICAL;
d40c             1845 drivers/dma/ste_dma40.c 	phys = d40c->base->phy_res;
d40c             1846 drivers/dma/ste_dma40.c 	num_phy_chans = d40c->base->num_phy_chans;
d40c             1848 drivers/dma/ste_dma40.c 	if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) {
d40c             1851 drivers/dma/ste_dma40.c 	} else if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV ||
d40c             1852 drivers/dma/ste_dma40.c 		   d40c->dma_cfg.dir == DMA_MEM_TO_MEM) {
d40c             1863 drivers/dma/ste_dma40.c 		if (d40c->dma_cfg.dir == DMA_MEM_TO_MEM) {
d40c             1865 drivers/dma/ste_dma40.c 			if (d40c->dma_cfg.use_fixed_channel) {
d40c             1866 drivers/dma/ste_dma40.c 				i = d40c->dma_cfg.phy_channel;
d40c             1880 drivers/dma/ste_dma40.c 			for (j = 0; j < d40c->base->num_phy_chans; j += 8) {
d40c             1893 drivers/dma/ste_dma40.c 		d40c->phy_chan = &phys[i];
d40c             1894 drivers/dma/ste_dma40.c 		d40c->log_num = D40_PHY_CHAN;
d40c             1901 drivers/dma/ste_dma40.c 	for (j = 0; j < d40c->base->num_phy_chans; j += 8) {
d40c             1904 drivers/dma/ste_dma40.c 		if (d40c->dma_cfg.use_fixed_channel) {
d40c             1905 drivers/dma/ste_dma40.c 			i = d40c->dma_cfg.phy_channel;
d40c             1908 drivers/dma/ste_dma40.c 				dev_err(chan2dev(d40c),
d40c             1917 drivers/dma/ste_dma40.c 			dev_err(chan2dev(d40c),
d40c             1946 drivers/dma/ste_dma40.c 	d40c->phy_chan = &phys[i];
d40c             1947 drivers/dma/ste_dma40.c 	d40c->log_num = log_num;
d40c             1951 drivers/dma/ste_dma40.c 		d40c->base->lookup_log_chans[d40c->log_num] = d40c;
d40c             1953 drivers/dma/ste_dma40.c 		d40c->base->lookup_phy_chans[d40c->phy_chan->num] = d40c;
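d40_allocate_channel() either honors a fixed request (use_fixed_channel + phy_channel) or scans for a free channel; the real search additionally strides by event group (the j += 8 loops above). The core decision, stripped of that striding, purely illustrative:

    #include <stdbool.h>

    /* fixed request wins, else first free channel; -1 stands in for
     * the driver's -EINVAL (sketch) */
    static int pick_phy_chan(const bool *is_free, int num_phy_chans,
                             bool use_fixed, int fixed)
    {
            if (use_fixed)
                    return is_free[fixed] ? fixed : -1;

            for (int i = 0; i < num_phy_chans; i++)
                    if (is_free[i])
                            return i;
            return -1;
    }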
d40c             1959 drivers/dma/ste_dma40.c static int d40_config_memcpy(struct d40_chan *d40c)
d40c             1961 drivers/dma/ste_dma40.c 	dma_cap_mask_t cap = d40c->chan.device->cap_mask;
d40c             1964 drivers/dma/ste_dma40.c 		d40c->dma_cfg = dma40_memcpy_conf_log;
d40c             1965 drivers/dma/ste_dma40.c 		d40c->dma_cfg.dev_type = dma40_memcpy_channels[d40c->chan.chan_id];
d40c             1967 drivers/dma/ste_dma40.c 		d40_log_cfg(&d40c->dma_cfg,
d40c             1968 drivers/dma/ste_dma40.c 			    &d40c->log_def.lcsp1, &d40c->log_def.lcsp3);
d40c             1972 drivers/dma/ste_dma40.c 		d40c->dma_cfg = dma40_memcpy_conf_phy;
d40c             1975 drivers/dma/ste_dma40.c 		d40c->dst_def_cfg |= BIT(D40_SREG_CFG_TIM_POS);
d40c             1978 drivers/dma/ste_dma40.c 		d40c->src_def_cfg |= BIT(D40_SREG_CFG_EIM_POS);
d40c             1979 drivers/dma/ste_dma40.c 		d40c->dst_def_cfg |= BIT(D40_SREG_CFG_EIM_POS);
d40c             1982 drivers/dma/ste_dma40.c 		chan_err(d40c, "No memcpy\n");
d40c             1989 drivers/dma/ste_dma40.c static int d40_free_dma(struct d40_chan *d40c)
d40c             1993 drivers/dma/ste_dma40.c 	u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type);
d40c             1994 drivers/dma/ste_dma40.c 	struct d40_phy_res *phy = d40c->phy_chan;
d40c             1998 drivers/dma/ste_dma40.c 	d40_term_all(d40c);
d40c             2001 drivers/dma/ste_dma40.c 		chan_err(d40c, "phy == null\n");
d40c             2007 drivers/dma/ste_dma40.c 		chan_err(d40c, "channel already free\n");
d40c             2011 drivers/dma/ste_dma40.c 	if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV ||
d40c             2012 drivers/dma/ste_dma40.c 	    d40c->dma_cfg.dir == DMA_MEM_TO_MEM)
d40c             2014 drivers/dma/ste_dma40.c 	else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM)
d40c             2017 drivers/dma/ste_dma40.c 		chan_err(d40c, "Unknown direction\n");
d40c             2021 drivers/dma/ste_dma40.c 	pm_runtime_get_sync(d40c->base->dev);
d40c             2022 drivers/dma/ste_dma40.c 	res = d40_channel_execute_command(d40c, D40_DMA_STOP);
d40c             2024 drivers/dma/ste_dma40.c 		chan_err(d40c, "stop failed\n");
d40c             2028 drivers/dma/ste_dma40.c 	d40_alloc_mask_free(phy, is_src, chan_is_logical(d40c) ? event : 0);
d40c             2030 drivers/dma/ste_dma40.c 	if (chan_is_logical(d40c))
d40c             2031 drivers/dma/ste_dma40.c 		d40c->base->lookup_log_chans[d40c->log_num] = NULL;
d40c             2033 drivers/dma/ste_dma40.c 		d40c->base->lookup_phy_chans[phy->num] = NULL;
d40c             2035 drivers/dma/ste_dma40.c 	if (d40c->busy) {
d40c             2036 drivers/dma/ste_dma40.c 		pm_runtime_mark_last_busy(d40c->base->dev);
d40c             2037 drivers/dma/ste_dma40.c 		pm_runtime_put_autosuspend(d40c->base->dev);
d40c             2040 drivers/dma/ste_dma40.c 	d40c->busy = false;
d40c             2041 drivers/dma/ste_dma40.c 	d40c->phy_chan = NULL;
d40c             2042 drivers/dma/ste_dma40.c 	d40c->configured = false;
d40c             2044 drivers/dma/ste_dma40.c 	pm_runtime_mark_last_busy(d40c->base->dev);
d40c             2045 drivers/dma/ste_dma40.c 	pm_runtime_put_autosuspend(d40c->base->dev);
d40c             2049 drivers/dma/ste_dma40.c static bool d40_is_paused(struct d40_chan *d40c)
d40c             2051 drivers/dma/ste_dma40.c 	void __iomem *chanbase = chan_base(d40c);
d40c             2056 drivers/dma/ste_dma40.c 	u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type);
d40c             2058 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             2060 drivers/dma/ste_dma40.c 	if (chan_is_physical(d40c)) {
d40c             2061 drivers/dma/ste_dma40.c 		if (d40c->phy_chan->num % 2 == 0)
d40c             2062 drivers/dma/ste_dma40.c 			active_reg = d40c->base->virtbase + D40_DREG_ACTIVE;
d40c             2064 drivers/dma/ste_dma40.c 			active_reg = d40c->base->virtbase + D40_DREG_ACTIVO;
d40c             2067 drivers/dma/ste_dma40.c 			  D40_CHAN_POS_MASK(d40c->phy_chan->num)) >>
d40c             2068 drivers/dma/ste_dma40.c 			D40_CHAN_POS(d40c->phy_chan->num);
d40c             2074 drivers/dma/ste_dma40.c 	if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV ||
d40c             2075 drivers/dma/ste_dma40.c 	    d40c->dma_cfg.dir == DMA_MEM_TO_MEM) {
d40c             2077 drivers/dma/ste_dma40.c 	} else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) {
d40c             2080 drivers/dma/ste_dma40.c 		chan_err(d40c, "Unknown direction\n");
d40c             2090 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             2097 drivers/dma/ste_dma40.c 	struct d40_chan *d40c =
d40c             2102 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             2103 drivers/dma/ste_dma40.c 	bytes_left = d40_residue(d40c);
d40c             2104 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             2275 drivers/dma/ste_dma40.c 	struct d40_chan *d40c =
d40c             2280 drivers/dma/ste_dma40.c 		err = d40_validate_conf(d40c, info);
d40c             2282 drivers/dma/ste_dma40.c 			d40c->dma_cfg = *info;
d40c             2284 drivers/dma/ste_dma40.c 		err = d40_config_memcpy(d40c);
d40c             2287 drivers/dma/ste_dma40.c 		d40c->configured = true;
d40c             2293 drivers/dma/ste_dma40.c static void __d40_set_prio_rt(struct d40_chan *d40c, int dev_type, bool src)
d40c             2295 drivers/dma/ste_dma40.c 	bool realtime = d40c->dma_cfg.realtime;
d40c             2296 drivers/dma/ste_dma40.c 	bool highprio = d40c->dma_cfg.high_priority;
d40c             2302 drivers/dma/ste_dma40.c 	struct d40_gen_dmac *dmac = &d40c->base->gen_dmac;
d40c             2313 drivers/dma/ste_dma40.c 	if (!src && chan_is_logical(d40c))
d40c             2322 drivers/dma/ste_dma40.c 	writel(bit, d40c->base->virtbase + prioreg + group * 4);
d40c             2323 drivers/dma/ste_dma40.c 	writel(bit, d40c->base->virtbase + rtreg + group * 4);
d40c             2326 drivers/dma/ste_dma40.c static void d40_set_prio_realtime(struct d40_chan *d40c)
d40c             2328 drivers/dma/ste_dma40.c 	if (d40c->base->rev < 3)
d40c             2331 drivers/dma/ste_dma40.c 	if ((d40c->dma_cfg.dir ==  DMA_DEV_TO_MEM) ||
d40c             2332 drivers/dma/ste_dma40.c 	    (d40c->dma_cfg.dir == DMA_DEV_TO_DEV))
d40c             2333 drivers/dma/ste_dma40.c 		__d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, true);
d40c             2335 drivers/dma/ste_dma40.c 	if ((d40c->dma_cfg.dir ==  DMA_MEM_TO_DEV) ||
d40c             2336 drivers/dma/ste_dma40.c 	    (d40c->dma_cfg.dir == DMA_DEV_TO_DEV))
d40c             2337 drivers/dma/ste_dma40.c 		__d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, false);
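The priority/realtime writes above address per-event-group registers: a dev_type encodes both an event group and an event line within it, and the group selects a 32-bit register at a 4-byte stride (the "prioreg + group * 4" above). The decoding, with the D40_TYPE_TO_* definitions mirrored from the driver; treat them as assumptions:

    #define D40_TYPE_TO_GROUP(type) ((type) / 16)
    #define D40_TYPE_TO_EVENT(type) ((type) % 16)

    /* byte offset of a group's prio/rt register from its base */
    static unsigned int prio_rt_offset(unsigned int reg_base, int dev_type)
    {
            return reg_base + D40_TYPE_TO_GROUP(dev_type) * 4;
    }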
d40c             2393 drivers/dma/ste_dma40.c 	struct d40_chan *d40c =
d40c             2396 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             2401 drivers/dma/ste_dma40.c 	if (!d40c->configured) {
d40c             2402 drivers/dma/ste_dma40.c 		err = d40_config_memcpy(d40c);
d40c             2404 drivers/dma/ste_dma40.c 			chan_err(d40c, "Failed to configure memcpy channel\n");
d40c             2409 drivers/dma/ste_dma40.c 	err = d40_allocate_channel(d40c, &is_free_phy);
d40c             2411 drivers/dma/ste_dma40.c 		chan_err(d40c, "Failed to allocate channel\n");
d40c             2412 drivers/dma/ste_dma40.c 		d40c->configured = false;
d40c             2416 drivers/dma/ste_dma40.c 	pm_runtime_get_sync(d40c->base->dev);
d40c             2418 drivers/dma/ste_dma40.c 	d40_set_prio_realtime(d40c);
d40c             2420 drivers/dma/ste_dma40.c 	if (chan_is_logical(d40c)) {
d40c             2421 drivers/dma/ste_dma40.c 		if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM)
d40c             2422 drivers/dma/ste_dma40.c 			d40c->lcpa = d40c->base->lcpa_base +
d40c             2423 drivers/dma/ste_dma40.c 				d40c->dma_cfg.dev_type * D40_LCPA_CHAN_SIZE;
d40c             2425 drivers/dma/ste_dma40.c 			d40c->lcpa = d40c->base->lcpa_base +
d40c             2426 drivers/dma/ste_dma40.c 				d40c->dma_cfg.dev_type *
d40c             2430 drivers/dma/ste_dma40.c 		d40c->src_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS);
d40c             2431 drivers/dma/ste_dma40.c 		d40c->dst_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS);
d40c             2434 drivers/dma/ste_dma40.c 	dev_dbg(chan2dev(d40c), "allocated %s channel (phy %d%s)\n",
d40c             2435 drivers/dma/ste_dma40.c 		 chan_is_logical(d40c) ? "logical" : "physical",
d40c             2436 drivers/dma/ste_dma40.c 		 d40c->phy_chan->num,
d40c             2437 drivers/dma/ste_dma40.c 		 d40c->dma_cfg.use_fixed_channel ? ", fixed" : "");
d40c             2446 drivers/dma/ste_dma40.c 		d40_config_write(d40c);
d40c             2448 drivers/dma/ste_dma40.c 	pm_runtime_mark_last_busy(d40c->base->dev);
d40c             2449 drivers/dma/ste_dma40.c 	pm_runtime_put_autosuspend(d40c->base->dev);
d40c             2450 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
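For a logical channel, alloc_chan_resources points d40c->lcpa at the channel's slot in the shared LCPA: one fixed-size slot per device type, with the destination half a small delta into the slot (DMA_DEV_TO_MEM uses the source half at offset 0). Sketch of the addressing; the slot size (32) and delta (8) are taken from the driver headers and should be treated as assumptions:

    #define D40_LCPA_CHAN_SIZE       32   /* assumed, per driver headers */
    #define D40_LCPA_CHAN_DST_DELTA   8   /* assumed, per driver headers */

    /* LCPA slot for a device type; dst halves sit DST_DELTA in (sketch) */
    static void *lcpa_slot(void *lcpa_base, int dev_type, int is_dst)
    {
            return (char *)lcpa_base +
                   dev_type * D40_LCPA_CHAN_SIZE +
                   (is_dst ? D40_LCPA_CHAN_DST_DELTA : 0);
    }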
d40c             2456 drivers/dma/ste_dma40.c 	struct d40_chan *d40c =
d40c             2461 drivers/dma/ste_dma40.c 	if (d40c->phy_chan == NULL) {
d40c             2462 drivers/dma/ste_dma40.c 		chan_err(d40c, "Cannot free unallocated channel\n");
d40c             2466 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             2468 drivers/dma/ste_dma40.c 	err = d40_free_dma(d40c);
d40c             2471 drivers/dma/ste_dma40.c 		chan_err(d40c, "Failed to free channel\n");
d40c             2472 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             2542 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = container_of(chan, struct d40_chan, chan);
d40c             2545 drivers/dma/ste_dma40.c 	if (d40c->phy_chan == NULL) {
d40c             2546 drivers/dma/ste_dma40.c 		chan_err(d40c, "Cannot read status of unallocated channel\n");
d40c             2554 drivers/dma/ste_dma40.c 	if (d40_is_paused(d40c))
d40c             2562 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = container_of(chan, struct d40_chan, chan);
d40c             2565 drivers/dma/ste_dma40.c 	if (d40c->phy_chan == NULL) {
d40c             2566 drivers/dma/ste_dma40.c 		chan_err(d40c, "Channel is not allocated!\n");
d40c             2570 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             2572 drivers/dma/ste_dma40.c 	list_splice_tail_init(&d40c->pending_queue, &d40c->queue);
d40c             2575 drivers/dma/ste_dma40.c 	if (!d40c->busy)
d40c             2576 drivers/dma/ste_dma40.c 		(void) d40_queue_start(d40c);
d40c             2578 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             2584 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = container_of(chan, struct d40_chan, chan);
d40c             2587 drivers/dma/ste_dma40.c 	if (d40c->phy_chan == NULL) {
d40c             2588 drivers/dma/ste_dma40.c 		chan_err(d40c, "Channel is not allocated!\n");
d40c             2592 drivers/dma/ste_dma40.c 	spin_lock_irqsave(&d40c->lock, flags);
d40c             2594 drivers/dma/ste_dma40.c 	pm_runtime_get_sync(d40c->base->dev);
d40c             2595 drivers/dma/ste_dma40.c 	ret = d40_channel_execute_command(d40c, D40_DMA_STOP);
d40c             2597 drivers/dma/ste_dma40.c 		chan_err(d40c, "Failed to stop channel\n");
d40c             2599 drivers/dma/ste_dma40.c 	d40_term_all(d40c);
d40c             2600 drivers/dma/ste_dma40.c 	pm_runtime_mark_last_busy(d40c->base->dev);
d40c             2601 drivers/dma/ste_dma40.c 	pm_runtime_put_autosuspend(d40c->base->dev);
d40c             2602 drivers/dma/ste_dma40.c 	if (d40c->busy) {
d40c             2603 drivers/dma/ste_dma40.c 		pm_runtime_mark_last_busy(d40c->base->dev);
d40c             2604 drivers/dma/ste_dma40.c 		pm_runtime_put_autosuspend(d40c->base->dev);
d40c             2606 drivers/dma/ste_dma40.c 	d40c->busy = false;
d40c             2608 drivers/dma/ste_dma40.c 	spin_unlock_irqrestore(&d40c->lock, flags);
d40c             2613 drivers/dma/ste_dma40.c dma40_config_to_halfchannel(struct d40_chan *d40c,
d40c             2619 drivers/dma/ste_dma40.c 	if (chan_is_logical(d40c)) {
d40c             2648 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = container_of(chan, struct d40_chan, chan);
d40c             2650 drivers/dma/ste_dma40.c 	memcpy(&d40c->slave_config, config, sizeof(*config));
d40c             2660 drivers/dma/ste_dma40.c 	struct d40_chan *d40c = container_of(chan, struct d40_chan, chan);
d40c             2661 drivers/dma/ste_dma40.c 	struct stedma40_chan_cfg *cfg = &d40c->dma_cfg;
d40c             2667 drivers/dma/ste_dma40.c 	if (d40c->phy_chan == NULL) {
d40c             2668 drivers/dma/ste_dma40.c 		chan_err(d40c, "Channel is not allocated!\n");
d40c             2681 drivers/dma/ste_dma40.c 			dev_dbg(d40c->base->dev,
d40c             2697 drivers/dma/ste_dma40.c 			dev_dbg(d40c->base->dev,
d40c             2709 drivers/dma/ste_dma40.c 		dev_err(d40c->base->dev,
d40c             2716 drivers/dma/ste_dma40.c 		dev_err(d40c->base->dev, "no address supplied\n");
d40c             2721 drivers/dma/ste_dma40.c 		dev_err(d40c->base->dev,
d40c             2750 drivers/dma/ste_dma40.c 	ret = dma40_config_to_halfchannel(d40c, &cfg->src_info,
d40c             2755 drivers/dma/ste_dma40.c 	ret = dma40_config_to_halfchannel(d40c, &cfg->dst_info,
d40c             2761 drivers/dma/ste_dma40.c 	if (chan_is_logical(d40c))
d40c             2762 drivers/dma/ste_dma40.c 		d40_log_cfg(cfg, &d40c->log_def.lcsp1, &d40c->log_def.lcsp3);
d40c             2764 drivers/dma/ste_dma40.c 		d40_phy_cfg(cfg, &d40c->src_def_cfg, &d40c->dst_def_cfg);
d40c             2767 drivers/dma/ste_dma40.c 	d40c->runtime_addr = config_addr;
d40c             2768 drivers/dma/ste_dma40.c 	d40c->runtime_direction = direction;
d40c             2769 drivers/dma/ste_dma40.c 	dev_dbg(d40c->base->dev,
d40c             2787 drivers/dma/ste_dma40.c 	struct d40_chan *d40c;
d40c             2792 drivers/dma/ste_dma40.c 		d40c = &chans[i];
d40c             2793 drivers/dma/ste_dma40.c 		d40c->base = base;
d40c             2794 drivers/dma/ste_dma40.c 		d40c->chan.device = dma;
d40c             2796 drivers/dma/ste_dma40.c 		spin_lock_init(&d40c->lock);
d40c             2798 drivers/dma/ste_dma40.c 		d40c->log_num = D40_PHY_CHAN;
d40c             2800 drivers/dma/ste_dma40.c 		INIT_LIST_HEAD(&d40c->done);
d40c             2801 drivers/dma/ste_dma40.c 		INIT_LIST_HEAD(&d40c->active);
d40c             2802 drivers/dma/ste_dma40.c 		INIT_LIST_HEAD(&d40c->queue);
d40c             2803 drivers/dma/ste_dma40.c 		INIT_LIST_HEAD(&d40c->pending_queue);
d40c             2804 drivers/dma/ste_dma40.c 		INIT_LIST_HEAD(&d40c->client);
d40c             2805 drivers/dma/ste_dma40.c 		INIT_LIST_HEAD(&d40c->prepare_queue);
d40c             2807 drivers/dma/ste_dma40.c 		tasklet_init(&d40c->tasklet, dma_tasklet,
d40c             2808 drivers/dma/ste_dma40.c 			     (unsigned long) d40c);
d40c             2810 drivers/dma/ste_dma40.c 		list_add_tail(&d40c->chan.device_node,