Lines Matching refs:d40c
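
Every match below dereferences the per-channel cookie d40c, a struct d40_chan * from drivers/dma/ste_dma40.c. As a reading aid, here is a compressed sketch of the members the listed lines actually touch; the member names are taken from the matches themselves, but the types are plain-C stand-ins, not the driver's real declarations (which use spinlock_t, struct list_head, struct tasklet_struct, and so on).

#include <stdbool.h>
#include <stdint.h>

/* Illustrative legend only -- see struct d40_chan in ste_dma40.c for the
 * real definition. */
struct d40_chan_sketch {
	int lock;		/* spinlock serializing channel state and lists */
	bool busy;		/* descriptors are on the hardware (runtime-PM ref held) */
	bool configured;	/* dma_cfg accepted by stedma40_filter()/d40_config_memcpy() */
	int pending_tx;		/* completions waiting for dma_tasklet() */

	void *phy_chan;		/* backing physical channel (->num, ->lock) */
	int log_num;		/* D40_PHY_CHAN when the channel is physical */
	void *lcpa;		/* logical channel parameter area (lcsp2/lcsp3) */
	void *base;		/* driver instance: dev, virtbase, pools, lookup tables */

	void *dma_cfg;		/* stedma40_chan_cfg: dir, dev_type, mode, ... */
	uint32_t src_def_cfg, dst_def_cfg;	/* defaults written to SSCFG/SDCFG */
	void *log_def;		/* default lcsp1/lcsp3 for logical channels */
	uint32_t runtime_addr, runtime_direction;	/* from d40_set_runtime_config() */

	/* descriptor lists: pending_queue -> queue -> active -> done (or client) */
	void *done, *active, *queue, *pending_queue, *client, *prepare_queue;

	void *tasklet;		/* dma_tasklet(), scheduled from dma_tc_handle() */
	void *chan;		/* embedded struct dma_chan (dev, chan_id, device_node) */
};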

599 static struct device *chan2dev(struct d40_chan *d40c) in chan2dev() argument
601 return &d40c->chan.dev->device; in chan2dev()
623 #define chan_err(d40c, format, arg...) \ argument
624 d40_err(chan2dev(d40c), format, ## arg)
626 static int d40_pool_lli_alloc(struct d40_chan *d40c, struct d40_desc *d40d, in d40_pool_lli_alloc() argument
629 bool is_log = chan_is_logical(d40c); in d40_pool_lli_alloc()
661 d40d->lli_pool.dma_addr = dma_map_single(d40c->base->dev, in d40_pool_lli_alloc()
666 if (dma_mapping_error(d40c->base->dev, in d40_pool_lli_alloc()
678 static void d40_pool_lli_free(struct d40_chan *d40c, struct d40_desc *d40d) in d40_pool_lli_free() argument
681 dma_unmap_single(d40c->base->dev, d40d->lli_pool.dma_addr, in d40_pool_lli_free()
693 static int d40_lcla_alloc_one(struct d40_chan *d40c, in d40_lcla_alloc_one() argument
700 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
707 int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i; in d40_lcla_alloc_one()
709 if (!d40c->base->lcla_pool.alloc_map[idx]) { in d40_lcla_alloc_one()
710 d40c->base->lcla_pool.alloc_map[idx] = d40d; in d40_lcla_alloc_one()
717 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
722 static int d40_lcla_free_all(struct d40_chan *d40c, in d40_lcla_free_all() argument
729 if (chan_is_physical(d40c)) in d40_lcla_free_all()
732 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
735 int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i; in d40_lcla_free_all()
737 if (d40c->base->lcla_pool.alloc_map[idx] == d40d) { in d40_lcla_free_all()
738 d40c->base->lcla_pool.alloc_map[idx] = NULL; in d40_lcla_free_all()
747 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
758 static struct d40_desc *d40_desc_get(struct d40_chan *d40c) in d40_desc_get() argument
762 if (!list_empty(&d40c->client)) { in d40_desc_get()
766 list_for_each_entry_safe(d, _d, &d40c->client, node) { in d40_desc_get()
777 desc = kmem_cache_zalloc(d40c->base->desc_slab, GFP_NOWAIT); in d40_desc_get()
785 static void d40_desc_free(struct d40_chan *d40c, struct d40_desc *d40d) in d40_desc_free() argument
788 d40_pool_lli_free(d40c, d40d); in d40_desc_free()
789 d40_lcla_free_all(d40c, d40d); in d40_desc_free()
790 kmem_cache_free(d40c->base->desc_slab, d40d); in d40_desc_free()
793 static void d40_desc_submit(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_submit() argument
795 list_add_tail(&desc->node, &d40c->active); in d40_desc_submit()
815 static void d40_desc_done(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_done() argument
817 list_add_tail(&desc->node, &d40c->done); in d40_desc_done()
933 static void d40_desc_load(struct d40_chan *d40c, struct d40_desc *d40d) in d40_desc_load() argument
935 if (chan_is_physical(d40c)) { in d40_desc_load()
936 d40_phy_lli_load(d40c, d40d); in d40_desc_load()
939 d40_log_lli_to_lcxa(d40c, d40d); in d40_desc_load()
942 static struct d40_desc *d40_first_active_get(struct d40_chan *d40c) in d40_first_active_get() argument
946 if (list_empty(&d40c->active)) in d40_first_active_get()
949 d = list_first_entry(&d40c->active, in d40_first_active_get()
956 static void d40_desc_queue(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_queue() argument
960 list_add_tail(&desc->node, &d40c->pending_queue); in d40_desc_queue()
963 static struct d40_desc *d40_first_pending(struct d40_chan *d40c) in d40_first_pending() argument
967 if (list_empty(&d40c->pending_queue)) in d40_first_pending()
970 d = list_first_entry(&d40c->pending_queue, in d40_first_pending()
976 static struct d40_desc *d40_first_queued(struct d40_chan *d40c) in d40_first_queued() argument
980 if (list_empty(&d40c->queue)) in d40_first_queued()
983 d = list_first_entry(&d40c->queue, in d40_first_queued()
989 static struct d40_desc *d40_first_done(struct d40_chan *d40c) in d40_first_done() argument
991 if (list_empty(&d40c->done)) in d40_first_done()
994 return list_first_entry(&d40c->done, struct d40_desc, node); in d40_first_done()
1057 static int __d40_execute_command_phy(struct d40_chan *d40c, in __d40_execute_command_phy() argument
1068 ret = __d40_execute_command_phy(d40c, D40_DMA_SUSPEND_REQ); in __d40_execute_command_phy()
1073 spin_lock_irqsave(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
1075 if (d40c->phy_chan->num % 2 == 0) in __d40_execute_command_phy()
1076 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_phy()
1078 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_phy()
1082 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_phy()
1083 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_phy()
1089 wmask = 0xffffffff & ~(D40_CHAN_POS_MASK(d40c->phy_chan->num)); in __d40_execute_command_phy()
1090 writel(wmask | (command << D40_CHAN_POS(d40c->phy_chan->num)), in __d40_execute_command_phy()
1097 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_phy()
1098 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_phy()
1113 chan_err(d40c, in __d40_execute_command_phy()
1115 d40c->phy_chan->num, d40c->log_num, in __d40_execute_command_phy()
1123 spin_unlock_irqrestore(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
1127 static void d40_term_all(struct d40_chan *d40c) in d40_term_all() argument
1133 while ((d40d = d40_first_done(d40c))) { in d40_term_all()
1135 d40_desc_free(d40c, d40d); in d40_term_all()
1139 while ((d40d = d40_first_active_get(d40c))) { in d40_term_all()
1141 d40_desc_free(d40c, d40d); in d40_term_all()
1145 while ((d40d = d40_first_queued(d40c))) { in d40_term_all()
1147 d40_desc_free(d40c, d40d); in d40_term_all()
1151 while ((d40d = d40_first_pending(d40c))) { in d40_term_all()
1153 d40_desc_free(d40c, d40d); in d40_term_all()
1157 if (!list_empty(&d40c->client)) in d40_term_all()
1158 list_for_each_entry_safe(d40d, _d, &d40c->client, node) { in d40_term_all()
1160 d40_desc_free(d40c, d40d); in d40_term_all()
1164 if (!list_empty(&d40c->prepare_queue)) in d40_term_all()
1166 &d40c->prepare_queue, node) { in d40_term_all()
1168 d40_desc_free(d40c, d40d); in d40_term_all()
1171 d40c->pending_tx = 0; in d40_term_all()
1174 static void __d40_config_set_event(struct d40_chan *d40c, in __d40_config_set_event() argument
1178 void __iomem *addr = chan_base(d40c) + reg; in __d40_config_set_event()
1218 chan_err(d40c, in __d40_config_set_event()
1220 "status %x\n", d40c->phy_chan->num, in __d40_config_set_event()
1221 d40c->log_num, status); in __d40_config_set_event()
1242 dev_dbg(chan2dev(d40c), in __d40_config_set_event()
1257 static void d40_config_set_event(struct d40_chan *d40c, in d40_config_set_event() argument
1260 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_config_set_event()
1263 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_config_set_event()
1264 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_config_set_event()
1265 __d40_config_set_event(d40c, event_type, event, in d40_config_set_event()
1268 if (d40c->dma_cfg.dir != DMA_DEV_TO_MEM) in d40_config_set_event()
1269 __d40_config_set_event(d40c, event_type, event, in d40_config_set_event()
1273 static u32 d40_chan_has_events(struct d40_chan *d40c) in d40_chan_has_events() argument
1275 void __iomem *chanbase = chan_base(d40c); in d40_chan_has_events()
1285 __d40_execute_command_log(struct d40_chan *d40c, enum d40_command command) in __d40_execute_command_log() argument
1292 if (d40c->phy_chan->num % 2 == 0) in __d40_execute_command_log()
1293 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_log()
1295 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_log()
1298 spin_lock_irqsave(&d40c->phy_chan->lock, flags); in __d40_execute_command_log()
1305 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_log()
1306 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_log()
1309 d40_config_set_event(d40c, D40_SUSPEND_REQ_EVENTLINE); in __d40_execute_command_log()
1311 d40_config_set_event(d40c, D40_DEACTIVATE_EVENTLINE); in __d40_execute_command_log()
1313 if (!d40_chan_has_events(d40c) && (command == D40_DMA_STOP)) in __d40_execute_command_log()
1314 ret = __d40_execute_command_phy(d40c, command); in __d40_execute_command_log()
1320 d40_config_set_event(d40c, D40_ACTIVATE_EVENTLINE); in __d40_execute_command_log()
1321 ret = __d40_execute_command_phy(d40c, command); in __d40_execute_command_log()
1329 spin_unlock_irqrestore(&d40c->phy_chan->lock, flags); in __d40_execute_command_log()
1333 static int d40_channel_execute_command(struct d40_chan *d40c, in d40_channel_execute_command() argument
1336 if (chan_is_logical(d40c)) in d40_channel_execute_command()
1337 return __d40_execute_command_log(d40c, command); in d40_channel_execute_command()
1339 return __d40_execute_command_phy(d40c, command); in d40_channel_execute_command()
1342 static u32 d40_get_prmo(struct d40_chan *d40c) in d40_get_prmo() argument
1361 if (chan_is_physical(d40c)) in d40_get_prmo()
1362 return phy_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1364 return log_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1367 static void d40_config_write(struct d40_chan *d40c) in d40_config_write() argument
1373 addr_base = (d40c->phy_chan->num % 2) * 4; in d40_config_write()
1375 var = ((u32)(chan_is_logical(d40c)) + 1) << in d40_config_write()
1376 D40_CHAN_POS(d40c->phy_chan->num); in d40_config_write()
1377 writel(var, d40c->base->virtbase + D40_DREG_PRMSE + addr_base); in d40_config_write()
1380 var = d40_get_prmo(d40c) << D40_CHAN_POS(d40c->phy_chan->num); in d40_config_write()
1382 writel(var, d40c->base->virtbase + D40_DREG_PRMOE + addr_base); in d40_config_write()
1384 if (chan_is_logical(d40c)) { in d40_config_write()
1385 int lidx = (d40c->phy_chan->num << D40_SREG_ELEM_LOG_LIDX_POS) in d40_config_write()
1387 void __iomem *chanbase = chan_base(d40c); in d40_config_write()
1390 writel(d40c->src_def_cfg, chanbase + D40_CHAN_REG_SSCFG); in d40_config_write()
1391 writel(d40c->dst_def_cfg, chanbase + D40_CHAN_REG_SDCFG); in d40_config_write()
1403 static u32 d40_residue(struct d40_chan *d40c) in d40_residue() argument
1407 if (chan_is_logical(d40c)) in d40_residue()
1408 num_elt = (readl(&d40c->lcpa->lcsp2) & D40_MEM_LCSP2_ECNT_MASK) in d40_residue()
1411 u32 val = readl(chan_base(d40c) + D40_CHAN_REG_SDELT); in d40_residue()
1416 return num_elt * d40c->dma_cfg.dst_info.data_width; in d40_residue()
1419 static bool d40_tx_is_linked(struct d40_chan *d40c) in d40_tx_is_linked() argument
1423 if (chan_is_logical(d40c)) in d40_tx_is_linked()
1424 is_link = readl(&d40c->lcpa->lcsp3) & D40_MEM_LCSP3_DLOS_MASK; in d40_tx_is_linked()
1426 is_link = readl(chan_base(d40c) + D40_CHAN_REG_SDLNK) in d40_tx_is_linked()
1434 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_pause() local
1438 if (d40c->phy_chan == NULL) { in d40_pause()
1439 chan_err(d40c, "Channel is not allocated!\n"); in d40_pause()
1443 if (!d40c->busy) in d40_pause()
1446 spin_lock_irqsave(&d40c->lock, flags); in d40_pause()
1447 pm_runtime_get_sync(d40c->base->dev); in d40_pause()
1449 res = d40_channel_execute_command(d40c, D40_DMA_SUSPEND_REQ); in d40_pause()
1451 pm_runtime_mark_last_busy(d40c->base->dev); in d40_pause()
1452 pm_runtime_put_autosuspend(d40c->base->dev); in d40_pause()
1453 spin_unlock_irqrestore(&d40c->lock, flags); in d40_pause()
1459 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_resume() local
1463 if (d40c->phy_chan == NULL) { in d40_resume()
1464 chan_err(d40c, "Channel is not allocated!\n"); in d40_resume()
1468 if (!d40c->busy) in d40_resume()
1471 spin_lock_irqsave(&d40c->lock, flags); in d40_resume()
1472 pm_runtime_get_sync(d40c->base->dev); in d40_resume()
1475 if (d40_residue(d40c) || d40_tx_is_linked(d40c)) in d40_resume()
1476 res = d40_channel_execute_command(d40c, D40_DMA_RUN); in d40_resume()
1478 pm_runtime_mark_last_busy(d40c->base->dev); in d40_resume()
1479 pm_runtime_put_autosuspend(d40c->base->dev); in d40_resume()
1480 spin_unlock_irqrestore(&d40c->lock, flags); in d40_resume()
1486 struct d40_chan *d40c = container_of(tx->chan, in d40_tx_submit() local
1493 spin_lock_irqsave(&d40c->lock, flags); in d40_tx_submit()
1495 d40_desc_queue(d40c, d40d); in d40_tx_submit()
1496 spin_unlock_irqrestore(&d40c->lock, flags); in d40_tx_submit()
1501 static int d40_start(struct d40_chan *d40c) in d40_start() argument
1503 return d40_channel_execute_command(d40c, D40_DMA_RUN); in d40_start()
1506 static struct d40_desc *d40_queue_start(struct d40_chan *d40c) in d40_queue_start() argument
1512 d40d = d40_first_queued(d40c); in d40_queue_start()
1515 if (!d40c->busy) { in d40_queue_start()
1516 d40c->busy = true; in d40_queue_start()
1517 pm_runtime_get_sync(d40c->base->dev); in d40_queue_start()
1524 d40_desc_submit(d40c, d40d); in d40_queue_start()
1527 d40_desc_load(d40c, d40d); in d40_queue_start()
1530 err = d40_start(d40c); in d40_queue_start()
1540 static void dma_tc_handle(struct d40_chan *d40c) in dma_tc_handle() argument
1545 d40d = d40_first_active_get(d40c); in dma_tc_handle()
1558 && !d40_tx_is_linked(d40c) in dma_tc_handle()
1559 && !d40_residue(d40c)) { in dma_tc_handle()
1560 d40_lcla_free_all(d40c, d40d); in dma_tc_handle()
1561 d40_desc_load(d40c, d40d); in dma_tc_handle()
1562 (void) d40_start(d40c); in dma_tc_handle()
1568 d40_lcla_free_all(d40c, d40d); in dma_tc_handle()
1571 d40_desc_load(d40c, d40d); in dma_tc_handle()
1573 (void) d40_start(d40c); in dma_tc_handle()
1577 if (d40_queue_start(d40c) == NULL) { in dma_tc_handle()
1578 d40c->busy = false; in dma_tc_handle()
1580 pm_runtime_mark_last_busy(d40c->base->dev); in dma_tc_handle()
1581 pm_runtime_put_autosuspend(d40c->base->dev); in dma_tc_handle()
1585 d40_desc_done(d40c, d40d); in dma_tc_handle()
1588 d40c->pending_tx++; in dma_tc_handle()
1589 tasklet_schedule(&d40c->tasklet); in dma_tc_handle()
1595 struct d40_chan *d40c = (struct d40_chan *) data; in dma_tasklet() local
1602 spin_lock_irqsave(&d40c->lock, flags); in dma_tasklet()
1605 d40d = d40_first_done(d40c); in dma_tasklet()
1608 d40d = d40_first_active_get(d40c); in dma_tasklet()
1620 if (d40c->pending_tx == 0) { in dma_tasklet()
1621 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1633 d40_desc_free(d40c, d40d); in dma_tasklet()
1636 d40_lcla_free_all(d40c, d40d); in dma_tasklet()
1637 list_add_tail(&d40d->node, &d40c->client); in dma_tasklet()
1642 d40c->pending_tx--; in dma_tasklet()
1644 if (d40c->pending_tx) in dma_tasklet()
1645 tasklet_schedule(&d40c->tasklet); in dma_tasklet()
1647 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1656 if (d40c->pending_tx > 0) in dma_tasklet()
1657 d40c->pending_tx--; in dma_tasklet()
1658 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1667 struct d40_chan *d40c; in d40_handle_interrupt() local
1693 d40c = base->lookup_phy_chans[idx]; in d40_handle_interrupt()
1695 d40c = base->lookup_log_chans[il[row].offset + idx]; in d40_handle_interrupt()
1697 if (!d40c) { in d40_handle_interrupt()
1708 spin_lock(&d40c->lock); in d40_handle_interrupt()
1711 dma_tc_handle(d40c); in d40_handle_interrupt()
1716 spin_unlock(&d40c->lock); in d40_handle_interrupt()
1724 static int d40_validate_conf(struct d40_chan *d40c, in d40_validate_conf() argument
1731 chan_err(d40c, "Invalid direction.\n"); in d40_validate_conf()
1735 if ((is_log && conf->dev_type > d40c->base->num_log_chans) || in d40_validate_conf()
1736 (!is_log && conf->dev_type > d40c->base->num_phy_chans) || in d40_validate_conf()
1738 chan_err(d40c, "Invalid device type (%d)\n", conf->dev_type); in d40_validate_conf()
1747 chan_err(d40c, "periph to periph not supported\n"); in d40_validate_conf()
1760 chan_err(d40c, "src (burst x width) != dst (burst x width)\n"); in d40_validate_conf()
1857 static int d40_allocate_channel(struct d40_chan *d40c, bool *first_phy_user) in d40_allocate_channel() argument
1859 int dev_type = d40c->dma_cfg.dev_type; in d40_allocate_channel()
1868 bool is_log = d40c->dma_cfg.mode == STEDMA40_MODE_LOGICAL; in d40_allocate_channel()
1870 phys = d40c->base->phy_res; in d40_allocate_channel()
1871 num_phy_chans = d40c->base->num_phy_chans; in d40_allocate_channel()
1873 if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) { in d40_allocate_channel()
1876 } else if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_allocate_channel()
1877 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_allocate_channel()
1888 if (d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_allocate_channel()
1890 if (d40c->dma_cfg.use_fixed_channel) { in d40_allocate_channel()
1891 i = d40c->dma_cfg.phy_channel; in d40_allocate_channel()
1905 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1918 d40c->phy_chan = &phys[i]; in d40_allocate_channel()
1919 d40c->log_num = D40_PHY_CHAN; in d40_allocate_channel()
1926 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1929 if (d40c->dma_cfg.use_fixed_channel) { in d40_allocate_channel()
1930 i = d40c->dma_cfg.phy_channel; in d40_allocate_channel()
1933 dev_err(chan2dev(d40c), in d40_allocate_channel()
1942 dev_err(chan2dev(d40c), in d40_allocate_channel()
1971 d40c->phy_chan = &phys[i]; in d40_allocate_channel()
1972 d40c->log_num = log_num; in d40_allocate_channel()
1976 d40c->base->lookup_log_chans[d40c->log_num] = d40c; in d40_allocate_channel()
1978 d40c->base->lookup_phy_chans[d40c->phy_chan->num] = d40c; in d40_allocate_channel()
1984 static int d40_config_memcpy(struct d40_chan *d40c) in d40_config_memcpy() argument
1986 dma_cap_mask_t cap = d40c->chan.device->cap_mask; in d40_config_memcpy()
1989 d40c->dma_cfg = dma40_memcpy_conf_log; in d40_config_memcpy()
1990 d40c->dma_cfg.dev_type = dma40_memcpy_channels[d40c->chan.chan_id]; in d40_config_memcpy()
1992 d40_log_cfg(&d40c->dma_cfg, in d40_config_memcpy()
1993 &d40c->log_def.lcsp1, &d40c->log_def.lcsp3); in d40_config_memcpy()
1997 d40c->dma_cfg = dma40_memcpy_conf_phy; in d40_config_memcpy()
2000 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_TIM_POS); in d40_config_memcpy()
2003 d40c->src_def_cfg |= BIT(D40_SREG_CFG_EIM_POS); in d40_config_memcpy()
2004 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_EIM_POS); in d40_config_memcpy()
2007 chan_err(d40c, "No memcpy\n"); in d40_config_memcpy()
2014 static int d40_free_dma(struct d40_chan *d40c) in d40_free_dma() argument
2018 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_free_dma()
2019 struct d40_phy_res *phy = d40c->phy_chan; in d40_free_dma()
2023 d40_term_all(d40c); in d40_free_dma()
2026 chan_err(d40c, "phy == null\n"); in d40_free_dma()
2032 chan_err(d40c, "channel already free\n"); in d40_free_dma()
2036 if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_free_dma()
2037 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) in d40_free_dma()
2039 else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) in d40_free_dma()
2042 chan_err(d40c, "Unknown direction\n"); in d40_free_dma()
2046 pm_runtime_get_sync(d40c->base->dev); in d40_free_dma()
2047 res = d40_channel_execute_command(d40c, D40_DMA_STOP); in d40_free_dma()
2049 chan_err(d40c, "stop failed\n"); in d40_free_dma()
2053 d40_alloc_mask_free(phy, is_src, chan_is_logical(d40c) ? event : 0); in d40_free_dma()
2055 if (chan_is_logical(d40c)) in d40_free_dma()
2056 d40c->base->lookup_log_chans[d40c->log_num] = NULL; in d40_free_dma()
2058 d40c->base->lookup_phy_chans[phy->num] = NULL; in d40_free_dma()
2060 if (d40c->busy) { in d40_free_dma()
2061 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2062 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2065 d40c->busy = false; in d40_free_dma()
2066 d40c->phy_chan = NULL; in d40_free_dma()
2067 d40c->configured = false; in d40_free_dma()
2070 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2071 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2075 static bool d40_is_paused(struct d40_chan *d40c) in d40_is_paused() argument
2077 void __iomem *chanbase = chan_base(d40c); in d40_is_paused()
2082 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_is_paused()
2084 spin_lock_irqsave(&d40c->lock, flags); in d40_is_paused()
2086 if (chan_is_physical(d40c)) { in d40_is_paused()
2087 if (d40c->phy_chan->num % 2 == 0) in d40_is_paused()
2088 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in d40_is_paused()
2090 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in d40_is_paused()
2093 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in d40_is_paused()
2094 D40_CHAN_POS(d40c->phy_chan->num); in d40_is_paused()
2101 if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_is_paused()
2102 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_is_paused()
2104 } else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) { in d40_is_paused()
2107 chan_err(d40c, "Unknown direction\n"); in d40_is_paused()
2117 spin_unlock_irqrestore(&d40c->lock, flags); in d40_is_paused()
2124 struct d40_chan *d40c = in stedma40_residue() local
2129 spin_lock_irqsave(&d40c->lock, flags); in stedma40_residue()
2130 bytes_left = d40_residue(d40c); in stedma40_residue()
2131 spin_unlock_irqrestore(&d40c->lock, flags); in stedma40_residue()
2299 struct d40_chan *d40c = in stedma40_filter() local
2304 err = d40_validate_conf(d40c, info); in stedma40_filter()
2306 d40c->dma_cfg = *info; in stedma40_filter()
2308 err = d40_config_memcpy(d40c); in stedma40_filter()
2311 d40c->configured = true; in stedma40_filter()
2317 static void __d40_set_prio_rt(struct d40_chan *d40c, int dev_type, bool src) in __d40_set_prio_rt() argument
2319 bool realtime = d40c->dma_cfg.realtime; in __d40_set_prio_rt()
2320 bool highprio = d40c->dma_cfg.high_priority; in __d40_set_prio_rt()
2326 struct d40_gen_dmac *dmac = &d40c->base->gen_dmac; in __d40_set_prio_rt()
2337 if (!src && chan_is_logical(d40c)) in __d40_set_prio_rt()
2346 writel(bit, d40c->base->virtbase + prioreg + group * 4); in __d40_set_prio_rt()
2347 writel(bit, d40c->base->virtbase + rtreg + group * 4); in __d40_set_prio_rt()
2350 static void d40_set_prio_realtime(struct d40_chan *d40c) in d40_set_prio_realtime() argument
2352 if (d40c->base->rev < 3) in d40_set_prio_realtime()
2355 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_set_prio_realtime()
2356 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_set_prio_realtime()
2357 __d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, true); in d40_set_prio_realtime()
2359 if ((d40c->dma_cfg.dir == DMA_MEM_TO_DEV) || in d40_set_prio_realtime()
2360 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_set_prio_realtime()
2361 __d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, false); in d40_set_prio_realtime()
2417 struct d40_chan *d40c = in d40_alloc_chan_resources() local
2420 spin_lock_irqsave(&d40c->lock, flags); in d40_alloc_chan_resources()
2425 if (!d40c->configured) { in d40_alloc_chan_resources()
2426 err = d40_config_memcpy(d40c); in d40_alloc_chan_resources()
2428 chan_err(d40c, "Failed to configure memcpy channel\n"); in d40_alloc_chan_resources()
2433 err = d40_allocate_channel(d40c, &is_free_phy); in d40_alloc_chan_resources()
2435 chan_err(d40c, "Failed to allocate channel\n"); in d40_alloc_chan_resources()
2436 d40c->configured = false; in d40_alloc_chan_resources()
2440 pm_runtime_get_sync(d40c->base->dev); in d40_alloc_chan_resources()
2442 d40_set_prio_realtime(d40c); in d40_alloc_chan_resources()
2444 if (chan_is_logical(d40c)) { in d40_alloc_chan_resources()
2445 if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) in d40_alloc_chan_resources()
2446 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2447 d40c->dma_cfg.dev_type * D40_LCPA_CHAN_SIZE; in d40_alloc_chan_resources()
2449 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2450 d40c->dma_cfg.dev_type * in d40_alloc_chan_resources()
2454 d40c->src_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS); in d40_alloc_chan_resources()
2455 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS); in d40_alloc_chan_resources()
2458 dev_dbg(chan2dev(d40c), "allocated %s channel (phy %d%s)\n", in d40_alloc_chan_resources()
2459 chan_is_logical(d40c) ? "logical" : "physical", in d40_alloc_chan_resources()
2460 d40c->phy_chan->num, in d40_alloc_chan_resources()
2461 d40c->dma_cfg.use_fixed_channel ? ", fixed" : ""); in d40_alloc_chan_resources()
2470 d40_config_write(d40c); in d40_alloc_chan_resources()
2472 pm_runtime_mark_last_busy(d40c->base->dev); in d40_alloc_chan_resources()
2473 pm_runtime_put_autosuspend(d40c->base->dev); in d40_alloc_chan_resources()
2474 spin_unlock_irqrestore(&d40c->lock, flags); in d40_alloc_chan_resources()
2480 struct d40_chan *d40c = in d40_free_chan_resources() local
2485 if (d40c->phy_chan == NULL) { in d40_free_chan_resources()
2486 chan_err(d40c, "Cannot free unallocated channel\n"); in d40_free_chan_resources()
2490 spin_lock_irqsave(&d40c->lock, flags); in d40_free_chan_resources()
2492 err = d40_free_dma(d40c); in d40_free_chan_resources()
2495 chan_err(d40c, "Failed to free channel\n"); in d40_free_chan_resources()
2496 spin_unlock_irqrestore(&d40c->lock, flags); in d40_free_chan_resources()
2582 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_tx_status() local
2585 if (d40c->phy_chan == NULL) { in d40_tx_status()
2586 chan_err(d40c, "Cannot read status of unallocated channel\n"); in d40_tx_status()
2594 if (d40_is_paused(d40c)) in d40_tx_status()
2602 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_issue_pending() local
2605 if (d40c->phy_chan == NULL) { in d40_issue_pending()
2606 chan_err(d40c, "Channel is not allocated!\n"); in d40_issue_pending()
2610 spin_lock_irqsave(&d40c->lock, flags); in d40_issue_pending()
2612 list_splice_tail_init(&d40c->pending_queue, &d40c->queue); in d40_issue_pending()
2615 if (!d40c->busy) in d40_issue_pending()
2616 (void) d40_queue_start(d40c); in d40_issue_pending()
2618 spin_unlock_irqrestore(&d40c->lock, flags); in d40_issue_pending()
2624 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_terminate_all() local
2627 if (d40c->phy_chan == NULL) { in d40_terminate_all()
2628 chan_err(d40c, "Channel is not allocated!\n"); in d40_terminate_all()
2632 spin_lock_irqsave(&d40c->lock, flags); in d40_terminate_all()
2634 pm_runtime_get_sync(d40c->base->dev); in d40_terminate_all()
2635 ret = d40_channel_execute_command(d40c, D40_DMA_STOP); in d40_terminate_all()
2637 chan_err(d40c, "Failed to stop channel\n"); in d40_terminate_all()
2639 d40_term_all(d40c); in d40_terminate_all()
2640 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2641 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2642 if (d40c->busy) { in d40_terminate_all()
2643 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2644 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2646 d40c->busy = false; in d40_terminate_all()
2648 spin_unlock_irqrestore(&d40c->lock, flags); in d40_terminate_all()
2653 dma40_config_to_halfchannel(struct d40_chan *d40c, in dma40_config_to_halfchannel() argument
2659 if (chan_is_logical(d40c)) { in dma40_config_to_halfchannel()
2689 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_set_runtime_config() local
2690 struct stedma40_chan_cfg *cfg = &d40c->dma_cfg; in d40_set_runtime_config()
2696 if (d40c->phy_chan == NULL) { in d40_set_runtime_config()
2697 chan_err(d40c, "Channel is not allocated!\n"); in d40_set_runtime_config()
2710 dev_dbg(d40c->base->dev, in d40_set_runtime_config()
2726 dev_dbg(d40c->base->dev, in d40_set_runtime_config()
2738 dev_err(d40c->base->dev, in d40_set_runtime_config()
2745 dev_err(d40c->base->dev, "no address supplied\n"); in d40_set_runtime_config()
2750 dev_err(d40c->base->dev, in d40_set_runtime_config()
2779 ret = dma40_config_to_halfchannel(d40c, &cfg->src_info, in d40_set_runtime_config()
2784 ret = dma40_config_to_halfchannel(d40c, &cfg->dst_info, in d40_set_runtime_config()
2790 if (chan_is_logical(d40c)) in d40_set_runtime_config()
2791 d40_log_cfg(cfg, &d40c->log_def.lcsp1, &d40c->log_def.lcsp3); in d40_set_runtime_config()
2793 d40_phy_cfg(cfg, &d40c->src_def_cfg, &d40c->dst_def_cfg); in d40_set_runtime_config()
2796 d40c->runtime_addr = config_addr; in d40_set_runtime_config()
2797 d40c->runtime_direction = config->direction; in d40_set_runtime_config()
2798 dev_dbg(d40c->base->dev, in d40_set_runtime_config()
2816 struct d40_chan *d40c; in d40_chan_init() local
2821 d40c = &chans[i]; in d40_chan_init()
2822 d40c->base = base; in d40_chan_init()
2823 d40c->chan.device = dma; in d40_chan_init()
2825 spin_lock_init(&d40c->lock); in d40_chan_init()
2827 d40c->log_num = D40_PHY_CHAN; in d40_chan_init()
2829 INIT_LIST_HEAD(&d40c->done); in d40_chan_init()
2830 INIT_LIST_HEAD(&d40c->active); in d40_chan_init()
2831 INIT_LIST_HEAD(&d40c->queue); in d40_chan_init()
2832 INIT_LIST_HEAD(&d40c->pending_queue); in d40_chan_init()
2833 INIT_LIST_HEAD(&d40c->client); in d40_chan_init()
2834 INIT_LIST_HEAD(&d40c->prepare_queue); in d40_chan_init()
2836 tasklet_init(&d40c->tasklet, dma_tasklet, in d40_chan_init()
2837 (unsigned long) d40c); in d40_chan_init()
2839 list_add_tail(&d40c->chan.device_node, in d40_chan_init()
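
Read end to end, the matches trace a descriptor's life through the channel's descriptor lists: d40_desc_queue() parks a freshly submitted descriptor on pending_queue, d40_issue_pending() splices pending_queue onto queue, d40_queue_start() moves the first queued descriptor to active (d40_desc_submit(), d40_desc_load(), d40_start()), dma_tc_handle() retires it to done and schedules the tasklet, and dma_tasklet() finally frees it or parks it on client for reuse by d40_desc_get(). The stand-alone program below replays only that list flow as a simplified model; it is plain userspace C with made-up helpers, not the driver's code.

/* Simplified model of the descriptor flow visible in the matches above:
 * pending_queue -> queue -> active -> done -> free. All names here are
 * illustrative stand-ins for the ste_dma40 helpers they mimic. */
#include <stdio.h>
#include <stdlib.h>

struct desc {				/* stand-in for struct d40_desc */
	int id;
	struct desc *next;
};

struct chan {				/* stand-in for the list heads in struct d40_chan */
	struct desc *pending_queue, *queue, *active, *done;
};

static void push_tail(struct desc **head, struct desc *d)
{
	d->next = NULL;
	while (*head)
		head = &(*head)->next;
	*head = d;
}

static struct desc *pop_head(struct desc **head)
{
	struct desc *d = *head;

	if (d)
		*head = d->next;
	return d;
}

/* tx_submit: queue the descriptor (cf. d40_desc_queue()) */
static void submit(struct chan *c, struct desc *d)
{
	push_tail(&c->pending_queue, d);
}

/* issue_pending: splice pending_queue onto queue and start the first queued
 * descriptor (cf. d40_issue_pending() + d40_queue_start()) */
static void issue_pending(struct chan *c)
{
	struct desc *d;

	while ((d = pop_head(&c->pending_queue)))
		push_tail(&c->queue, d);
	if ((d = pop_head(&c->queue)))
		push_tail(&c->active, d);	/* "loaded and running" */
}

/* terminal-count interrupt: retire the active descriptor, start the next one,
 * and let the "tasklet" complete it (cf. dma_tc_handle() + dma_tasklet()) */
static void tc_interrupt(struct chan *c)
{
	struct desc *d = pop_head(&c->active);

	if (!d)
		return;
	push_tail(&c->done, d);
	if ((d = pop_head(&c->queue)))
		push_tail(&c->active, d);
	while ((d = pop_head(&c->done))) {
		printf("desc %d completed\n", d->id);
		free(d);
	}
}

int main(void)
{
	struct chan c = { 0 };
	int i;

	for (i = 0; i < 3; i++) {
		struct desc *d = calloc(1, sizeof(*d));

		d->id = i;
		submit(&c, d);
	}
	issue_pending(&c);
	tc_interrupt(&c);	/* completes desc 0, starts desc 1 */
	tc_interrupt(&c);	/* completes desc 1, starts desc 2 */
	tc_interrupt(&c);	/* completes desc 2 */
	return 0;
}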