Lines matching refs: base
336 void *base; member
392 void *base; member
472 struct d40_base *base; member
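
The three `base` members above live in different structures: lines 336 and 392 are raw backing pointers (the per-descriptor LLI pool and the controller-wide LCLA pool), while line 472 is each channel's back-pointer to the controller state. A minimal sketch of how they nest, reconstructed from the usage below; any field not visible in this listing is an assumption:

struct d40_lli_pool {
	void		*base;		/* kmalloc'd backing; NULL when the
					 * embedded pre_alloc buffer is used */
	int		 size;
	dma_addr_t	 dma_addr;
	/* u8 pre_alloc_lli[...]; -- size assumed */
};

struct d40_lcla_pool {
	void		*base;		/* link-list area, 1 KiB per phy chan */
	void		*base_unaligned;/* kmalloc fallback, pre-PTR_ALIGN */
	dma_addr_t	 dma_addr;
	unsigned int	 pages;
	spinlock_t	 lock;
	struct d40_desc **alloc_map;
};

struct d40_chan {
	/* ... */
	struct d40_base	*base;		/* back-pointer to controller state */
};
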
616 return chan->base->virtbase + D40_DREG_PCBASE + in chan_base()
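
Line 616 is the whole point of chan_base(): each physical channel owns a register window at a fixed stride from D40_DREG_PCBASE. The full helper, reconstructed from this line plus the identical address math at line 3001 below:

static void __iomem *chan_base(struct d40_chan *chan)
{
	return chan->base->virtbase + D40_DREG_PCBASE +
	       chan->phy_chan->num * D40_DREG_PCDELTA;
}
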
631 void *base; in d40_pool_lli_alloc() local
639 base = d40d->lli_pool.pre_alloc_lli; in d40_pool_lli_alloc()
641 d40d->lli_pool.base = NULL; in d40_pool_lli_alloc()
645 base = kmalloc(d40d->lli_pool.size + align, GFP_NOWAIT); in d40_pool_lli_alloc()
646 d40d->lli_pool.base = base; in d40_pool_lli_alloc()
648 if (d40d->lli_pool.base == NULL) in d40_pool_lli_alloc()
653 d40d->lli_log.src = PTR_ALIGN(base, align); in d40_pool_lli_alloc()
658 d40d->lli_phy.src = PTR_ALIGN(base, align); in d40_pool_lli_alloc()
661 d40d->lli_pool.dma_addr = dma_map_single(d40c->base->dev, in d40_pool_lli_alloc()
666 if (dma_mapping_error(d40c->base->dev, in d40_pool_lli_alloc()
668 kfree(d40d->lli_pool.base); in d40_pool_lli_alloc()
669 d40d->lli_pool.base = NULL; in d40_pool_lli_alloc()
681 dma_unmap_single(d40c->base->dev, d40d->lli_pool.dma_addr, in d40_pool_lli_free()
684 kfree(d40d->lli_pool.base); in d40_pool_lli_free()
685 d40d->lli_pool.base = NULL; in d40_pool_lli_free()
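
Lines 631..685 show the descriptor LLI-pool strategy: a single-link transfer reuses a buffer embedded in the descriptor (so lli_pool.base stays NULL and there is nothing to kfree), larger transfers kmalloc with slack for manual alignment, and physical-channel LLIs are additionally DMA-mapped because the DMAC fetches them from memory itself. A condensed sketch; the single-link condition and the align derivation are assumptions. The free path at lines 681..685 simply reverses it: dma_unmap_single(), then kfree() of the possibly-NULL base.

static int lli_pool_alloc_sketch(struct d40_chan *d40c, struct d40_desc *d40d,
				 int lli_len, bool is_log)
{
	int align = is_log ? sizeof(struct d40_log_lli) :
			     sizeof(struct d40_phy_lli);
	void *base;

	if (lli_len == 1) {		/* exact condition assumed */
		base = d40d->lli_pool.pre_alloc_lli;	/* embedded buffer */
		d40d->lli_pool.base = NULL;		/* nothing to kfree */
	} else {
		base = kmalloc(d40d->lli_pool.size + align, GFP_NOWAIT);
		d40d->lli_pool.base = base;
		if (!base)
			return -ENOMEM;
	}

	if (is_log) {
		d40d->lli_log.src = PTR_ALIGN(base, align);
	} else {
		d40d->lli_phy.src = PTR_ALIGN(base, align);
		/* The hardware reads physical LLIs from memory: map them. */
		d40d->lli_pool.dma_addr = dma_map_single(d40c->base->dev,
				d40d->lli_phy.src, d40d->lli_pool.size,
				DMA_TO_DEVICE);
		if (dma_mapping_error(d40c->base->dev,
				      d40d->lli_pool.dma_addr)) {
			kfree(d40d->lli_pool.base);
			d40d->lli_pool.base = NULL;
			return -ENOMEM;
		}
	}
	return 0;
}
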
700 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
709 if (!d40c->base->lcla_pool.alloc_map[idx]) { in d40_lcla_alloc_one()
710 d40c->base->lcla_pool.alloc_map[idx] = d40d; in d40_lcla_alloc_one()
717 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
732 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
737 if (d40c->base->lcla_pool.alloc_map[idx] == d40d) { in d40_lcla_free_all()
738 d40c->base->lcla_pool.alloc_map[idx] = NULL; in d40_lcla_free_all()
747 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
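
Lines 700..747: LCLA slots are claimed under the pool spinlock by scanning this channel's stretch of alloc_map for a free entry and parking the descriptor pointer there; d40_lcla_free_all() is the inverse scan, NULLing every slot that points at the descriptor. The claim side as a sketch, with the slot-index layout assumed:

static int lcla_alloc_one_sketch(struct d40_chan *d40c, struct d40_desc *d40d)
{
	struct d40_lcla_pool *pool = &d40c->base->lcla_pool;
	unsigned long flags;
	int i, ret = -EINVAL;

	spin_lock_irqsave(&pool->lock, flags);
	for (i = 1; i < D40_LCLA_LINK_PER_EVENT_GRP / 2; i++) {
		/* Each phy channel owns a fixed run of slots (layout assumed). */
		int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i;

		if (!pool->alloc_map[idx]) {
			pool->alloc_map[idx] = d40d;	/* claim for this desc */
			ret = i;
			break;
		}
	}
	spin_unlock_irqrestore(&pool->lock, flags);
	return ret;
}
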
777 desc = kmem_cache_zalloc(d40c->base->desc_slab, GFP_NOWAIT); in d40_desc_get()
790 kmem_cache_free(d40c->base->desc_slab, d40d); in d40_desc_free()
802 void __iomem *base = chan_base(chan); in d40_phy_lli_load() local
804 writel(lli_src->reg_cfg, base + D40_CHAN_REG_SSCFG); in d40_phy_lli_load()
805 writel(lli_src->reg_elt, base + D40_CHAN_REG_SSELT); in d40_phy_lli_load()
806 writel(lli_src->reg_ptr, base + D40_CHAN_REG_SSPTR); in d40_phy_lli_load()
807 writel(lli_src->reg_lnk, base + D40_CHAN_REG_SSLNK); in d40_phy_lli_load()
809 writel(lli_dst->reg_cfg, base + D40_CHAN_REG_SDCFG); in d40_phy_lli_load()
810 writel(lli_dst->reg_elt, base + D40_CHAN_REG_SDELT); in d40_phy_lli_load()
811 writel(lli_dst->reg_ptr, base + D40_CHAN_REG_SDPTR); in d40_phy_lli_load()
812 writel(lli_dst->reg_lnk, base + D40_CHAN_REG_SDLNK); in d40_phy_lli_load()
822 struct d40_lcla_pool *pool = &chan->base->lcla_pool; in d40_log_lli_to_lcxa()
829 bool use_esram_lcla = chan->base->plat_data->use_esram_lcla; in d40_log_lli_to_lcxa()
882 struct d40_log_lli *lcla = pool->base + lcla_offset; in d40_log_lli_to_lcxa()
916 dma_sync_single_range_for_device(chan->base->dev, in d40_log_lli_to_lcxa()
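
Lines 822..916 (d40_log_lli_to_lcxa) write logical LLIs into the CPU-side LCLA buffer at a per-channel offset and must then push that range to the device before the channel can chase the link. The sync call at line 916 in context; the range covers one src/dst LLI pair:

	struct d40_log_lli *lcla = pool->base + lcla_offset;

	/* ... fill lcla[0] (src) and lcla[1] (dst) ... */

	dma_sync_single_range_for_device(chan->base->dev,
					 pool->dma_addr, lcla_offset,
					 2 * sizeof(struct d40_log_lli),
					 DMA_TO_DEVICE);
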
1073 spin_lock_irqsave(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
1076 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_phy()
1078 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_phy()
1123 spin_unlock_irqrestore(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
1293 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_log()
1295 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_log()
1377 writel(var, d40c->base->virtbase + D40_DREG_PRMSE + addr_base); in d40_config_write()
1382 writel(var, d40c->base->virtbase + D40_DREG_PRMOE + addr_base); in d40_config_write()
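
Lines 1073..1382 (and d40_is_paused at 2088..2090 below) follow the same hardware convention: per-channel state is split across paired even/odd register banks, selected by the parity of the physical channel number. __d40_execute_command_phy() and __d40_execute_command_log() pick ACTIVE or ACTIVO explicitly, while d40_config_write() reaches the odd PRMSE/PRMOE bank by adding an addr_base offset (presumably 4 for odd channels). The explicit form:

	void __iomem *active_reg;

	if (d40c->phy_chan->num % 2 == 0)
		active_reg = d40c->base->virtbase + D40_DREG_ACTIVE;
	else
		active_reg = d40c->base->virtbase + D40_DREG_ACTIVO;
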
1447 pm_runtime_get_sync(d40c->base->dev); in d40_pause()
1451 pm_runtime_mark_last_busy(d40c->base->dev); in d40_pause()
1452 pm_runtime_put_autosuspend(d40c->base->dev); in d40_pause()
1472 pm_runtime_get_sync(d40c->base->dev); in d40_resume()
1478 pm_runtime_mark_last_busy(d40c->base->dev); in d40_resume()
1479 pm_runtime_put_autosuspend(d40c->base->dev); in d40_resume()
1517 pm_runtime_get_sync(d40c->base->dev); in d40_queue_start()
1580 pm_runtime_mark_last_busy(d40c->base->dev); in dma_tc_handle()
1581 pm_runtime_put_autosuspend(d40c->base->dev); in dma_tc_handle()
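
The pause/resume/queue-start paths here (and d40_free_dma and d40_terminate_all further down) all bracket hardware access with the same runtime-PM idiom: synchronously power the controller up, touch the channel registers, then mark it busy and drop the reference with autosuspend so back-to-back operations do not bounce the power state. The bracket in isolation:

	pm_runtime_get_sync(d40c->base->dev);	/* ensure the DMAC is powered */

	/* ... manipulate channel registers ... */

	pm_runtime_mark_last_busy(d40c->base->dev);	/* restart autosuspend timer */
	pm_runtime_put_autosuspend(d40c->base->dev);	/* drop ref; suspend later */
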
1669 struct d40_base *base = data; in d40_handle_interrupt() local
1670 u32 regs[base->gen_dmac.il_size]; in d40_handle_interrupt()
1671 struct d40_interrupt_lookup *il = base->gen_dmac.il; in d40_handle_interrupt()
1672 u32 il_size = base->gen_dmac.il_size; in d40_handle_interrupt()
1674 spin_lock_irqsave(&base->interrupt_lock, flags); in d40_handle_interrupt()
1678 regs[i] = readl(base->virtbase + il[i].src); in d40_handle_interrupt()
1693 d40c = base->lookup_phy_chans[idx]; in d40_handle_interrupt()
1695 d40c = base->lookup_log_chans[il[row].offset + idx]; in d40_handle_interrupt()
1706 writel(BIT(idx), base->virtbase + il[row].clr); in d40_handle_interrupt()
1713 d40_err(base->dev, "IRQ chan: %ld offset %d idx %d\n", in d40_handle_interrupt()
1719 spin_unlock_irqrestore(&base->interrupt_lock, flags); in d40_handle_interrupt()
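
Lines 1669..1719 outline the interrupt handler: under interrupt_lock it snapshots every status register described by the gen_dmac.il lookup table (note the variable-length array at line 1670, sized by il_size), then walks the set bits, resolves each to a physical or logical channel via the lookup arrays, acks the bit, and dispatches. A condensed sketch; the il fields and lookup arrays come from the lines above, while the bit-walk and the D40_PHY_CHAN sentinel are assumptions:

static irqreturn_t d40_irq_sketch(int irq, void *data)
{
	struct d40_base *base = data;
	struct d40_interrupt_lookup *il = base->gen_dmac.il;
	u32 il_size = base->gen_dmac.il_size;
	u32 regs[il_size];		/* VLA, as in the listed source */
	unsigned long flags;
	u32 i;

	spin_lock_irqsave(&base->interrupt_lock, flags);

	/* Snapshot all status registers first. */
	for (i = 0; i < il_size; i++)
		regs[i] = readl(base->virtbase + il[i].src);

	for (i = 0; i < il_size; i++) {
		u32 pending = regs[i];

		while (pending) {
			int idx = __ffs(pending);
			struct d40_chan *d40c;

			if (il[i].offset == D40_PHY_CHAN)	/* assumed sentinel */
				d40c = base->lookup_phy_chans[idx];
			else
				d40c = base->lookup_log_chans[il[i].offset + idx];

			writel(BIT(idx), base->virtbase + il[i].clr);	/* ack */

			if (d40c)
				dma_tc_handle(d40c);
			else
				d40_err(base->dev, "IRQ chan: %u offset %d idx %d\n",
					i, il[i].offset, idx);

			pending &= ~BIT(idx);
		}
	}

	spin_unlock_irqrestore(&base->interrupt_lock, flags);
	return IRQ_HANDLED;
}
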
1735 if ((is_log && conf->dev_type > d40c->base->num_log_chans) || in d40_validate_conf()
1736 (!is_log && conf->dev_type > d40c->base->num_phy_chans) || in d40_validate_conf()
1870 phys = d40c->base->phy_res; in d40_allocate_channel()
1871 num_phy_chans = d40c->base->num_phy_chans; in d40_allocate_channel()
1905 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1926 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1976 d40c->base->lookup_log_chans[d40c->log_num] = d40c; in d40_allocate_channel()
1978 d40c->base->lookup_phy_chans[d40c->phy_chan->num] = d40c; in d40_allocate_channel()
2046 pm_runtime_get_sync(d40c->base->dev); in d40_free_dma()
2056 d40c->base->lookup_log_chans[d40c->log_num] = NULL; in d40_free_dma()
2058 d40c->base->lookup_phy_chans[phy->num] = NULL; in d40_free_dma()
2061 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2062 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2070 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2071 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2088 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in d40_is_paused()
2090 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in d40_is_paused()
2191 dma_sync_single_for_device(chan->base->dev, desc->lli_pool.dma_addr, in d40_prep_sg_phy()
2326 struct d40_gen_dmac *dmac = &d40c->base->gen_dmac; in __d40_set_prio_rt()
2346 writel(bit, d40c->base->virtbase + prioreg + group * 4); in __d40_set_prio_rt()
2347 writel(bit, d40c->base->virtbase + rtreg + group * 4); in __d40_set_prio_rt()
2352 if (d40c->base->rev < 3) in d40_set_prio_realtime()
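
Lines 2326..2352: realtime and high-priority are programmed per event group by writing the event's bit to either a "set" or a "clear" register bank picked out of gen_dmac, at a 4-byte stride per group; controllers older than rev 3 skip the feature entirely (line 2352). The bank selection and writes, with the event/group derivation from dev_type assumed:

	struct d40_gen_dmac *dmac = &d40c->base->gen_dmac;
	u32 rtreg   = realtime ? dmac->realtime_en  : dmac->realtime_clear;
	u32 prioreg = highprio ? dmac->high_prio_en : dmac->high_prio_clear;
	u32 bit = BIT(event);	/* event/group derived from dev_type (assumed) */

	writel(bit, d40c->base->virtbase + prioreg + group * 4);
	writel(bit, d40c->base->virtbase + rtreg + group * 4);
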
2440 pm_runtime_get_sync(d40c->base->dev); in d40_alloc_chan_resources()
2446 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2449 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2472 pm_runtime_mark_last_busy(d40c->base->dev); in d40_alloc_chan_resources()
2473 pm_runtime_put_autosuspend(d40c->base->dev); in d40_alloc_chan_resources()
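
Lines 2440..2473: channel allocation reuses the runtime-PM bracket, and a logical channel's LCPA pointer is computed from lcpa_base plus a per-device-type offset; the two variants at lines 2446 and 2449 presumably select the src and dst halves of the slot. A sketch using the driver's constant names, exact values assumed:

	/* src half of the LCPA slot for this device type */
	d40c->lcpa = d40c->base->lcpa_base +
		     d40c->dma_cfg.dev_type * D40_LCPA_CHAN_SIZE;
	/* dst half: same slot, shifted by D40_LCPA_CHAN_DST_DELTA (assumed) */
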
2634 pm_runtime_get_sync(d40c->base->dev); in d40_terminate_all()
2640 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2641 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2643 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2644 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2710 dev_dbg(d40c->base->dev, in d40_set_runtime_config()
2726 dev_dbg(d40c->base->dev, in d40_set_runtime_config()
2738 dev_err(d40c->base->dev, in d40_set_runtime_config()
2745 dev_err(d40c->base->dev, "no address supplied\n"); in d40_set_runtime_config()
2750 dev_err(d40c->base->dev, in d40_set_runtime_config()
2798 dev_dbg(d40c->base->dev, in d40_set_runtime_config()
2811 static void __init d40_chan_init(struct d40_base *base, struct dma_device *dma, in d40_chan_init() argument
2822 d40c->base = base; in d40_chan_init()
2844 static void d40_ops_init(struct d40_base *base, struct dma_device *dev) in d40_ops_init() argument
2873 dev->dev = base->dev; in d40_ops_init()
2876 static int __init d40_dmaengine_init(struct d40_base *base, in d40_dmaengine_init() argument
2881 d40_chan_init(base, &base->dma_slave, base->log_chans, in d40_dmaengine_init()
2882 0, base->num_log_chans); in d40_dmaengine_init()
2884 dma_cap_zero(base->dma_slave.cap_mask); in d40_dmaengine_init()
2885 dma_cap_set(DMA_SLAVE, base->dma_slave.cap_mask); in d40_dmaengine_init()
2886 dma_cap_set(DMA_CYCLIC, base->dma_slave.cap_mask); in d40_dmaengine_init()
2888 d40_ops_init(base, &base->dma_slave); in d40_dmaengine_init()
2890 err = dma_async_device_register(&base->dma_slave); in d40_dmaengine_init()
2893 d40_err(base->dev, "Failed to register slave channels\n"); in d40_dmaengine_init()
2897 d40_chan_init(base, &base->dma_memcpy, base->log_chans, in d40_dmaengine_init()
2898 base->num_log_chans, base->num_memcpy_chans); in d40_dmaengine_init()
2900 dma_cap_zero(base->dma_memcpy.cap_mask); in d40_dmaengine_init()
2901 dma_cap_set(DMA_MEMCPY, base->dma_memcpy.cap_mask); in d40_dmaengine_init()
2902 dma_cap_set(DMA_SG, base->dma_memcpy.cap_mask); in d40_dmaengine_init()
2904 d40_ops_init(base, &base->dma_memcpy); in d40_dmaengine_init()
2906 err = dma_async_device_register(&base->dma_memcpy); in d40_dmaengine_init()
2909 d40_err(base->dev, in d40_dmaengine_init()
2914 d40_chan_init(base, &base->dma_both, base->phy_chans, in d40_dmaengine_init()
2917 dma_cap_zero(base->dma_both.cap_mask); in d40_dmaengine_init()
2918 dma_cap_set(DMA_SLAVE, base->dma_both.cap_mask); in d40_dmaengine_init()
2919 dma_cap_set(DMA_MEMCPY, base->dma_both.cap_mask); in d40_dmaengine_init()
2920 dma_cap_set(DMA_SG, base->dma_both.cap_mask); in d40_dmaengine_init()
2921 dma_cap_set(DMA_CYCLIC, base->dma_slave.cap_mask); in d40_dmaengine_init()
2923 d40_ops_init(base, &base->dma_both); in d40_dmaengine_init()
2924 err = dma_async_device_register(&base->dma_both); in d40_dmaengine_init()
2927 d40_err(base->dev, in d40_dmaengine_init()
2933 dma_async_device_unregister(&base->dma_memcpy); in d40_dmaengine_init()
2935 dma_async_device_unregister(&base->dma_slave); in d40_dmaengine_init()
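
Lines 2876..2935 register three dma_device instances off the same controller: a slave/cyclic device on the logical channels, a memcpy device, and a combined device on the physical channels, with each later failure unregistering the devices that came before it. One oddity worth flagging: line 2921 sets DMA_CYCLIC on dma_slave in the middle of the dma_both setup, which looks like a copy-paste slip; the sketch below assumes dma_both was intended, and the unwind labels are assumed names:

	/* Third device: combined capabilities (sketch of lines 2917..2935). */
	dma_cap_zero(base->dma_both.cap_mask);
	dma_cap_set(DMA_SLAVE, base->dma_both.cap_mask);
	dma_cap_set(DMA_MEMCPY, base->dma_both.cap_mask);
	dma_cap_set(DMA_SG, base->dma_both.cap_mask);
	dma_cap_set(DMA_CYCLIC, base->dma_both.cap_mask);	/* not dma_slave */

	d40_ops_init(base, &base->dma_both);
	err = dma_async_device_register(&base->dma_both);
	if (err)
		goto unregister_memcpy;		/* label name assumed */
	return 0;

 unregister_memcpy:
	dma_async_device_unregister(&base->dma_memcpy);
	dma_async_device_unregister(&base->dma_slave);
	return err;
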
2945 struct d40_base *base = platform_get_drvdata(pdev); in dma40_suspend() local
2952 if (base->lcpa_regulator) in dma40_suspend()
2953 ret = regulator_disable(base->lcpa_regulator); in dma40_suspend()
2960 struct d40_base *base = platform_get_drvdata(pdev); in dma40_resume() local
2963 if (base->lcpa_regulator) { in dma40_resume()
2964 ret = regulator_enable(base->lcpa_regulator); in dma40_resume()
2989 static void d40_save_restore_registers(struct d40_base *base, bool save) in d40_save_restore_registers() argument
2994 for (i = 0; i < base->num_phy_chans; i++) { in d40_save_restore_registers()
2998 if (base->phy_res[i].reserved) in d40_save_restore_registers()
3001 addr = base->virtbase + D40_DREG_PCBASE + i * D40_DREG_PCDELTA; in d40_save_restore_registers()
3004 dma40_backup(addr, &base->reg_val_backup_chan[idx], in d40_save_restore_registers()
3011 dma40_backup(base->virtbase, base->reg_val_backup, in d40_save_restore_registers()
3016 if (base->gen_dmac.backup) in d40_save_restore_registers()
3017 dma40_backup(base->virtbase, base->reg_val_backup_v4, in d40_save_restore_registers()
3018 base->gen_dmac.backup, in d40_save_restore_registers()
3019 base->gen_dmac.backup_size, in d40_save_restore_registers()
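
Lines 2989..3019: a single `save` flag drives both directions of the power-state register dance: every non-reserved physical channel window (the same PCBASE/PCDELTA math as chan_base()) is copied to or from reg_val_backup_chan, followed by the global register list and, where present, the version-specific gen_dmac.backup list. The dma40_backup() helper these calls rely on is not shown in the listing; a plausible shape:

/* Assumed shape of dma40_backup(): copy a list of registers, given as
 * offsets in regaddr[], to (save) or from (restore) the backup array. */
static void dma40_backup(void __iomem *baseaddr, u32 *backup,
			 u32 *regaddr, int num, bool save)
{
	int i;

	for (i = 0; i < num; i++) {
		void __iomem *addr = baseaddr + regaddr[i];

		if (save)
			backup[i] = readl_relaxed(addr);
		else
			writel_relaxed(backup[i], addr);
	}
}
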
3026 struct d40_base *base = platform_get_drvdata(pdev); in dma40_runtime_suspend() local
3028 d40_save_restore_registers(base, true); in dma40_runtime_suspend()
3031 if (base->rev != 1) in dma40_runtime_suspend()
3032 writel_relaxed(base->gcc_pwr_off_mask, in dma40_runtime_suspend()
3033 base->virtbase + D40_DREG_GCC); in dma40_runtime_suspend()
3041 struct d40_base *base = platform_get_drvdata(pdev); in dma40_runtime_resume() local
3043 d40_save_restore_registers(base, false); in dma40_runtime_resume()
3046 base->virtbase + D40_DREG_GCC); in dma40_runtime_resume()
3060 static int __init d40_phy_res_init(struct d40_base *base) in d40_phy_res_init() argument
3068 val[0] = readl(base->virtbase + D40_DREG_PRSME); in d40_phy_res_init()
3069 val[1] = readl(base->virtbase + D40_DREG_PRSMO); in d40_phy_res_init()
3071 for (i = 0; i < base->num_phy_chans; i++) { in d40_phy_res_init()
3072 base->phy_res[i].num = i; in d40_phy_res_init()
3076 base->phy_res[i].allocated_src = D40_ALLOC_PHY; in d40_phy_res_init()
3077 base->phy_res[i].allocated_dst = D40_ALLOC_PHY; in d40_phy_res_init()
3078 base->phy_res[i].reserved = true; in d40_phy_res_init()
3086 base->phy_res[i].allocated_src = D40_ALLOC_FREE; in d40_phy_res_init()
3087 base->phy_res[i].allocated_dst = D40_ALLOC_FREE; in d40_phy_res_init()
3088 base->phy_res[i].reserved = false; in d40_phy_res_init()
3091 spin_lock_init(&base->phy_res[i].lock); in d40_phy_res_init()
3095 for (i = 0; base->plat_data->disabled_channels[i] != -1; i++) { in d40_phy_res_init()
3096 int chan = base->plat_data->disabled_channels[i]; in d40_phy_res_init()
3098 base->phy_res[chan].allocated_src = D40_ALLOC_PHY; in d40_phy_res_init()
3099 base->phy_res[chan].allocated_dst = D40_ALLOC_PHY; in d40_phy_res_init()
3100 base->phy_res[chan].reserved = true; in d40_phy_res_init()
3109 for (i = 0; i < base->plat_data->num_of_soft_lli_chans; i++) { in d40_phy_res_init()
3110 int chan = base->plat_data->soft_lli_chans[i]; in d40_phy_res_init()
3112 base->phy_res[chan].use_soft_lli = true; in d40_phy_res_init()
3115 dev_info(base->dev, "%d of %d physical DMA channels available\n", in d40_phy_res_init()
3116 num_phy_chans_avail, base->num_phy_chans); in d40_phy_res_init()
3119 val[0] = readl(base->virtbase + D40_DREG_PRTYP); in d40_phy_res_init()
3121 for (i = 0; i < base->num_phy_chans; i++) { in d40_phy_res_init()
3123 if (base->phy_res[i].allocated_src == D40_ALLOC_FREE && in d40_phy_res_init()
3125 dev_info(base->dev, in d40_phy_res_init()
3138 writel(D40_DREG_GCC_ENABLE_ALL, base->virtbase + D40_DREG_GCC); in d40_phy_res_init()
3139 base->gcc_pwr_off_mask = gcc; in d40_phy_res_init()
3150 struct d40_base *base = NULL; in d40_hw_detect_init() local
3236 base = kzalloc(ALIGN(sizeof(struct d40_base), 4) + in d40_hw_detect_init()
3240 if (base == NULL) { in d40_hw_detect_init()
3245 base->rev = rev; in d40_hw_detect_init()
3246 base->clk = clk; in d40_hw_detect_init()
3247 base->num_memcpy_chans = num_memcpy_chans; in d40_hw_detect_init()
3248 base->num_phy_chans = num_phy_chans; in d40_hw_detect_init()
3249 base->num_log_chans = num_log_chans; in d40_hw_detect_init()
3250 base->phy_start = res->start; in d40_hw_detect_init()
3251 base->phy_size = resource_size(res); in d40_hw_detect_init()
3252 base->virtbase = virtbase; in d40_hw_detect_init()
3253 base->plat_data = plat_data; in d40_hw_detect_init()
3254 base->dev = &pdev->dev; in d40_hw_detect_init()
3255 base->phy_chans = ((void *)base) + ALIGN(sizeof(struct d40_base), 4); in d40_hw_detect_init()
3256 base->log_chans = &base->phy_chans[num_phy_chans]; in d40_hw_detect_init()
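
Lines 3236..3256 use one kzalloc for the controller descriptor and every channel structure: the d40_base header sits first (padded to 4-byte alignment), the physical-channel array is carved out right behind it, and the logical-channel array is simply the tail of the same block. The carve-up in isolation:

	/* One allocation covers base + phy + log + memcpy channels. */
	base = kzalloc(ALIGN(sizeof(struct d40_base), 4) +
		       (num_phy_chans + num_log_chans + num_memcpy_chans) *
		       sizeof(struct d40_chan), GFP_KERNEL);
	if (base == NULL)
		goto failure;			/* label name assumed */

	base->phy_chans = ((void *)base) + ALIGN(sizeof(struct d40_base), 4);
	base->log_chans = &base->phy_chans[num_phy_chans];
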
3258 if (base->plat_data->num_of_phy_chans == 14) { in d40_hw_detect_init()
3259 base->gen_dmac.backup = d40_backup_regs_v4b; in d40_hw_detect_init()
3260 base->gen_dmac.backup_size = BACKUP_REGS_SZ_V4B; in d40_hw_detect_init()
3261 base->gen_dmac.interrupt_en = D40_DREG_CPCMIS; in d40_hw_detect_init()
3262 base->gen_dmac.interrupt_clear = D40_DREG_CPCICR; in d40_hw_detect_init()
3263 base->gen_dmac.realtime_en = D40_DREG_CRSEG1; in d40_hw_detect_init()
3264 base->gen_dmac.realtime_clear = D40_DREG_CRCEG1; in d40_hw_detect_init()
3265 base->gen_dmac.high_prio_en = D40_DREG_CPSEG1; in d40_hw_detect_init()
3266 base->gen_dmac.high_prio_clear = D40_DREG_CPCEG1; in d40_hw_detect_init()
3267 base->gen_dmac.il = il_v4b; in d40_hw_detect_init()
3268 base->gen_dmac.il_size = ARRAY_SIZE(il_v4b); in d40_hw_detect_init()
3269 base->gen_dmac.init_reg = dma_init_reg_v4b; in d40_hw_detect_init()
3270 base->gen_dmac.init_reg_size = ARRAY_SIZE(dma_init_reg_v4b); in d40_hw_detect_init()
3272 if (base->rev >= 3) { in d40_hw_detect_init()
3273 base->gen_dmac.backup = d40_backup_regs_v4a; in d40_hw_detect_init()
3274 base->gen_dmac.backup_size = BACKUP_REGS_SZ_V4A; in d40_hw_detect_init()
3276 base->gen_dmac.interrupt_en = D40_DREG_PCMIS; in d40_hw_detect_init()
3277 base->gen_dmac.interrupt_clear = D40_DREG_PCICR; in d40_hw_detect_init()
3278 base->gen_dmac.realtime_en = D40_DREG_RSEG1; in d40_hw_detect_init()
3279 base->gen_dmac.realtime_clear = D40_DREG_RCEG1; in d40_hw_detect_init()
3280 base->gen_dmac.high_prio_en = D40_DREG_PSEG1; in d40_hw_detect_init()
3281 base->gen_dmac.high_prio_clear = D40_DREG_PCEG1; in d40_hw_detect_init()
3282 base->gen_dmac.il = il_v4a; in d40_hw_detect_init()
3283 base->gen_dmac.il_size = ARRAY_SIZE(il_v4a); in d40_hw_detect_init()
3284 base->gen_dmac.init_reg = dma_init_reg_v4a; in d40_hw_detect_init()
3285 base->gen_dmac.init_reg_size = ARRAY_SIZE(dma_init_reg_v4a); in d40_hw_detect_init()
3288 base->phy_res = kzalloc(num_phy_chans * sizeof(struct d40_phy_res), in d40_hw_detect_init()
3290 if (!base->phy_res) in d40_hw_detect_init()
3293 base->lookup_phy_chans = kzalloc(num_phy_chans * in d40_hw_detect_init()
3296 if (!base->lookup_phy_chans) in d40_hw_detect_init()
3299 base->lookup_log_chans = kzalloc(num_log_chans * in d40_hw_detect_init()
3302 if (!base->lookup_log_chans) in d40_hw_detect_init()
3305 base->reg_val_backup_chan = kmalloc(base->num_phy_chans * in d40_hw_detect_init()
3308 if (!base->reg_val_backup_chan) in d40_hw_detect_init()
3311 base->lcla_pool.alloc_map = in d40_hw_detect_init()
3314 if (!base->lcla_pool.alloc_map) in d40_hw_detect_init()
3317 base->desc_slab = kmem_cache_create(D40_NAME, sizeof(struct d40_desc), in d40_hw_detect_init()
3320 if (base->desc_slab == NULL) in d40_hw_detect_init()
3323 return base; in d40_hw_detect_init()
3338 if (base) { in d40_hw_detect_init()
3339 kfree(base->lcla_pool.alloc_map); in d40_hw_detect_init()
3340 kfree(base->reg_val_backup_chan); in d40_hw_detect_init()
3341 kfree(base->lookup_log_chans); in d40_hw_detect_init()
3342 kfree(base->lookup_phy_chans); in d40_hw_detect_init()
3343 kfree(base->phy_res); in d40_hw_detect_init()
3344 kfree(base); in d40_hw_detect_init()
3350 static void __init d40_hw_init(struct d40_base *base) in d40_hw_init() argument
3358 struct d40_reg_val *dma_init_reg = base->gen_dmac.init_reg; in d40_hw_init()
3359 u32 reg_size = base->gen_dmac.init_reg_size; in d40_hw_init()
3363 base->virtbase + dma_init_reg[i].reg); in d40_hw_init()
3366 for (i = 0; i < base->num_phy_chans; i++) { in d40_hw_init()
3370 if (base->phy_res[base->num_phy_chans - i - 1].allocated_src in d40_hw_init()
3388 writel(prmseo[1], base->virtbase + D40_DREG_PRMSE); in d40_hw_init()
3389 writel(prmseo[0], base->virtbase + D40_DREG_PRMSO); in d40_hw_init()
3390 writel(activeo[1], base->virtbase + D40_DREG_ACTIVE); in d40_hw_init()
3391 writel(activeo[0], base->virtbase + D40_DREG_ACTIVO); in d40_hw_init()
3394 writel(pcmis, base->virtbase + base->gen_dmac.interrupt_en); in d40_hw_init()
3397 writel(pcicr, base->virtbase + base->gen_dmac.interrupt_clear); in d40_hw_init()
3400 base->gen_dmac.init_reg = NULL; in d40_hw_init()
3401 base->gen_dmac.init_reg_size = 0; in d40_hw_init()
3404 static int __init d40_lcla_allocate(struct d40_base *base) in d40_lcla_allocate() argument
3406 struct d40_lcla_pool *pool = &base->lcla_pool; in d40_lcla_allocate()
3425 base->lcla_pool.pages = SZ_1K * base->num_phy_chans / PAGE_SIZE; in d40_lcla_allocate()
3429 base->lcla_pool.pages); in d40_lcla_allocate()
3432 d40_err(base->dev, "Failed to allocate %d pages.\n", in d40_lcla_allocate()
3433 base->lcla_pool.pages); in d40_lcla_allocate()
3437 free_pages(page_list[j], base->lcla_pool.pages); in d40_lcla_allocate()
3447 free_pages(page_list[j], base->lcla_pool.pages); in d40_lcla_allocate()
3450 base->lcla_pool.base = (void *)page_list[i]; in d40_lcla_allocate()
3456 dev_warn(base->dev, in d40_lcla_allocate()
3458 __func__, base->lcla_pool.pages); in d40_lcla_allocate()
3459 base->lcla_pool.base_unaligned = kmalloc(SZ_1K * in d40_lcla_allocate()
3460 base->num_phy_chans + in d40_lcla_allocate()
3463 if (!base->lcla_pool.base_unaligned) { in d40_lcla_allocate()
3468 base->lcla_pool.base = PTR_ALIGN(base->lcla_pool.base_unaligned, in d40_lcla_allocate()
3472 pool->dma_addr = dma_map_single(base->dev, pool->base, in d40_lcla_allocate()
3473 SZ_1K * base->num_phy_chans, in d40_lcla_allocate()
3475 if (dma_mapping_error(base->dev, pool->dma_addr)) { in d40_lcla_allocate()
3481 writel(virt_to_phys(base->lcla_pool.base), in d40_lcla_allocate()
3482 base->virtbase + D40_DREG_LCLA); in d40_lcla_allocate()
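
Lines 3404..3482: the LCLA pool needs SZ_1K bytes per physical channel at a stricter alignment than the page allocator guarantees, so the driver keeps grabbing page blocks until one happens to be aligned, frees all the rejects, and if the attempt budget runs out falls back to an oversized kmalloc trimmed with PTR_ALIGN; the winner is then dma_map_single()'d and its address programmed into D40_DREG_LCLA. The retry loop as a sketch, using the driver's own names (MAX_LCLA_ALLOC_ATTEMPTS and LCLA_ALIGNMENT, exact values assumed):

	/* Keep allocating until a block meets LCLA_ALIGNMENT (lines 3425..3450). */
	for (i = 0; i < MAX_LCLA_ALLOC_ATTEMPTS; i++) {
		page_list[i] = __get_free_pages(GFP_KERNEL,
						base->lcla_pool.pages);
		if (!page_list[i]) {
			d40_err(base->dev, "Failed to allocate %d pages.\n",
				base->lcla_pool.pages);
			ret = -ENOMEM;
			break;
		}
		if ((virt_to_phys((void *)page_list[i]) &
		     (LCLA_ALIGNMENT - 1)) == 0)
			break;			/* found an aligned block */
	}

	/* Free every rejected, misaligned attempt. */
	for (j = 0; j < i; j++)
		free_pages(page_list[j], base->lcla_pool.pages);
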
3546 struct d40_base *base = NULL; in d40_probe() local
3563 base = d40_hw_detect_init(pdev); in d40_probe()
3564 if (!base) in d40_probe()
3567 num_reserved_chans = d40_phy_res_init(base); in d40_probe()
3569 platform_set_drvdata(pdev, base); in d40_probe()
3571 spin_lock_init(&base->interrupt_lock); in d40_probe()
3572 spin_lock_init(&base->execmd_lock); in d40_probe()
3581 base->lcpa_size = resource_size(res); in d40_probe()
3582 base->phy_lcpa = res->start; in d40_probe()
3592 val = readl(base->virtbase + D40_DREG_LCPA); in d40_probe()
3598 writel(res->start, base->virtbase + D40_DREG_LCPA); in d40_probe()
3600 base->lcpa_base = ioremap(res->start, resource_size(res)); in d40_probe()
3601 if (!base->lcpa_base) { in d40_probe()
3607 if (base->plat_data->use_esram_lcla) { in d40_probe()
3616 base->lcla_pool.base = ioremap(res->start, in d40_probe()
3618 if (!base->lcla_pool.base) { in d40_probe()
3623 writel(res->start, base->virtbase + D40_DREG_LCLA); in d40_probe()
3626 ret = d40_lcla_allocate(base); in d40_probe()
3633 spin_lock_init(&base->lcla_pool.lock); in d40_probe()
3635 base->irq = platform_get_irq(pdev, 0); in d40_probe()
3637 ret = request_irq(base->irq, d40_handle_interrupt, 0, D40_NAME, base); in d40_probe()
3643 if (base->plat_data->use_esram_lcla) { in d40_probe()
3645 base->lcpa_regulator = regulator_get(base->dev, "lcla_esram"); in d40_probe()
3646 if (IS_ERR(base->lcpa_regulator)) { in d40_probe()
3648 ret = PTR_ERR(base->lcpa_regulator); in d40_probe()
3649 base->lcpa_regulator = NULL; in d40_probe()
3653 ret = regulator_enable(base->lcpa_regulator); in d40_probe()
3657 regulator_put(base->lcpa_regulator); in d40_probe()
3658 base->lcpa_regulator = NULL; in d40_probe()
3663 writel_relaxed(D40_DREG_GCC_ENABLE_ALL, base->virtbase + D40_DREG_GCC); in d40_probe()
3665 pm_runtime_irq_safe(base->dev); in d40_probe()
3666 pm_runtime_set_autosuspend_delay(base->dev, DMA40_AUTOSUSPEND_DELAY); in d40_probe()
3667 pm_runtime_use_autosuspend(base->dev); in d40_probe()
3668 pm_runtime_mark_last_busy(base->dev); in d40_probe()
3669 pm_runtime_set_active(base->dev); in d40_probe()
3670 pm_runtime_enable(base->dev); in d40_probe()
3672 ret = d40_dmaengine_init(base, num_reserved_chans); in d40_probe()
3676 base->dev->dma_parms = &base->dma_parms; in d40_probe()
3677 ret = dma_set_max_seg_size(base->dev, STEDMA40_MAX_SEG_SIZE); in d40_probe()
3683 d40_hw_init(base); in d40_probe()
3692 dev_info(base->dev, "initialized\n"); in d40_probe()
3696 if (base) { in d40_probe()
3697 if (base->desc_slab) in d40_probe()
3698 kmem_cache_destroy(base->desc_slab); in d40_probe()
3699 if (base->virtbase) in d40_probe()
3700 iounmap(base->virtbase); in d40_probe()
3702 if (base->lcla_pool.base && base->plat_data->use_esram_lcla) { in d40_probe()
3703 iounmap(base->lcla_pool.base); in d40_probe()
3704 base->lcla_pool.base = NULL; in d40_probe()
3707 if (base->lcla_pool.dma_addr) in d40_probe()
3708 dma_unmap_single(base->dev, base->lcla_pool.dma_addr, in d40_probe()
3709 SZ_1K * base->num_phy_chans, in d40_probe()
3712 if (!base->lcla_pool.base_unaligned && base->lcla_pool.base) in d40_probe()
3713 free_pages((unsigned long)base->lcla_pool.base, in d40_probe()
3714 base->lcla_pool.pages); in d40_probe()
3716 kfree(base->lcla_pool.base_unaligned); in d40_probe()
3718 if (base->phy_lcpa) in d40_probe()
3719 release_mem_region(base->phy_lcpa, in d40_probe()
3720 base->lcpa_size); in d40_probe()
3721 if (base->phy_start) in d40_probe()
3722 release_mem_region(base->phy_start, in d40_probe()
3723 base->phy_size); in d40_probe()
3724 if (base->clk) { in d40_probe()
3725 clk_disable_unprepare(base->clk); in d40_probe()
3726 clk_put(base->clk); in d40_probe()
3729 if (base->lcpa_regulator) { in d40_probe()
3730 regulator_disable(base->lcpa_regulator); in d40_probe()
3731 regulator_put(base->lcpa_regulator); in d40_probe()
3734 kfree(base->lcla_pool.alloc_map); in d40_probe()
3735 kfree(base->lookup_log_chans); in d40_probe()
3736 kfree(base->lookup_phy_chans); in d40_probe()
3737 kfree(base->phy_res); in d40_probe()
3738 kfree(base); in d40_probe()