vbif 420 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_catalog.c .vbif = sdm845_vbif,
vbif 674 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_catalog.h struct dpu_vbif_cfg *vbif;
vbif 35 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static void dpu_hw_clear_errors(struct dpu_hw_vbif *vbif,
vbif 41 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c if (!vbif)
vbif 43 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c c = &vbif->hw;
vbif 55 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static void dpu_hw_set_mem_type(struct dpu_hw_vbif *vbif,
vbif 67 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c if (!vbif || xin_id >= MAX_XIN_COUNT)
vbif 70 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c c = &vbif->hw;
vbif 85 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static void dpu_hw_set_limit_conf(struct dpu_hw_vbif *vbif,
vbif 88 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c struct dpu_hw_blk_reg_map *c = &vbif->hw;
vbif 106 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static u32 dpu_hw_get_limit_conf(struct dpu_hw_vbif *vbif,
vbif 109 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c struct dpu_hw_blk_reg_map *c = &vbif->hw;
vbif 128 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static void dpu_hw_set_halt_ctrl(struct dpu_hw_vbif *vbif,
vbif 131 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c struct dpu_hw_blk_reg_map *c = &vbif->hw;
vbif 144 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static bool dpu_hw_get_halt_ctrl(struct dpu_hw_vbif *vbif,
vbif 147 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c struct dpu_hw_blk_reg_map *c = &vbif->hw;
vbif 155 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static void dpu_hw_set_qos_remap(struct dpu_hw_vbif *vbif,
vbif 161 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c if (!vbif)
vbif 164 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c c = &vbif->hw;
vbif 184 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static void dpu_hw_set_write_gather_en(struct dpu_hw_vbif *vbif, u32 xin_id)
vbif 189 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c if (!vbif || xin_id >= MAX_XIN_COUNT)
vbif 192 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c c = &vbif->hw;
vbif 213 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c static const struct dpu_vbif_cfg *_top_offset(enum dpu_vbif vbif,
vbif 221 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c if (vbif == m->vbif[i].id) {
vbif 223 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c b->blk_off = m->vbif[i].base;
vbif 224 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c b->length = m->vbif[i].len;
vbif 227 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c return &m->vbif[i];
vbif 263 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c void dpu_hw_vbif_destroy(struct dpu_hw_vbif *vbif)
vbif 265 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.c kfree(vbif);
vbif 26 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h void (*set_limit_conf)(struct dpu_hw_vbif *vbif,
vbif 36 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h u32 (*get_limit_conf)(struct dpu_hw_vbif *vbif,
vbif 45 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h void (*set_halt_ctrl)(struct dpu_hw_vbif *vbif,
vbif 54 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h bool (*get_halt_ctrl)(struct dpu_hw_vbif *vbif,
vbif 64 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h void (*set_qos_remap)(struct dpu_hw_vbif *vbif,
vbif 73 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h void (*set_mem_type)(struct dpu_hw_vbif *vbif,
vbif 85 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h void (*clear_errors)(struct dpu_hw_vbif *vbif,
vbif 93 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h void (*set_write_gather_en)(struct dpu_hw_vbif *vbif, u32 xin_id);
vbif 118 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_vbif.h void dpu_hw_vbif_destroy(struct dpu_hw_vbif *vbif);
vbif 600 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c u32 vbif_idx = dpu_kms->catalog->vbif[i].id;
vbif 615 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c if (dpu_kms->vbif[VBIF_NRT])
vbif 616 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c devm_iounmap(&dpu_kms->pdev->dev, dpu_kms->vbif[VBIF_NRT]);
vbif 617 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c dpu_kms->vbif[VBIF_NRT] = NULL;
vbif 619 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c if (dpu_kms->vbif[VBIF_RT])
vbif 620 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c devm_iounmap(&dpu_kms->pdev->dev, dpu_kms->vbif[VBIF_RT]);
vbif 621 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c dpu_kms->vbif[VBIF_RT] = NULL;
vbif 829 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c dpu_kms->vbif[VBIF_RT] = msm_ioremap(dpu_kms->pdev, "vbif", "vbif");
vbif 830 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c if (IS_ERR(dpu_kms->vbif[VBIF_RT])) {
vbif 831 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c rc = PTR_ERR(dpu_kms->vbif[VBIF_RT]);
vbif 833 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c dpu_kms->vbif[VBIF_RT] = NULL;
vbif 837 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c dpu_kms->vbif[VBIF_NRT] = msm_ioremap(dpu_kms->pdev, "vbif_nrt", "vbif_nrt");
vbif 838 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c if (IS_ERR(dpu_kms->vbif[VBIF_NRT])) {
vbif 839 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c dpu_kms->vbif[VBIF_NRT] = NULL;
vbif 898 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c u32 vbif_idx = dpu_kms->catalog->vbif[i].id;
vbif 901 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.c dpu_kms->vbif[vbif_idx], dpu_kms->catalog);
vbif 102 drivers/gpu/drm/msm/disp/dpu1/dpu_kms.h void __iomem *mmio, *vbif[VBIF_MAX], *reg_dma;
vbif 20 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c static int _dpu_vbif_wait_for_xin_halt(struct dpu_hw_vbif *vbif, u32 xin_id)
vbif 26 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (!vbif || !vbif->cap || !vbif->ops.get_halt_ctrl) {
vbif 27 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c DPU_ERROR("invalid arguments vbif %d\n", vbif != 0);
vbif 31 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c timeout = ktime_add_us(ktime_get(), vbif->cap->xin_halt_timeout);
vbif 33 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c status = vbif->ops.get_halt_ctrl(vbif, xin_id);
vbif 37 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c status = vbif->ops.get_halt_ctrl(vbif, xin_id);
vbif 46 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->idx - VBIF_0, xin_id);
vbif 50 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->idx - VBIF_0, xin_id);
vbif 62 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c static void _dpu_vbif_apply_dynamic_ot_limit(struct dpu_hw_vbif *vbif,
vbif 69 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (!vbif || !(vbif->cap->features & BIT(DPU_VBIF_QOS_OTLIM)))
vbif 80 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c tbl = params->rd ? &vbif->cap->dynamic_ot_rd_tbl :
vbif 81 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c &vbif->cap->dynamic_ot_wr_tbl;
vbif 91 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->idx - VBIF_0, params->xin_id,
vbif 102 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c static u32 _dpu_vbif_get_ot_limit(struct dpu_hw_vbif *vbif,
vbif 108 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (!vbif || !vbif->cap) {
vbif 109 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c DPU_ERROR("invalid arguments vbif %d\n", vbif != 0);
vbif 113 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (vbif->cap->default_ot_wr_limit && !params->rd)
vbif 114 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c ot_lim = vbif->cap->default_ot_wr_limit;
vbif 115 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c else if (vbif->cap->default_ot_rd_limit && params->rd)
vbif 116 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c ot_lim = vbif->cap->default_ot_rd_limit;
vbif 126 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c _dpu_vbif_apply_dynamic_ot_limit(vbif, &ot_lim, params);
vbif 128 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (vbif && vbif->ops.get_limit_conf) {
vbif 129 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c val = vbif->ops.get_limit_conf(vbif,
vbif 137 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->idx - VBIF_0, params->xin_id, ot_lim);
vbif 151 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c struct dpu_hw_vbif *vbif = NULL;
vbif 166 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif = dpu_kms->hw_vbif[i];
vbif 169 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (!vbif || !mdp) {
vbif 171 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif != 0, mdp != 0);
vbif 176 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c !vbif->ops.set_limit_conf ||
vbif 177 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c !vbif->ops.set_halt_ctrl)
vbif 181 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (vbif->ops.set_write_gather_en && !params->rd)
vbif 182 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->ops.set_write_gather_en(vbif, params->xin_id);
vbif 184 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c ot_lim = _dpu_vbif_get_ot_limit(vbif, params) & 0xFF;
vbif 194 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->ops.set_limit_conf(vbif, params->xin_id, params->rd, ot_lim);
vbif 196 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->ops.set_halt_ctrl(vbif, params->xin_id, true);
vbif 198 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c ret = _dpu_vbif_wait_for_xin_halt(vbif, params->xin_id);
vbif 200 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c trace_dpu_vbif_wait_xin_halt_fail(vbif->idx, params->xin_id);
vbif 202 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->ops.set_halt_ctrl(vbif, params->xin_id, false);
vbif 211 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c struct dpu_hw_vbif *vbif = NULL;
vbif 226 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif = dpu_kms->hw_vbif[i];
vbif 231 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (!vbif || !vbif->cap) {
vbif 236 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (!vbif->ops.set_qos_remap || !mdp->ops.setup_clk_force_ctrl) {
vbif 241 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c qos_tbl = params->is_rt ? &vbif->cap->qos_rt_tbl :
vbif 242 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c &vbif->cap->qos_nrt_tbl;
vbif 255 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->ops.set_qos_remap(vbif, params->xin_id, i,
vbif 265 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c struct dpu_hw_vbif *vbif;
vbif 269 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif = dpu_kms->hw_vbif[i];
vbif 270 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (vbif && vbif->ops.clear_errors) {
vbif 271 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->ops.clear_errors(vbif, &pnd, &src);
vbif 274 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->idx - VBIF_0, pnd, src);
vbif 282 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c struct dpu_hw_vbif *vbif;
vbif 286 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif = dpu_kms->hw_vbif[i];
vbif 287 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c if (vbif && vbif->cap && vbif->ops.set_mem_type) {
vbif 288 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c for (j = 0; j < vbif->cap->memtype_count; j++)
vbif 289 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif->ops.set_mem_type(
vbif 290 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c vbif, j, vbif->cap->memtype[j]);
vbif 306 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c struct dpu_vbif_cfg *vbif = &dpu_kms->catalog->vbif[i];
vbif 308 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c snprintf(vbif_name, sizeof(vbif_name), "%d", vbif->id);
vbif 313 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c (u32 *)&vbif->features);
vbif 316 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c (u32 *)&vbif->xin_halt_timeout);
vbif 319 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c (u32 *)&vbif->default_ot_rd_limit);
vbif 322 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c (u32 *)&vbif->default_ot_wr_limit);
vbif 324 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c for (j = 0; j < vbif->dynamic_ot_rd_tbl.count; j++) {
vbif 326 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c &vbif->dynamic_ot_rd_tbl.cfg[j];
vbif 338 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c for (j = 0; j < vbif->dynamic_ot_wr_tbl.count; j++) {
vbif 340 drivers/gpu/drm/msm/disp/dpu1/dpu_vbif.c &vbif->dynamic_ot_wr_tbl.cfg[j];
vbif 17 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c void __iomem *mmio, *vbif;
vbif 230 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c mdp5_mdss->vbif = msm_ioremap(pdev, "vbif_phys", "VBIF");
vbif 231 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c if (IS_ERR(mdp5_mdss->vbif)) {
vbif 232 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c ret = PTR_ERR(mdp5_mdss->vbif);
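Taken together, the dpu_vbif.c hits above outline the order in which the driver drives the dpu_hw_vbif ops when programming an outstanding-transaction (OT) limit: enable write gathering for write clients, compute the limit, program it through set_limit_conf, assert the xin halt, poll get_halt_ctrl until the halt is acknowledged or xin_halt_timeout expires, then release the halt. The following is a minimal sketch of that sequence, assuming only the op signatures implied by the call sites listed here; the wrapper name and the poll interval are illustrative and not part of the driver.

#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/ktime.h>

#include "dpu_hw_vbif.h"

/* Hypothetical wrapper condensing the OT-limit sequence from dpu_vbif.c. */
static int sketch_vbif_apply_ot_limit(struct dpu_hw_vbif *vbif,
				      u32 xin_id, bool rd, u32 ot_lim)
{
	ktime_t timeout;
	bool halted = false;

	if (!vbif->ops.set_limit_conf || !vbif->ops.set_halt_ctrl ||
	    !vbif->ops.get_halt_ctrl)
		return -EINVAL;

	/* Write clients enable write gathering before the limit applies. */
	if (!rd && vbif->ops.set_write_gather_en)
		vbif->ops.set_write_gather_en(vbif, xin_id);

	/* Program the outstanding-transaction limit for this xin port. */
	vbif->ops.set_limit_conf(vbif, xin_id, rd, ot_lim);

	/* Halt the client and poll until the halt is acknowledged. */
	vbif->ops.set_halt_ctrl(vbif, xin_id, true);
	timeout = ktime_add_us(ktime_get(), vbif->cap->xin_halt_timeout);
	do {
		halted = vbif->ops.get_halt_ctrl(vbif, xin_id);
		if (halted)
			break;
		udelay(10);	/* poll interval: illustrative only */
	} while (ktime_before(ktime_get(), timeout));

	/* Release the halt whether or not the ack arrived in time. */
	vbif->ops.set_halt_ctrl(vbif, xin_id, false);

	return halted ? 0 : -ETIMEDOUT;
}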