msm_host 180 drivers/gpu/drm/msm/dsi/dsi.h int dsi_link_clk_enable_6g(struct msm_dsi_host *msm_host);
msm_host 181 drivers/gpu/drm/msm/dsi/dsi.h int dsi_link_clk_enable_v2(struct msm_dsi_host *msm_host);
msm_host 182 drivers/gpu/drm/msm/dsi/dsi.h void dsi_link_clk_disable_6g(struct msm_dsi_host *msm_host);
msm_host 183 drivers/gpu/drm/msm/dsi/dsi.h void dsi_link_clk_disable_v2(struct msm_dsi_host *msm_host);
msm_host 184 drivers/gpu/drm/msm/dsi/dsi.h int dsi_tx_buf_alloc_6g(struct msm_dsi_host *msm_host, int size);
msm_host 185 drivers/gpu/drm/msm/dsi/dsi.h int dsi_tx_buf_alloc_v2(struct msm_dsi_host *msm_host, int size);
msm_host 186 drivers/gpu/drm/msm/dsi/dsi.h void *dsi_tx_buf_get_6g(struct msm_dsi_host *msm_host);
msm_host 187 drivers/gpu/drm/msm/dsi/dsi.h void *dsi_tx_buf_get_v2(struct msm_dsi_host *msm_host);
msm_host 188 drivers/gpu/drm/msm/dsi/dsi.h void dsi_tx_buf_put_6g(struct msm_dsi_host *msm_host);
msm_host 189 drivers/gpu/drm/msm/dsi/dsi.h int dsi_dma_base_get_6g(struct msm_dsi_host *msm_host, uint64_t *iova);
msm_host 190 drivers/gpu/drm/msm/dsi/dsi.h int dsi_dma_base_get_v2(struct msm_dsi_host *msm_host, uint64_t *iova);
msm_host 191 drivers/gpu/drm/msm/dsi/dsi.h int dsi_clk_init_v2(struct msm_dsi_host *msm_host);
msm_host 192 drivers/gpu/drm/msm/dsi/dsi.h int dsi_clk_init_6g_v2(struct msm_dsi_host *msm_host);
msm_host 193 drivers/gpu/drm/msm/dsi/dsi.h int dsi_calc_clk_rate_v2(struct msm_dsi_host *msm_host, bool is_dual_dsi);
msm_host 194 drivers/gpu/drm/msm/dsi/dsi.h int dsi_calc_clk_rate_6g(struct msm_dsi_host *msm_host, bool is_dual_dsi);
msm_host 37 drivers/gpu/drm/msm/dsi/dsi_cfg.h int (*link_clk_enable)(struct msm_dsi_host *msm_host);
msm_host 38 drivers/gpu/drm/msm/dsi/dsi_cfg.h void (*link_clk_disable)(struct msm_dsi_host *msm_host);
msm_host 39 drivers/gpu/drm/msm/dsi/dsi_cfg.h int (*clk_init_ver)(struct msm_dsi_host *msm_host);
msm_host 40 drivers/gpu/drm/msm/dsi/dsi_cfg.h int (*tx_buf_alloc)(struct msm_dsi_host *msm_host, int size);
msm_host 41 drivers/gpu/drm/msm/dsi/dsi_cfg.h void* (*tx_buf_get)(struct msm_dsi_host *msm_host);
msm_host 42 drivers/gpu/drm/msm/dsi/dsi_cfg.h void (*tx_buf_put)(struct msm_dsi_host *msm_host);
msm_host 43 drivers/gpu/drm/msm/dsi/dsi_cfg.h int (*dma_base_get)(struct msm_dsi_host *msm_host, uint64_t *iova);
msm_host 44 drivers/gpu/drm/msm/dsi/dsi_cfg.h int (*calc_clk_rate)(struct msm_dsi_host *msm_host, bool is_dual_dsi);
msm_host 186 drivers/gpu/drm/msm/dsi/dsi_host.c static inline u32 dsi_read(struct msm_dsi_host *msm_host, u32 reg)
msm_host 188 drivers/gpu/drm/msm/dsi/dsi_host.c return msm_readl(msm_host->ctrl_base + reg);
msm_host 190 drivers/gpu/drm/msm/dsi/dsi_host.c static inline void dsi_write(struct msm_dsi_host *msm_host, u32 reg, u32 data)
msm_host 192 drivers/gpu/drm/msm/dsi/dsi_host.c msm_writel(data, msm_host->ctrl_base + reg);
msm_host 195 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_host_regulator_enable(struct msm_dsi_host *msm_host);
msm_host 196 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_host_regulator_disable(struct msm_dsi_host *msm_host);
msm_host 199 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host)
msm_host 202 drivers/gpu/drm/msm/dsi/dsi_host.c struct device *dev = &msm_host->pdev->dev;
msm_host 214 drivers/gpu/drm/msm/dsi/dsi_host.c ahb_clk = msm_clk_get(msm_host->pdev, "iface");
msm_host 234 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_get_version(msm_host->ctrl_base, &major, &minor);
msm_host 260 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_host_regulator_disable(struct msm_dsi_host *msm_host)
msm_host 262 drivers/gpu/drm/msm/dsi/dsi_host.c struct regulator_bulk_data *s = msm_host->supplies;
msm_host 263 drivers/gpu/drm/msm/dsi/dsi_host.c const struct dsi_reg_entry *regs = msm_host->cfg_hnd->cfg->reg_cfg.regs;
msm_host 264 drivers/gpu/drm/msm/dsi/dsi_host.c int num = msm_host->cfg_hnd->cfg->reg_cfg.num;
msm_host 276 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_host_regulator_enable(struct msm_dsi_host *msm_host)
msm_host 278 drivers/gpu/drm/msm/dsi/dsi_host.c struct regulator_bulk_data *s = msm_host->supplies;
msm_host 279 drivers/gpu/drm/msm/dsi/dsi_host.c const struct dsi_reg_entry *regs = msm_host->cfg_hnd->cfg->reg_cfg.regs;
msm_host 280 drivers/gpu/drm/msm/dsi/dsi_host.c int num = msm_host->cfg_hnd->cfg->reg_cfg.num;
msm_host 310 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_regulator_init(struct msm_dsi_host *msm_host)
msm_host 312 drivers/gpu/drm/msm/dsi/dsi_host.c struct regulator_bulk_data *s = msm_host->supplies;
msm_host 313 drivers/gpu/drm/msm/dsi/dsi_host.c const struct dsi_reg_entry *regs = msm_host->cfg_hnd->cfg->reg_cfg.regs;
msm_host 314 drivers/gpu/drm/msm/dsi/dsi_host.c int num = msm_host->cfg_hnd->cfg->reg_cfg.num;
msm_host 320 drivers/gpu/drm/msm/dsi/dsi_host.c ret = devm_regulator_bulk_get(&msm_host->pdev->dev, num, s);
msm_host 330 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_clk_init_v2(struct msm_dsi_host *msm_host)
msm_host 332 drivers/gpu/drm/msm/dsi/dsi_host.c struct platform_device *pdev = msm_host->pdev;
msm_host 335 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->src_clk = msm_clk_get(pdev, "src");
msm_host 337 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->src_clk)) {
msm_host 338 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->src_clk);
msm_host 341 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->src_clk = NULL;
msm_host 345 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->esc_clk_src = clk_get_parent(msm_host->esc_clk);
msm_host 346 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->esc_clk_src) {
msm_host 353 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->dsi_clk_src = clk_get_parent(msm_host->src_clk);
msm_host 354 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->dsi_clk_src) {
msm_host 363 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_clk_init_6g_v2(struct msm_dsi_host *msm_host)
msm_host 365 drivers/gpu/drm/msm/dsi/dsi_host.c struct platform_device *pdev = msm_host->pdev;
msm_host 368 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->byte_intf_clk = msm_clk_get(pdev, "byte_intf");
msm_host 369 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->byte_intf_clk)) {
msm_host 370 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->byte_intf_clk);
msm_host 378 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_clk_init(struct msm_dsi_host *msm_host)
msm_host 380 drivers/gpu/drm/msm/dsi/dsi_host.c struct platform_device *pdev = msm_host->pdev;
msm_host 381 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd;
msm_host 387 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->bus_clks[i] = msm_clk_get(pdev,
msm_host 389 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->bus_clks[i])) {
msm_host 390 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->bus_clks[i]);
msm_host 398 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->byte_clk = msm_clk_get(pdev, "byte");
msm_host 399 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->byte_clk)) {
msm_host 400 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->byte_clk);
msm_host 403 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->byte_clk = NULL;
msm_host 407 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->pixel_clk = msm_clk_get(pdev, "pixel");
msm_host 408 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->pixel_clk)) {
msm_host 409 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->pixel_clk);
msm_host 412 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->pixel_clk = NULL;
msm_host 416 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->esc_clk = msm_clk_get(pdev, "core");
msm_host 417 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->esc_clk)) {
msm_host 418 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->esc_clk);
msm_host 421 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->esc_clk = NULL;
msm_host 425 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->byte_clk_src = clk_get_parent(msm_host->byte_clk);
msm_host 426 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->byte_clk_src)) {
msm_host 427 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->byte_clk_src);
msm_host 432 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->pixel_clk_src = clk_get_parent(msm_host->pixel_clk);
msm_host 433 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->pixel_clk_src)) {
msm_host 434 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->pixel_clk_src);
msm_host 440 drivers/gpu/drm/msm/dsi/dsi_host.c ret = cfg_hnd->ops->clk_init_ver(msm_host);
msm_host 445 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_bus_clk_enable(struct msm_dsi_host *msm_host)
msm_host 447 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_config *cfg = msm_host->cfg_hnd->cfg;
msm_host 450 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("id=%d", msm_host->id);
msm_host 453 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->bus_clks[i]);
msm_host 464 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->bus_clks[i]);
msm_host 469 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_bus_clk_disable(struct msm_dsi_host *msm_host)
msm_host 471 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_config *cfg = msm_host->cfg_hnd->cfg;
msm_host 477 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->bus_clks[i]);
msm_host 485 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host);
msm_host 487 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->cfg_hnd)
msm_host 490 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_bus_clk_disable(msm_host);
msm_host 500 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host);
msm_host 502 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->cfg_hnd)
msm_host 505 drivers/gpu/drm/msm/dsi/dsi_host.c return dsi_bus_clk_enable(msm_host);
msm_host 508 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_link_clk_enable_6g(struct msm_dsi_host *msm_host)
msm_host 513 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->mode->clock, msm_host->byte_clk_rate);
msm_host 515 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_rate(msm_host->byte_clk, msm_host->byte_clk_rate);
msm_host 521 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_rate(msm_host->pixel_clk, msm_host->pixel_clk_rate);
msm_host 527 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->byte_intf_clk) {
msm_host 528 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_rate(msm_host->byte_intf_clk,
msm_host 529 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->byte_clk_rate / 2);
msm_host 537 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->esc_clk);
msm_host 543 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->byte_clk);
msm_host 549 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->pixel_clk);
msm_host 555 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->byte_intf_clk) {
msm_host 556 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->byte_intf_clk);
msm_host 567 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->pixel_clk);
msm_host 569 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->byte_clk);
msm_host 571 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->esc_clk);
msm_host 576 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_link_clk_enable_v2(struct msm_dsi_host *msm_host)
msm_host 581 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->mode->clock, msm_host->byte_clk_rate,
msm_host 582 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->esc_clk_rate, msm_host->src_clk_rate);
msm_host 584 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_rate(msm_host->byte_clk, msm_host->byte_clk_rate);
msm_host 590 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_rate(msm_host->esc_clk, msm_host->esc_clk_rate);
msm_host 596 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_rate(msm_host->src_clk, msm_host->src_clk_rate);
msm_host 602 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_rate(msm_host->pixel_clk, msm_host->pixel_clk_rate);
msm_host 608 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->byte_clk);
msm_host 614 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->esc_clk);
msm_host 620 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->src_clk);
msm_host 626 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_prepare_enable(msm_host->pixel_clk);
msm_host 635 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->src_clk);
msm_host 637 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->esc_clk);
msm_host 639 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->byte_clk);
msm_host 644 drivers/gpu/drm/msm/dsi/dsi_host.c void dsi_link_clk_disable_6g(struct msm_dsi_host *msm_host)
msm_host 646 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->esc_clk);
msm_host 647 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->pixel_clk);
msm_host 648 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->byte_intf_clk)
msm_host 649 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->byte_intf_clk);
msm_host 650 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->byte_clk);
msm_host 653 drivers/gpu/drm/msm/dsi/dsi_host.c void dsi_link_clk_disable_v2(struct msm_dsi_host *msm_host)
msm_host 655 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->pixel_clk);
msm_host 656 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->src_clk);
msm_host 657 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->esc_clk);
msm_host 658 drivers/gpu/drm/msm/dsi/dsi_host.c clk_disable_unprepare(msm_host->byte_clk);
msm_host 661 drivers/gpu/drm/msm/dsi/dsi_host.c static u32 dsi_get_pclk_rate(struct msm_dsi_host *msm_host, bool is_dual_dsi)
msm_host 663 drivers/gpu/drm/msm/dsi/dsi_host.c struct drm_display_mode *mode = msm_host->mode;
msm_host 680 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_calc_pclk(struct msm_dsi_host *msm_host, bool is_dual_dsi)
msm_host 682 drivers/gpu/drm/msm/dsi/dsi_host.c u8 lanes = msm_host->lanes;
msm_host 683 drivers/gpu/drm/msm/dsi/dsi_host.c u32 bpp = dsi_get_bpp(msm_host->format);
msm_host 684 drivers/gpu/drm/msm/dsi/dsi_host.c u32 pclk_rate = dsi_get_pclk_rate(msm_host, is_dual_dsi);
msm_host 694 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->pixel_clk_rate = pclk_rate;
msm_host 695 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->byte_clk_rate = pclk_bpp;
msm_host 697 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("pclk=%d, bclk=%d", msm_host->pixel_clk_rate,
msm_host 698 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->byte_clk_rate);
msm_host 702 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_calc_clk_rate_6g(struct msm_dsi_host *msm_host, bool is_dual_dsi)
msm_host 704 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->mode) {
msm_host 709 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_calc_pclk(msm_host, is_dual_dsi);
msm_host 710 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->esc_clk_rate = clk_get_rate(msm_host->esc_clk);
msm_host 714 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_calc_clk_rate_v2(struct msm_dsi_host *msm_host, bool is_dual_dsi)
msm_host 716 drivers/gpu/drm/msm/dsi/dsi_host.c u32 bpp = dsi_get_bpp(msm_host->format);
msm_host 721 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_calc_pclk(msm_host, is_dual_dsi);
msm_host 723 drivers/gpu/drm/msm/dsi/dsi_host.c pclk_bpp = (u64)dsi_get_pclk_rate(msm_host, is_dual_dsi) * bpp;
msm_host 725 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->src_clk_rate = pclk_bpp;
msm_host 736 drivers/gpu/drm/msm/dsi/dsi_host.c byte_mhz = msm_host->byte_clk_rate / 1000000;
msm_host 754 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->esc_clk_rate = msm_host->byte_clk_rate / esc_div;
msm_host 756 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("esc=%d, src=%d", msm_host->esc_clk_rate,
msm_host 757 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->src_clk_rate);
msm_host 762 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_intr_ctrl(struct msm_dsi_host *msm_host, u32 mask, int enable)
msm_host 767 drivers/gpu/drm/msm/dsi/dsi_host.c spin_lock_irqsave(&msm_host->intr_lock, flags);
msm_host 768 drivers/gpu/drm/msm/dsi/dsi_host.c intr = dsi_read(msm_host, REG_DSI_INTR_CTRL);
msm_host 777 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_INTR_CTRL, intr);
msm_host 778 drivers/gpu/drm/msm/dsi/dsi_host.c spin_unlock_irqrestore(&msm_host->intr_lock, flags);
msm_host 815 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_ctrl_config(struct msm_dsi_host *msm_host, bool enable,
msm_host 818 drivers/gpu/drm/msm/dsi/dsi_host.c u32 flags = msm_host->mode_flags;
msm_host 819 drivers/gpu/drm/msm/dsi/dsi_host.c enum mipi_dsi_pixel_format mipi_fmt = msm_host->format;
msm_host 820 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd;
msm_host 824 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CTRL, 0);
msm_host 844 drivers/gpu/drm/msm/dsi/dsi_host.c data |= DSI_VID_CFG0_VIRT_CHANNEL(msm_host->channel);
msm_host 845 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_VID_CFG0, data);
msm_host 849 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_VID_CFG1, 0);
msm_host 854 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CMD_CFG0, data);
msm_host 861 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CMD_CFG1, data);
msm_host 864 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CMD_DMA_CTRL,
msm_host 873 drivers/gpu/drm/msm/dsi/dsi_host.c data |= DSI_TRIG_CTRL_STREAM(msm_host->channel);
msm_host 877 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_TRIG_CTRL, data);
msm_host 881 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CLKOUT_TIMING_CTRL, data);
msm_host 886 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_T_CLK_PRE_EXTEND, msm_host 892 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_EOT_PACKET_CTRL, data); msm_host 895 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_ERR_INT_MASK0, 0x13ff3fe0); msm_host 897 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_ERROR, 1); msm_host 899 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CLK_CTRL, DSI_CLK_CTRL_ENABLE_CLKS); msm_host 903 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("lane number=%d", msm_host->lanes); msm_host 904 drivers/gpu/drm/msm/dsi/dsi_host.c data |= ((DSI_CTRL_LANE0 << msm_host->lanes) - DSI_CTRL_LANE0); msm_host 906 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_LANE_SWAP_CTRL, msm_host 907 drivers/gpu/drm/msm/dsi/dsi_host.c DSI_LANE_SWAP_CTRL_DLN_SWAP_SEL(msm_host->dlane_swap)); msm_host 910 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_LANE_CTRL, msm_host 915 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CTRL, data); msm_host 918 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_timing_setup(struct msm_dsi_host *msm_host, bool is_dual_dsi) msm_host 920 drivers/gpu/drm/msm/dsi/dsi_host.c struct drm_display_mode *mode = msm_host->mode; msm_host 950 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->mode_flags & MIPI_DSI_MODE_VIDEO) { msm_host 951 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_ACTIVE_H, msm_host 954 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_ACTIVE_V, msm_host 957 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_TOTAL, msm_host 961 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_ACTIVE_HSYNC, msm_host 964 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_ACTIVE_VSYNC_HPOS, 0); msm_host 965 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_ACTIVE_VSYNC_VPOS, msm_host 970 drivers/gpu/drm/msm/dsi/dsi_host.c wc = hdisplay * dsi_get_bpp(msm_host->format) / 8 + 1; msm_host 972 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CMD_MDP_STREAM_CTRL, msm_host 975 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->channel) | msm_host 979 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CMD_MDP_STREAM_TOTAL, msm_host 985 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_sw_reset(struct msm_dsi_host *msm_host) msm_host 987 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CLK_CTRL, DSI_CLK_CTRL_ENABLE_CLKS); msm_host 990 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_RESET, 1); msm_host 992 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_RESET, 0); msm_host 995 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_op_mode_config(struct msm_dsi_host *msm_host, msm_host 1000 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_ctrl = dsi_read(msm_host, REG_DSI_CTRL); msm_host 1005 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_CMD_MDP_DONE | msm_host 1012 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_CMD_MDP_DONE, 1); msm_host 1017 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CTRL, dsi_ctrl); msm_host 1020 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_set_tx_power_mode(int mode, struct msm_dsi_host *msm_host) msm_host 1024 drivers/gpu/drm/msm/dsi/dsi_host.c data = dsi_read(msm_host, REG_DSI_CMD_DMA_CTRL); msm_host 1031 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CMD_DMA_CTRL, data); msm_host 1034 
drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_wait4video_done(struct msm_dsi_host *msm_host) msm_host 1037 drivers/gpu/drm/msm/dsi/dsi_host.c struct device *dev = &msm_host->pdev->dev; msm_host 1039 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_VIDEO_DONE, 1); msm_host 1041 drivers/gpu/drm/msm/dsi/dsi_host.c reinit_completion(&msm_host->video_comp); msm_host 1043 drivers/gpu/drm/msm/dsi/dsi_host.c ret = wait_for_completion_timeout(&msm_host->video_comp, msm_host 1049 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_VIDEO_DONE, 0); msm_host 1052 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_wait4video_eng_busy(struct msm_dsi_host *msm_host) msm_host 1054 drivers/gpu/drm/msm/dsi/dsi_host.c if (!(msm_host->mode_flags & MIPI_DSI_MODE_VIDEO)) msm_host 1057 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->power_on && msm_host->enabled) { msm_host 1058 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_wait4video_done(msm_host); msm_host 1064 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_tx_buf_alloc_6g(struct msm_dsi_host *msm_host, int size) msm_host 1066 drivers/gpu/drm/msm/dsi/dsi_host.c struct drm_device *dev = msm_host->dev; msm_host 1073 drivers/gpu/drm/msm/dsi/dsi_host.c &msm_host->tx_gem_obj, &iova); msm_host 1076 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->tx_gem_obj = NULL; msm_host 1080 drivers/gpu/drm/msm/dsi/dsi_host.c msm_gem_object_set_name(msm_host->tx_gem_obj, "tx_gem"); msm_host 1082 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->tx_size = msm_host->tx_gem_obj->size; msm_host 1087 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_tx_buf_alloc_v2(struct msm_dsi_host *msm_host, int size) msm_host 1089 drivers/gpu/drm/msm/dsi/dsi_host.c struct drm_device *dev = msm_host->dev; msm_host 1091 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->tx_buf = dma_alloc_coherent(dev->dev, size, msm_host 1092 drivers/gpu/drm/msm/dsi/dsi_host.c &msm_host->tx_buf_paddr, GFP_KERNEL); msm_host 1093 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->tx_buf) msm_host 1096 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->tx_size = size; msm_host 1101 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_tx_buf_free(struct msm_dsi_host *msm_host) msm_host 1103 drivers/gpu/drm/msm/dsi/dsi_host.c struct drm_device *dev = msm_host->dev; msm_host 1116 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->tx_gem_obj) { msm_host 1117 drivers/gpu/drm/msm/dsi/dsi_host.c msm_gem_unpin_iova(msm_host->tx_gem_obj, priv->kms->aspace); msm_host 1118 drivers/gpu/drm/msm/dsi/dsi_host.c drm_gem_object_put_unlocked(msm_host->tx_gem_obj); msm_host 1119 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->tx_gem_obj = NULL; msm_host 1122 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->tx_buf) msm_host 1123 drivers/gpu/drm/msm/dsi/dsi_host.c dma_free_coherent(dev->dev, msm_host->tx_size, msm_host->tx_buf, msm_host 1124 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->tx_buf_paddr); msm_host 1127 drivers/gpu/drm/msm/dsi/dsi_host.c void *dsi_tx_buf_get_6g(struct msm_dsi_host *msm_host) msm_host 1129 drivers/gpu/drm/msm/dsi/dsi_host.c return msm_gem_get_vaddr(msm_host->tx_gem_obj); msm_host 1132 drivers/gpu/drm/msm/dsi/dsi_host.c void *dsi_tx_buf_get_v2(struct msm_dsi_host *msm_host) msm_host 1134 drivers/gpu/drm/msm/dsi/dsi_host.c return msm_host->tx_buf; msm_host 1137 drivers/gpu/drm/msm/dsi/dsi_host.c void dsi_tx_buf_put_6g(struct msm_dsi_host *msm_host) msm_host 1139 drivers/gpu/drm/msm/dsi/dsi_host.c msm_gem_put_vaddr(msm_host->tx_gem_obj); msm_host 1145 drivers/gpu/drm/msm/dsi/dsi_host.c static int 
dsi_cmd_dma_add(struct msm_dsi_host *msm_host, msm_host 1148 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 1161 drivers/gpu/drm/msm/dsi/dsi_host.c if (len > msm_host->tx_size) { msm_host 1166 drivers/gpu/drm/msm/dsi/dsi_host.c data = cfg_hnd->ops->tx_buf_get(msm_host); msm_host 1192 drivers/gpu/drm/msm/dsi/dsi_host.c cfg_hnd->ops->tx_buf_put(msm_host); msm_host 1239 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_dma_base_get_6g(struct msm_dsi_host *msm_host, uint64_t *dma_base) msm_host 1241 drivers/gpu/drm/msm/dsi/dsi_host.c struct drm_device *dev = msm_host->dev; msm_host 1247 drivers/gpu/drm/msm/dsi/dsi_host.c return msm_gem_get_and_pin_iova(msm_host->tx_gem_obj, msm_host 1251 drivers/gpu/drm/msm/dsi/dsi_host.c int dsi_dma_base_get_v2(struct msm_dsi_host *msm_host, uint64_t *dma_base) msm_host 1256 drivers/gpu/drm/msm/dsi/dsi_host.c *dma_base = msm_host->tx_buf_paddr; msm_host 1260 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_cmd_dma_tx(struct msm_dsi_host *msm_host, int len) msm_host 1262 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 1267 drivers/gpu/drm/msm/dsi/dsi_host.c ret = cfg_hnd->ops->dma_base_get(msm_host, &dma_base); msm_host 1273 drivers/gpu/drm/msm/dsi/dsi_host.c reinit_completion(&msm_host->dma_comp); msm_host 1275 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_wait4video_eng_busy(msm_host); msm_host 1278 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->id, dma_base, len); msm_host 1280 drivers/gpu/drm/msm/dsi/dsi_host.c ret = wait_for_completion_timeout(&msm_host->dma_comp, msm_host 1293 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_cmd_dma_rx(struct msm_dsi_host *msm_host, msm_host 1301 drivers/gpu/drm/msm/dsi/dsi_host.c int buf_offset = buf - msm_host->rx_buf; msm_host 1332 drivers/gpu/drm/msm/dsi/dsi_host.c data = dsi_read(msm_host, REG_DSI_RDBK_DATA(i)); msm_host 1343 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_cmds2buf_tx(struct msm_dsi_host *msm_host, msm_host 1347 drivers/gpu/drm/msm/dsi/dsi_host.c int bllp_len = msm_host->mode->hdisplay * msm_host 1348 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_get_bpp(msm_host->format) / 8; msm_host 1350 drivers/gpu/drm/msm/dsi/dsi_host.c len = dsi_cmd_dma_add(msm_host, msg); msm_host 1366 drivers/gpu/drm/msm/dsi/dsi_host.c if ((msm_host->mode_flags & MIPI_DSI_MODE_VIDEO) && (len > bllp_len)) { msm_host 1372 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_cmd_dma_tx(msm_host, len); msm_host 1382 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_sw_reset_restore(struct msm_dsi_host *msm_host) msm_host 1386 drivers/gpu/drm/msm/dsi/dsi_host.c data0 = dsi_read(msm_host, REG_DSI_CTRL); msm_host 1389 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CTRL, data1); msm_host 1396 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CLK_CTRL, DSI_CLK_CTRL_ENABLE_CLKS); msm_host 1400 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_RESET, 1); msm_host 1402 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_RESET, 0); msm_host 1404 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CTRL, data0); msm_host 1410 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = msm_host 1413 drivers/gpu/drm/msm/dsi/dsi_host.c drm_helper_hpd_irq_event(msm_host->dev); msm_host 1418 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = msm_host 1420 drivers/gpu/drm/msm/dsi/dsi_host.c u32 status = msm_host->err_work_state; msm_host 1424 
drivers/gpu/drm/msm/dsi/dsi_host.c dsi_sw_reset_restore(msm_host); msm_host 1427 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->err_work_state = 0; msm_host 1430 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_ERROR, 1); msm_host 1433 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_ack_err_status(struct msm_dsi_host *msm_host) msm_host 1437 drivers/gpu/drm/msm/dsi/dsi_host.c status = dsi_read(msm_host, REG_DSI_ACK_ERR_STATUS); msm_host 1440 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_ACK_ERR_STATUS, status); msm_host 1442 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_ACK_ERR_STATUS, 0); msm_host 1443 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->err_work_state |= DSI_ERR_STATE_ACK; msm_host 1447 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_timeout_status(struct msm_dsi_host *msm_host) msm_host 1451 drivers/gpu/drm/msm/dsi/dsi_host.c status = dsi_read(msm_host, REG_DSI_TIMEOUT_STATUS); msm_host 1454 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_TIMEOUT_STATUS, status); msm_host 1455 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->err_work_state |= DSI_ERR_STATE_TIMEOUT; msm_host 1459 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_dln0_phy_err(struct msm_dsi_host *msm_host) msm_host 1463 drivers/gpu/drm/msm/dsi/dsi_host.c status = dsi_read(msm_host, REG_DSI_DLN0_PHY_ERR); msm_host 1470 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_DLN0_PHY_ERR, status); msm_host 1471 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->err_work_state |= DSI_ERR_STATE_DLN0_PHY; msm_host 1475 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_fifo_status(struct msm_dsi_host *msm_host) msm_host 1479 drivers/gpu/drm/msm/dsi/dsi_host.c status = dsi_read(msm_host, REG_DSI_FIFO_STATUS); msm_host 1483 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_FIFO_STATUS, status); msm_host 1484 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->err_work_state |= DSI_ERR_STATE_FIFO; msm_host 1486 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->err_work_state |= msm_host 1491 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_status(struct msm_dsi_host *msm_host) msm_host 1495 drivers/gpu/drm/msm/dsi/dsi_host.c status = dsi_read(msm_host, REG_DSI_STATUS0); msm_host 1498 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_STATUS0, status); msm_host 1499 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->err_work_state |= msm_host 1504 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_clk_status(struct msm_dsi_host *msm_host) msm_host 1508 drivers/gpu/drm/msm/dsi/dsi_host.c status = dsi_read(msm_host, REG_DSI_CLK_STATUS); msm_host 1511 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CLK_STATUS, status); msm_host 1512 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->err_work_state |= DSI_ERR_STATE_PLL_UNLOCKED; msm_host 1516 drivers/gpu/drm/msm/dsi/dsi_host.c static void dsi_error(struct msm_dsi_host *msm_host) msm_host 1519 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_ERROR, 0); msm_host 1521 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_clk_status(msm_host); msm_host 1522 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_fifo_status(msm_host); msm_host 1523 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_ack_err_status(msm_host); msm_host 1524 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_timeout_status(msm_host); msm_host 1525 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_status(msm_host); msm_host 1526 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_dln0_phy_err(msm_host); msm_host 1528 drivers/gpu/drm/msm/dsi/dsi_host.c 
queue_work(msm_host->workqueue, &msm_host->err_work); msm_host 1533 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = ptr; msm_host 1537 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->ctrl_base) msm_host 1540 drivers/gpu/drm/msm/dsi/dsi_host.c spin_lock_irqsave(&msm_host->intr_lock, flags); msm_host 1541 drivers/gpu/drm/msm/dsi/dsi_host.c isr = dsi_read(msm_host, REG_DSI_INTR_CTRL); msm_host 1542 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_INTR_CTRL, isr); msm_host 1543 drivers/gpu/drm/msm/dsi/dsi_host.c spin_unlock_irqrestore(&msm_host->intr_lock, flags); msm_host 1545 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("isr=0x%x, id=%d", isr, msm_host->id); msm_host 1548 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_error(msm_host); msm_host 1551 drivers/gpu/drm/msm/dsi/dsi_host.c complete(&msm_host->video_comp); msm_host 1554 drivers/gpu/drm/msm/dsi/dsi_host.c complete(&msm_host->dma_comp); msm_host 1559 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_host_init_panel_gpios(struct msm_dsi_host *msm_host, msm_host 1562 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->disp_en_gpio = devm_gpiod_get_optional(panel_device, msm_host 1565 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->disp_en_gpio)) { msm_host 1567 drivers/gpu/drm/msm/dsi/dsi_host.c PTR_ERR(msm_host->disp_en_gpio)); msm_host 1568 drivers/gpu/drm/msm/dsi/dsi_host.c return PTR_ERR(msm_host->disp_en_gpio); msm_host 1571 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->te_gpio = devm_gpiod_get_optional(panel_device, "disp-te", msm_host 1573 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->te_gpio)) { msm_host 1574 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("cannot get disp-te-gpios %ld", PTR_ERR(msm_host->te_gpio)); msm_host 1575 drivers/gpu/drm/msm/dsi/dsi_host.c return PTR_ERR(msm_host->te_gpio); msm_host 1584 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 1587 drivers/gpu/drm/msm/dsi/dsi_host.c if (dsi->lanes > msm_host->num_data_lanes) msm_host 1590 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->channel = dsi->channel; msm_host 1591 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->lanes = dsi->lanes; msm_host 1592 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->format = dsi->format; msm_host 1593 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->mode_flags = dsi->mode_flags; msm_host 1596 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_host_init_panel_gpios(msm_host, &dsi->dev); msm_host 1600 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("id=%d", msm_host->id); msm_host 1601 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->dev) msm_host 1602 drivers/gpu/drm/msm/dsi/dsi_host.c queue_work(msm_host->workqueue, &msm_host->hpd_work); msm_host 1610 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 1612 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->device_node = NULL; msm_host 1614 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("id=%d", msm_host->id); msm_host 1615 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->dev) msm_host 1616 drivers/gpu/drm/msm/dsi/dsi_host.c queue_work(msm_host->workqueue, &msm_host->hpd_work); msm_host 1624 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 1627 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msg || !msm_host->power_on) msm_host 1630 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_lock(&msm_host->cmd_mutex); msm_host 1631 drivers/gpu/drm/msm/dsi/dsi_host.c ret = msm_dsi_manager_cmd_xfer(msm_host->id, msg); msm_host 1632 drivers/gpu/drm/msm/dsi/dsi_host.c 
mutex_unlock(&msm_host->cmd_mutex); msm_host 1660 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_host_parse_lane_data(struct msm_dsi_host *msm_host, msm_host 1663 drivers/gpu/drm/msm/dsi/dsi_host.c struct device *dev = &msm_host->pdev->dev; msm_host 1682 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->num_data_lanes = num_lanes; msm_host 1716 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->dlane_swap = i; msm_host 1724 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_host_parse_dt(struct msm_dsi_host *msm_host) msm_host 1726 drivers/gpu/drm/msm/dsi/dsi_host.c struct device *dev = &msm_host->pdev->dev; msm_host 1743 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_host_parse_lane_data(msm_host, endpoint); msm_host 1759 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->device_node = device_node; msm_host 1762 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->sfpb = syscon_regmap_lookup_by_phandle(np, msm_host 1764 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->sfpb)) { msm_host 1767 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->sfpb); msm_host 1779 drivers/gpu/drm/msm/dsi/dsi_host.c static int dsi_host_get_id(struct msm_dsi_host *msm_host) msm_host 1781 drivers/gpu/drm/msm/dsi/dsi_host.c struct platform_device *pdev = msm_host->pdev; msm_host 1782 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_config *cfg = msm_host->cfg_hnd->cfg; msm_host 1800 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = NULL; msm_host 1804 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host = devm_kzalloc(&pdev->dev, sizeof(*msm_host), GFP_KERNEL); msm_host 1805 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host) { msm_host 1812 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->pdev = pdev; msm_host 1813 drivers/gpu/drm/msm/dsi/dsi_host.c msm_dsi->host = &msm_host->base; msm_host 1815 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_host_parse_dt(msm_host); msm_host 1821 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->ctrl_base = msm_ioremap(pdev, "dsi_ctrl", "DSI CTRL"); msm_host 1822 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(msm_host->ctrl_base)) { msm_host 1824 drivers/gpu/drm/msm/dsi/dsi_host.c ret = PTR_ERR(msm_host->ctrl_base); msm_host 1830 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->cfg_hnd = dsi_get_config(msm_host); msm_host 1831 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->cfg_hnd) { msm_host 1837 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->id = dsi_host_get_id(msm_host); msm_host 1838 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->id < 0) { msm_host 1839 drivers/gpu/drm/msm/dsi/dsi_host.c ret = msm_host->id; msm_host 1845 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->ctrl_base += msm_host->cfg_hnd->cfg->io_offset; msm_host 1847 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_regulator_init(msm_host); msm_host 1853 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_clk_init(msm_host); msm_host 1859 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->rx_buf = devm_kzalloc(&pdev->dev, SZ_4K, GFP_KERNEL); msm_host 1860 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->rx_buf) { msm_host 1866 drivers/gpu/drm/msm/dsi/dsi_host.c init_completion(&msm_host->dma_comp); msm_host 1867 drivers/gpu/drm/msm/dsi/dsi_host.c init_completion(&msm_host->video_comp); msm_host 1868 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_init(&msm_host->dev_mutex); msm_host 1869 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_init(&msm_host->cmd_mutex); msm_host 1870 drivers/gpu/drm/msm/dsi/dsi_host.c spin_lock_init(&msm_host->intr_lock); msm_host 1873 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->workqueue = 
alloc_ordered_workqueue("dsi_drm_work", 0); msm_host 1874 drivers/gpu/drm/msm/dsi/dsi_host.c INIT_WORK(&msm_host->err_work, dsi_err_worker); msm_host 1875 drivers/gpu/drm/msm/dsi/dsi_host.c INIT_WORK(&msm_host->hpd_work, dsi_hpd_worker); msm_host 1877 drivers/gpu/drm/msm/dsi/dsi_host.c msm_dsi->id = msm_host->id; msm_host 1879 drivers/gpu/drm/msm/dsi/dsi_host.c DBG("Dsi Host %d initialized", msm_host->id); msm_host 1888 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 1891 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_tx_buf_free(msm_host); msm_host 1892 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->workqueue) { msm_host 1893 drivers/gpu/drm/msm/dsi/dsi_host.c flush_workqueue(msm_host->workqueue); msm_host 1894 drivers/gpu/drm/msm/dsi/dsi_host.c destroy_workqueue(msm_host->workqueue); msm_host 1895 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->workqueue = NULL; msm_host 1898 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_destroy(&msm_host->cmd_mutex); msm_host 1899 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_destroy(&msm_host->dev_mutex); msm_host 1901 drivers/gpu/drm/msm/dsi/dsi_host.c pm_runtime_disable(&msm_host->pdev->dev); msm_host 1907 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 1908 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 1909 drivers/gpu/drm/msm/dsi/dsi_host.c struct platform_device *pdev = msm_host->pdev; msm_host 1912 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->irq = irq_of_parse_and_map(pdev->dev.of_node, 0); msm_host 1913 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->irq < 0) { msm_host 1914 drivers/gpu/drm/msm/dsi/dsi_host.c ret = msm_host->irq; msm_host 1919 drivers/gpu/drm/msm/dsi/dsi_host.c ret = devm_request_irq(&pdev->dev, msm_host->irq, msm_host 1921 drivers/gpu/drm/msm/dsi/dsi_host.c "dsi_isr", msm_host); msm_host 1924 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->irq, ret); msm_host 1928 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->dev = dev; msm_host 1929 drivers/gpu/drm/msm/dsi/dsi_host.c ret = cfg_hnd->ops->tx_buf_alloc(msm_host, SZ_4K); msm_host 1940 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 1944 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->registered) { msm_host 1945 drivers/gpu/drm/msm/dsi/dsi_host.c host->dev = &msm_host->pdev->dev; msm_host 1951 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->registered = true; msm_host 1961 drivers/gpu/drm/msm/dsi/dsi_host.c if (check_defer && msm_host->device_node) { msm_host 1962 drivers/gpu/drm/msm/dsi/dsi_host.c if (IS_ERR(of_drm_find_panel(msm_host->device_node))) msm_host 1963 drivers/gpu/drm/msm/dsi/dsi_host.c if (!of_drm_find_bridge(msm_host->device_node)) msm_host 1973 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 1975 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->registered) { msm_host 1979 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->registered = false; msm_host 1986 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 1987 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 1999 drivers/gpu/drm/msm/dsi/dsi_host.c pm_runtime_get_sync(&msm_host->pdev->dev); msm_host 2000 drivers/gpu/drm/msm/dsi/dsi_host.c cfg_hnd->ops->link_clk_enable(msm_host); msm_host 2005 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_set_tx_power_mode(0, msm_host); msm_host 2007 
drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->dma_cmd_ctrl_restore = dsi_read(msm_host, REG_DSI_CTRL); msm_host 2008 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CTRL, msm_host 2009 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->dma_cmd_ctrl_restore | msm_host 2012 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_CMD_DMA_DONE, 1); msm_host 2020 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2021 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 2023 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_intr_ctrl(msm_host, DSI_IRQ_MASK_CMD_DMA_DONE, 0); msm_host 2024 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_CTRL, msm_host->dma_cmd_ctrl_restore); msm_host 2027 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_set_tx_power_mode(1, msm_host); msm_host 2031 drivers/gpu/drm/msm/dsi/dsi_host.c cfg_hnd->ops->link_clk_disable(msm_host); msm_host 2032 drivers/gpu/drm/msm/dsi/dsi_host.c pm_runtime_put_autosuspend(&msm_host->pdev->dev); msm_host 2038 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2040 drivers/gpu/drm/msm/dsi/dsi_host.c return dsi_cmds2buf_tx(msm_host, msg); msm_host 2046 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2047 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 2068 drivers/gpu/drm/msm/dsi/dsi_host.c buf = msm_host->rx_buf; msm_host 2082 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_cmds2buf_tx(msm_host, &max_pkt_size_msg); msm_host 2092 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_RDBK_DATA_CTRL, msm_host 2095 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_RDBK_DATA_CTRL, 0); msm_host 2099 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_cmds2buf_tx(msm_host, msg); msm_host 2112 drivers/gpu/drm/msm/dsi/dsi_host.c dlen = dsi_cmd_dma_rx(msm_host, buf, rx_byte, pkt_size); msm_host 2148 drivers/gpu/drm/msm/dsi/dsi_host.c buf = msm_host->rx_buf + (10 - rlen); msm_host 2150 drivers/gpu/drm/msm/dsi/dsi_host.c buf = msm_host->rx_buf; msm_host 2181 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2183 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_DMA_BASE, dma_base); msm_host 2184 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_DMA_LEN, len); msm_host 2185 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_TRIG_DMA, 1); msm_host 2194 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2206 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_parent(msm_host->byte_clk_src, byte_clk_provider); msm_host 2213 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_parent(msm_host->pixel_clk_src, pixel_clk_provider); msm_host 2220 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->dsi_clk_src) { msm_host 2221 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_parent(msm_host->dsi_clk_src, pixel_clk_provider); msm_host 2229 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->esc_clk_src) { msm_host 2230 drivers/gpu/drm/msm/dsi/dsi_host.c ret = clk_set_parent(msm_host->esc_clk_src, byte_clk_provider); msm_host 2244 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2247 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_PHY_RESET, DSI_PHY_RESET_RESET); msm_host 2251 
drivers/gpu/drm/msm/dsi/dsi_host.c dsi_write(msm_host, REG_DSI_PHY_RESET, 0); msm_host 2259 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2260 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 2263 drivers/gpu/drm/msm/dsi/dsi_host.c ret = cfg_hnd->ops->calc_clk_rate(msm_host, is_dual_dsi); msm_host 2269 drivers/gpu/drm/msm/dsi/dsi_host.c clk_req->bitclk_rate = msm_host->byte_clk_rate * 8; msm_host 2270 drivers/gpu/drm/msm/dsi/dsi_host.c clk_req->escclk_rate = msm_host->esc_clk_rate; msm_host 2275 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2277 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_op_mode_config(msm_host, msm_host 2278 drivers/gpu/drm/msm/dsi/dsi_host.c !!(msm_host->mode_flags & MIPI_DSI_MODE_VIDEO), true); msm_host 2289 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->enabled = true; msm_host 2295 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2297 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->enabled = false; msm_host 2298 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_op_mode_config(msm_host, msm_host 2299 drivers/gpu/drm/msm/dsi/dsi_host.c !!(msm_host->mode_flags & MIPI_DSI_MODE_VIDEO), false); msm_host 2305 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_sw_reset(msm_host); msm_host 2310 drivers/gpu/drm/msm/dsi/dsi_host.c static void msm_dsi_sfpb_config(struct msm_dsi_host *msm_host, bool enable) msm_host 2314 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->sfpb) msm_host 2319 drivers/gpu/drm/msm/dsi/dsi_host.c regmap_update_bits(msm_host->sfpb, REG_SFPB_GPREG, msm_host 2328 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2329 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 2332 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_lock(&msm_host->dev_mutex); msm_host 2333 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->power_on) { msm_host 2338 drivers/gpu/drm/msm/dsi/dsi_host.c msm_dsi_sfpb_config(msm_host, true); msm_host 2340 drivers/gpu/drm/msm/dsi/dsi_host.c ret = dsi_host_regulator_enable(msm_host); msm_host 2347 drivers/gpu/drm/msm/dsi/dsi_host.c pm_runtime_get_sync(&msm_host->pdev->dev); msm_host 2348 drivers/gpu/drm/msm/dsi/dsi_host.c ret = cfg_hnd->ops->link_clk_enable(msm_host); msm_host 2355 drivers/gpu/drm/msm/dsi/dsi_host.c ret = pinctrl_pm_select_default_state(&msm_host->pdev->dev); msm_host 2362 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_timing_setup(msm_host, is_dual_dsi); msm_host 2363 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_sw_reset(msm_host); msm_host 2364 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_ctrl_config(msm_host, true, phy_shared_timings); msm_host 2366 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->disp_en_gpio) msm_host 2367 drivers/gpu/drm/msm/dsi/dsi_host.c gpiod_set_value(msm_host->disp_en_gpio, 1); msm_host 2369 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->power_on = true; msm_host 2370 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_unlock(&msm_host->dev_mutex); msm_host 2375 drivers/gpu/drm/msm/dsi/dsi_host.c cfg_hnd->ops->link_clk_disable(msm_host); msm_host 2376 drivers/gpu/drm/msm/dsi/dsi_host.c pm_runtime_put_autosuspend(&msm_host->pdev->dev); msm_host 2378 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_host_regulator_disable(msm_host); msm_host 2380 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_unlock(&msm_host->dev_mutex); msm_host 2386 drivers/gpu/drm/msm/dsi/dsi_host.c struct 
msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2387 drivers/gpu/drm/msm/dsi/dsi_host.c const struct msm_dsi_cfg_handler *cfg_hnd = msm_host->cfg_hnd; msm_host 2389 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_lock(&msm_host->dev_mutex); msm_host 2390 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->power_on) { msm_host 2395 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_ctrl_config(msm_host, false, NULL); msm_host 2397 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->disp_en_gpio) msm_host 2398 drivers/gpu/drm/msm/dsi/dsi_host.c gpiod_set_value(msm_host->disp_en_gpio, 0); msm_host 2400 drivers/gpu/drm/msm/dsi/dsi_host.c pinctrl_pm_select_sleep_state(&msm_host->pdev->dev); msm_host 2402 drivers/gpu/drm/msm/dsi/dsi_host.c cfg_hnd->ops->link_clk_disable(msm_host); msm_host 2403 drivers/gpu/drm/msm/dsi/dsi_host.c pm_runtime_put_autosuspend(&msm_host->pdev->dev); msm_host 2405 drivers/gpu/drm/msm/dsi/dsi_host.c dsi_host_regulator_disable(msm_host); msm_host 2407 drivers/gpu/drm/msm/dsi/dsi_host.c msm_dsi_sfpb_config(msm_host, false); msm_host 2411 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->power_on = false; msm_host 2414 drivers/gpu/drm/msm/dsi/dsi_host.c mutex_unlock(&msm_host->dev_mutex); msm_host 2421 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2423 drivers/gpu/drm/msm/dsi/dsi_host.c if (msm_host->mode) { msm_host 2424 drivers/gpu/drm/msm/dsi/dsi_host.c drm_mode_destroy(msm_host->dev, msm_host->mode); msm_host 2425 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->mode = NULL; msm_host 2428 drivers/gpu/drm/msm/dsi/dsi_host.c msm_host->mode = drm_mode_duplicate(msm_host->dev, mode); msm_host 2429 drivers/gpu/drm/msm/dsi/dsi_host.c if (!msm_host->mode) { msm_host 2449 drivers/gpu/drm/msm/dsi/dsi_host.c struct msm_dsi_host *msm_host = to_msm_dsi_host(host); msm_host 2451 drivers/gpu/drm/msm/dsi/dsi_host.c return of_drm_find_bridge(msm_host->device_node); msm_host 119 drivers/mmc/host/sdhci-msm.c #define msm_host_readl(msm_host, host, offset) \ msm_host 120 drivers/mmc/host/sdhci-msm.c msm_host->var_ops->msm_readl_relaxed(host, offset) msm_host 122 drivers/mmc/host/sdhci-msm.c #define msm_host_writel(msm_host, val, host, offset) \ msm_host 123 drivers/mmc/host/sdhci-msm.c msm_host->var_ops->msm_writel_relaxed(val, host, offset) msm_host 263 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 265 drivers/mmc/host/sdhci-msm.c return msm_host->offset; msm_host 276 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 278 drivers/mmc/host/sdhci-msm.c return readl_relaxed(msm_host->core_mem + offset); msm_host 291 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 293 drivers/mmc/host/sdhci-msm.c writel_relaxed(val, msm_host->core_mem + offset); msm_host 324 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 326 drivers/mmc/host/sdhci-msm.c struct clk *core_clk = msm_host->bulk_clks[0].clk; msm_host 337 drivers/mmc/host/sdhci-msm.c msm_host->clk_rate = clock; msm_host 578 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 583 drivers/mmc/host/sdhci-msm.c msm_host->offset; msm_host 585 drivers/mmc/host/sdhci-msm.c if (msm_host->use_14lpp_dll_reset && !IS_ERR_OR_NULL(msm_host->xo_clk)) msm_host 586 drivers/mmc/host/sdhci-msm.c xo_clk = clk_get_rate(msm_host->xo_clk); msm_host 599 
drivers/mmc/host/sdhci-msm.c if (msm_host->use_14lpp_dll_reset) { msm_host 626 drivers/mmc/host/sdhci-msm.c if (msm_host->use_14lpp_dll_reset && msm_host 627 drivers/mmc/host/sdhci-msm.c !IS_ERR_OR_NULL(msm_host->xo_clk)) { msm_host 663 drivers/mmc/host/sdhci-msm.c if (msm_host->use_14lpp_dll_reset) { msm_host 704 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 707 drivers/mmc/host/sdhci-msm.c msm_host->offset; msm_host 709 drivers/mmc/host/sdhci-msm.c if (!msm_host->use_cdclp533) { msm_host 744 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 749 drivers/mmc/host/sdhci-msm.c msm_host->offset; msm_host 761 drivers/mmc/host/sdhci-msm.c if ((msm_host->tuning_done || ios.enhanced_strobe) && msm_host 762 drivers/mmc/host/sdhci-msm.c !msm_host->calibration_done) { msm_host 770 drivers/mmc/host/sdhci-msm.c if (!msm_host->clk_rate && !msm_host->use_cdclp533) { msm_host 826 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 830 drivers/mmc/host/sdhci-msm.c msm_host->offset; msm_host 843 drivers/mmc/host/sdhci-msm.c ret = msm_config_cm_dll_phase(host, msm_host->saved_tuning_phase); msm_host 932 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 945 drivers/mmc/host/sdhci-msm.c if (msm_host->updated_ddr_cfg) msm_host 993 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 998 drivers/mmc/host/sdhci-msm.c msm_host->offset; msm_host 1013 drivers/mmc/host/sdhci-msm.c msm_host->saved_tuning_phase); msm_host 1023 drivers/mmc/host/sdhci-msm.c if (msm_host->use_cdclp533) msm_host 1054 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1070 drivers/mmc/host/sdhci-msm.c ret = msm_config_cm_dll_phase(host, msm_host->saved_tuning_phase); msm_host 1104 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1107 drivers/mmc/host/sdhci-msm.c msm_host->use_cdr = false; msm_host 1113 drivers/mmc/host/sdhci-msm.c msm_host->use_cdr = true; msm_host 1119 drivers/mmc/host/sdhci-msm.c msm_host->tuning_done = 0; msm_host 1169 drivers/mmc/host/sdhci-msm.c msm_host->saved_tuning_phase = phase; msm_host 1182 drivers/mmc/host/sdhci-msm.c msm_host->tuning_done = true; msm_host 1195 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1199 drivers/mmc/host/sdhci-msm.c (msm_host->tuning_done || ios->enhanced_strobe) && msm_host 1200 drivers/mmc/host/sdhci-msm.c !msm_host->calibration_done) { msm_host 1203 drivers/mmc/host/sdhci-msm.c msm_host->calibration_done = true; msm_host 1215 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1219 drivers/mmc/host/sdhci-msm.c msm_host->offset; msm_host 1276 drivers/mmc/host/sdhci-msm.c msm_host->calibration_done = false; msm_host 1287 drivers/mmc/host/sdhci-msm.c static inline void sdhci_msm_init_pwr_irq_wait(struct sdhci_msm_host *msm_host) msm_host 1289 drivers/mmc/host/sdhci-msm.c init_waitqueue_head(&msm_host->pwr_irq_wait); msm_host 1293 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host) msm_host 1295 drivers/mmc/host/sdhci-msm.c wake_up(&msm_host->pwr_irq_wait); msm_host 1310 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1314 drivers/mmc/host/sdhci-msm.c 
msm_host->offset; msm_host 1318 drivers/mmc/host/sdhci-msm.c msm_host->curr_pwr_state, msm_host->curr_io_level); msm_host 1326 drivers/mmc/host/sdhci-msm.c if (!msm_host->mci_removed) msm_host 1327 drivers/mmc/host/sdhci-msm.c val = msm_host_readl(msm_host, host, msm_host 1351 drivers/mmc/host/sdhci-msm.c if ((req_type & msm_host->curr_pwr_state) || msm_host 1352 drivers/mmc/host/sdhci-msm.c (req_type & msm_host->curr_io_level)) msm_host 1361 drivers/mmc/host/sdhci-msm.c if (!wait_event_timeout(msm_host->pwr_irq_wait, msm_host 1362 drivers/mmc/host/sdhci-msm.c msm_host->pwr_irq_flag, msm_host 1364 drivers/mmc/host/sdhci-msm.c dev_warn(&msm_host->pdev->dev, msm_host 1375 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1377 drivers/mmc/host/sdhci-msm.c msm_host->offset; msm_host 1381 drivers/mmc/host/sdhci-msm.c msm_host_readl(msm_host, host, msm_offset->core_pwrctl_status), msm_host 1382 drivers/mmc/host/sdhci-msm.c msm_host_readl(msm_host, host, msm_offset->core_pwrctl_mask), msm_host 1383 drivers/mmc/host/sdhci-msm.c msm_host_readl(msm_host, host, msm_offset->core_pwrctl_ctl)); msm_host 1389 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1394 drivers/mmc/host/sdhci-msm.c const struct sdhci_msm_offset *msm_offset = msm_host->offset; msm_host 1396 drivers/mmc/host/sdhci-msm.c irq_status = msm_host_readl(msm_host, host, msm_host 1400 drivers/mmc/host/sdhci-msm.c msm_host_writel(msm_host, irq_status, host, msm_host 1410 drivers/mmc/host/sdhci-msm.c while (irq_status & msm_host_readl(msm_host, host, msm_host 1419 drivers/mmc/host/sdhci-msm.c msm_host_writel(msm_host, irq_status, host, msm_host 1451 drivers/mmc/host/sdhci-msm.c msm_host_writel(msm_host, irq_ack, host, msm_host 1458 drivers/mmc/host/sdhci-msm.c if (msm_host->caps_0 & CORE_VOLT_SUPPORT) { msm_host 1476 drivers/mmc/host/sdhci-msm.c (msm_host->caps_0 & CORE_3_0V_SUPPORT)) msm_host 1479 drivers/mmc/host/sdhci-msm.c (msm_host->caps_0 & CORE_1_8V_SUPPORT)) msm_host 1488 drivers/mmc/host/sdhci-msm.c msm_host->curr_pwr_state = pwr_state; msm_host 1490 drivers/mmc/host/sdhci-msm.c msm_host->curr_io_level = io_level; msm_host 1493 drivers/mmc/host/sdhci-msm.c mmc_hostname(msm_host->mmc), __func__, irq, irq_status, msm_host 1501 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1504 drivers/mmc/host/sdhci-msm.c msm_host->pwr_irq_flag = 1; msm_host 1505 drivers/mmc/host/sdhci-msm.c sdhci_msm_complete_pwr_irq_wait(msm_host); msm_host 1514 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1515 drivers/mmc/host/sdhci-msm.c struct clk *core_clk = msm_host->bulk_clks[0].clk; msm_host 1562 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1565 drivers/mmc/host/sdhci-msm.c msm_host->clk_rate = clock; msm_host 1586 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host); msm_host 1602 drivers/mmc/host/sdhci-msm.c msm_host->transfer_mode = val; msm_host 1605 drivers/mmc/host/sdhci-msm.c if (!msm_host->use_cdr) msm_host 1607 drivers/mmc/host/sdhci-msm.c if ((msm_host->transfer_mode & SDHCI_TRNS_READ) && msm_host 1617 drivers/mmc/host/sdhci-msm.c msm_host->pwr_irq_flag = 0; msm_host 1652 drivers/mmc/host/sdhci-msm.c static void sdhci_msm_set_regulator_caps(struct sdhci_msm_host *msm_host) msm_host 1654 drivers/mmc/host/sdhci-msm.c struct mmc_host *mmc = 
msm_host 1658 drivers/mmc/host/sdhci-msm.c const struct sdhci_msm_offset *msm_offset = msm_host->offset;
msm_host 1676 drivers/mmc/host/sdhci-msm.c u32 io_level = msm_host->curr_io_level;
msm_host 1690 drivers/mmc/host/sdhci-msm.c msm_host->caps_0 |= caps;
msm_host 1754 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host;
msm_host 1764 drivers/mmc/host/sdhci-msm.c host = sdhci_pltfm_init(pdev, &sdhci_msm_pdata, sizeof(*msm_host));
msm_host 1770 drivers/mmc/host/sdhci-msm.c msm_host = sdhci_pltfm_priv(pltfm_host);
msm_host 1771 drivers/mmc/host/sdhci-msm.c msm_host->mmc = host->mmc;
msm_host 1772 drivers/mmc/host/sdhci-msm.c msm_host->pdev = pdev;
msm_host 1784 drivers/mmc/host/sdhci-msm.c msm_host->mci_removed = var_info->mci_removed;
msm_host 1785 drivers/mmc/host/sdhci-msm.c msm_host->restore_dll_config = var_info->restore_dll_config;
msm_host 1786 drivers/mmc/host/sdhci-msm.c msm_host->var_ops = var_info->var_ops;
msm_host 1787 drivers/mmc/host/sdhci-msm.c msm_host->offset = var_info->offset;
msm_host 1789 drivers/mmc/host/sdhci-msm.c msm_offset = msm_host->offset;
msm_host 1793 drivers/mmc/host/sdhci-msm.c msm_host->saved_tuning_phase = INVALID_TUNING_PHASE;
msm_host 1796 drivers/mmc/host/sdhci-msm.c msm_host->bus_clk = devm_clk_get(&pdev->dev, "bus");
msm_host 1797 drivers/mmc/host/sdhci-msm.c if (!IS_ERR(msm_host->bus_clk)) {
msm_host 1799 drivers/mmc/host/sdhci-msm.c ret = clk_set_rate(msm_host->bus_clk, INT_MAX);
msm_host 1802 drivers/mmc/host/sdhci-msm.c ret = clk_prepare_enable(msm_host->bus_clk);
msm_host 1814 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks[1].clk = clk;
msm_host 1823 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks[0].clk = clk;
msm_host 1833 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks[2].clk = clk;
msm_host 1838 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks[3].clk = clk;
msm_host 1840 drivers/mmc/host/sdhci-msm.c ret = clk_bulk_prepare_enable(ARRAY_SIZE(msm_host->bulk_clks),
msm_host 1841 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks);
msm_host 1849 drivers/mmc/host/sdhci-msm.c msm_host->xo_clk = devm_clk_get(&pdev->dev, "xo");
msm_host 1850 drivers/mmc/host/sdhci-msm.c if (IS_ERR(msm_host->xo_clk)) {
msm_host 1851 drivers/mmc/host/sdhci-msm.c ret = PTR_ERR(msm_host->xo_clk);
msm_host 1855 drivers/mmc/host/sdhci-msm.c if (!msm_host->mci_removed) {
msm_host 1857 drivers/mmc/host/sdhci-msm.c msm_host->core_mem = devm_ioremap_resource(&pdev->dev,
msm_host 1860 drivers/mmc/host/sdhci-msm.c if (IS_ERR(msm_host->core_mem)) {
msm_host 1861 drivers/mmc/host/sdhci-msm.c ret = PTR_ERR(msm_host->core_mem);
msm_host 1870 drivers/mmc/host/sdhci-msm.c if (!msm_host->mci_removed) {
msm_host 1872 drivers/mmc/host/sdhci-msm.c msm_host_writel(msm_host, HC_MODE_EN, host,
msm_host 1874 drivers/mmc/host/sdhci-msm.c config = msm_host_readl(msm_host, host,
msm_host 1877 drivers/mmc/host/sdhci-msm.c msm_host_writel(msm_host, config, host,
msm_host 1886 drivers/mmc/host/sdhci-msm.c core_version = msm_host_readl(msm_host, host,
msm_host 1895 drivers/mmc/host/sdhci-msm.c msm_host->use_14lpp_dll_reset = true;
msm_host 1902 drivers/mmc/host/sdhci-msm.c msm_host->use_cdclp533 = true;
msm_host 1916 drivers/mmc/host/sdhci-msm.c msm_host->updated_ddr_cfg = true;
msm_host 1934 drivers/mmc/host/sdhci-msm.c msm_host->pwr_irq = platform_get_irq_byname(pdev, "pwr_irq");
msm_host 1935 drivers/mmc/host/sdhci-msm.c if (msm_host->pwr_irq < 0) {
msm_host 1936 drivers/mmc/host/sdhci-msm.c ret = msm_host->pwr_irq;
msm_host 1940 drivers/mmc/host/sdhci-msm.c sdhci_msm_init_pwr_irq_wait(msm_host);
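The probe entries above follow the usual sdhci-pltfm bring-up: sdhci_pltfm_init() reserves sizeof(*msm_host) bytes of private data, the optional "bus" clock is configured only if devm_clk_get() finds it, and the clocks collected in bulk_clks[] are enabled with a single clk_bulk_prepare_enable() call. A trimmed sketch of that clock sequence is below; the sketch_ struct and function are illustrative, and the individual bulk_clks[] lookups are elided.

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/limits.h>
#include <linux/platform_device.h>

/* Local stand-in for the clock-related members of struct sdhci_msm_host. */
struct sketch_msm_clks {
	struct clk *bus_clk;			/* optional "bus" clock */
	struct clk_bulk_data bulk_clks[4];	/* gated as one group */
};

static int sketch_enable_clocks(struct platform_device *pdev,
				struct sketch_msm_clks *c)
{
	int ret;

	/* The bus clock may be absent; only configure it when it was found. */
	c->bus_clk = devm_clk_get(&pdev->dev, "bus");
	if (!IS_ERR(c->bus_clk)) {
		ret = clk_set_rate(c->bus_clk, INT_MAX);
		if (ret)
			dev_warn(&pdev->dev, "failed to set bus clk rate\n");
		ret = clk_prepare_enable(c->bus_clk);
		if (ret)
			return ret;
	}

	/* bulk_clks[] is populated elsewhere; enable every entry in one call. */
	ret = clk_bulk_prepare_enable(ARRAY_SIZE(c->bulk_clks), c->bulk_clks);
	if (ret && !IS_ERR(c->bus_clk))
		clk_disable_unprepare(c->bus_clk);

	return ret;
}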
msm_host 1942 drivers/mmc/host/sdhci-msm.c msm_host_writel(msm_host, INT_MASK, host,
msm_host 1945 drivers/mmc/host/sdhci-msm.c ret = devm_request_threaded_irq(&pdev->dev, msm_host->pwr_irq, NULL,
msm_host 1953 drivers/mmc/host/sdhci-msm.c msm_host->mmc->caps |= MMC_CAP_WAIT_WHILE_BUSY | MMC_CAP_NEED_RSP_BUSY;
msm_host 1966 drivers/mmc/host/sdhci-msm.c sdhci_msm_set_regulator_caps(msm_host);
msm_host 1978 drivers/mmc/host/sdhci-msm.c clk_bulk_disable_unprepare(ARRAY_SIZE(msm_host->bulk_clks),
msm_host 1979 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks);
msm_host 1981 drivers/mmc/host/sdhci-msm.c if (!IS_ERR(msm_host->bus_clk))
msm_host 1982 drivers/mmc/host/sdhci-msm.c clk_disable_unprepare(msm_host->bus_clk);
msm_host 1992 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host);
msm_host 2002 drivers/mmc/host/sdhci-msm.c clk_bulk_disable_unprepare(ARRAY_SIZE(msm_host->bulk_clks),
msm_host 2003 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks);
msm_host 2004 drivers/mmc/host/sdhci-msm.c if (!IS_ERR(msm_host->bus_clk))
msm_host 2005 drivers/mmc/host/sdhci-msm.c clk_disable_unprepare(msm_host->bus_clk);
msm_host 2014 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host);
msm_host 2016 drivers/mmc/host/sdhci-msm.c clk_bulk_disable_unprepare(ARRAY_SIZE(msm_host->bulk_clks),
msm_host 2017 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks);
msm_host 2026 drivers/mmc/host/sdhci-msm.c struct sdhci_msm_host *msm_host = sdhci_pltfm_priv(pltfm_host);
msm_host 2029 drivers/mmc/host/sdhci-msm.c ret = clk_bulk_prepare_enable(ARRAY_SIZE(msm_host->bulk_clks),
msm_host 2030 drivers/mmc/host/sdhci-msm.c msm_host->bulk_clks);
msm_host 2037 drivers/mmc/host/sdhci-msm.c if (msm_host->restore_dll_config && msm_host->clk_rate)
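The final entries belong to the runtime-PM callbacks: suspend gates the bulk clocks, and resume re-enables them and then, when restore_dll_config is set and a clock rate has already been programmed, re-runs the DLL configuration. A hedged sketch of that pairing follows; the sketch_ names are stand-ins and the DLL restore itself is left as a comment.

#include <linux/clk.h>
#include <linux/kernel.h>

/* Minimal stand-in for the runtime-PM relevant fields of the host struct. */
struct sketch_rpm_state {
	struct clk_bulk_data bulk_clks[4];
	bool restore_dll_config;
	unsigned long clk_rate;
};

static int sketch_runtime_suspend(struct sketch_rpm_state *s)
{
	/* Gate every bulk clock; the optional bus clock is handled elsewhere. */
	clk_bulk_disable_unprepare(ARRAY_SIZE(s->bulk_clks), s->bulk_clks);
	return 0;
}

static int sketch_runtime_resume(struct sketch_rpm_state *s)
{
	int ret;

	ret = clk_bulk_prepare_enable(ARRAY_SIZE(s->bulk_clks), s->bulk_clks);
	if (ret)
		return ret;

	if (s->restore_dll_config && s->clk_rate) {
		/* driver-specific DLL reconfiguration goes here (elided) */
	}

	return 0;
}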