Identifier: gmu

drivers/gpu/drm/msm/adreno/a6xx_gmu.c

   13  static void a6xx_gmu_fault(struct a6xx_gmu *gmu)
   15  struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
   22  gmu->hung = true;
   33  struct a6xx_gmu *gmu = data;
   36  status = gmu_read(gmu, REG_A6XX_GMU_AO_HOST_INTERRUPT_STATUS);
   37  gmu_write(gmu, REG_A6XX_GMU_AO_HOST_INTERRUPT_CLR, status);
   40  dev_err_ratelimited(gmu->dev, "GMU watchdog expired\n");
   42  a6xx_gmu_fault(gmu);
   46  dev_err_ratelimited(gmu->dev, "GMU AHB bus error\n");
   49  dev_err_ratelimited(gmu->dev, "GMU fence error: 0x%x\n",
   50  gmu_read(gmu, REG_A6XX_GMU_AHB_FENCE_STATUS));
   57  struct a6xx_gmu *gmu = data;
   60  status = gmu_read(gmu, REG_A6XX_GMU_GMU2HOST_INTR_INFO);
   61  gmu_write(gmu, REG_A6XX_GMU_GMU2HOST_INTR_CLR, status);
   64  dev_err_ratelimited(gmu->dev, "GMU firmware fault\n");
   66  a6xx_gmu_fault(gmu);
   72  bool a6xx_gmu_sptprac_is_on(struct a6xx_gmu *gmu)
   77  if (!gmu->initialized)
   80  val = gmu_read(gmu, REG_A6XX_GMU_SPTPRAC_PWR_CLK_STATUS);
   88  bool a6xx_gmu_gx_is_on(struct a6xx_gmu *gmu)
   93  if (!gmu->initialized)
   96  val = gmu_read(gmu, REG_A6XX_GMU_SPTPRAC_PWR_CLK_STATUS);
  103  static void __a6xx_gmu_set_freq(struct a6xx_gmu *gmu, int index)
  105  struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  110  gmu_write(gmu, REG_A6XX_GMU_DCVS_ACK_OPTION, 0);
  112  gmu_write(gmu, REG_A6XX_GMU_DCVS_PERF_SETTING,
  119  gmu_write(gmu, REG_A6XX_GMU_DCVS_BW_SETTING, 0xff);
  122  a6xx_gmu_set_oob(gmu, GMU_OOB_DCVS_SET);
  123  a6xx_gmu_clear_oob(gmu, GMU_OOB_DCVS_SET);
  125  ret = gmu_read(gmu, REG_A6XX_GMU_DCVS_RETURN);
  127  dev_err(gmu->dev, "GMU set GPU frequency error: %d\n", ret);
  129  gmu->freq = gmu->gpu_freqs[index];
  142  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  145  if (freq == gmu->freq)
  148  for (perf_index = 0; perf_index < gmu->nr_gpu_freqs - 1; perf_index++)
  149  if (freq == gmu->gpu_freqs[perf_index])
  152  __a6xx_gmu_set_freq(gmu, perf_index);
  159  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  161  return gmu->freq;
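The block at lines 103-129 is the DCVS (dynamic clock and voltage scaling) request path: the driver programs a performance index, kicks the GMU firmware with an out-of-band (OOB) interrupt, and reads back a status word. A condensed sketch reconstructed from the occurrences above; the exact field packing of DCVS_PERF_SETTING and the meaning of the 0xff bus value are simplified here:

static void example_gmu_set_freq(struct a6xx_gmu *gmu, int index)
{
	int ret;

	/* tell the GMU not to wait for an explicit ACK (line 110) */
	gmu_write(gmu, REG_A6XX_GMU_DCVS_ACK_OPTION, 0);

	/* request the performance level; the real write at line 112
	 * packs additional fields around the index */
	gmu_write(gmu, REG_A6XX_GMU_DCVS_PERF_SETTING, index);

	/* bus bandwidth setting, 0xff as written at line 119 */
	gmu_write(gmu, REG_A6XX_GMU_DCVS_BW_SETTING, 0xff);

	/* wake the firmware with an OOB request, then release it */
	a6xx_gmu_set_oob(gmu, GMU_OOB_DCVS_SET);
	a6xx_gmu_clear_oob(gmu, GMU_OOB_DCVS_SET);

	ret = gmu_read(gmu, REG_A6XX_GMU_DCVS_RETURN);
	if (ret)
		dev_err(gmu->dev, "GMU set GPU frequency error: %d\n", ret);

	gmu->freq = gmu->gpu_freqs[index];
}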
  164  static bool a6xx_gmu_check_idle_level(struct a6xx_gmu *gmu)
  167  int local = gmu->idle_level;
  170  if (gmu->idle_level == GMU_IDLE_STATE_SPTP)
  173  val = gmu_read(gmu, REG_A6XX_GPU_GMU_CX_GMU_RPMH_POWER_STATE);
  176  if (gmu->idle_level != GMU_IDLE_STATE_IFPC ||
  177  !a6xx_gmu_gx_is_on(gmu))
  185  int a6xx_gmu_wait_for_idle(struct a6xx_gmu *gmu)
  187  return spin_until(a6xx_gmu_check_idle_level(gmu));
  190  static int a6xx_gmu_start(struct a6xx_gmu *gmu)
  195  gmu_write(gmu, REG_A6XX_GMU_CM3_SYSRESET, 1);
  196  gmu_write(gmu, REG_A6XX_GMU_CM3_SYSRESET, 0);
  198  ret = gmu_poll_timeout(gmu, REG_A6XX_GMU_CM3_FW_INIT_RESULT, val,
  202  DRM_DEV_ERROR(gmu->dev, "GMU firmware initialization timed out\n");
  207  static int a6xx_gmu_hfi_start(struct a6xx_gmu *gmu)
  212  gmu_write(gmu, REG_A6XX_GMU_HFI_CTRL_INIT, 1);
  214  ret = gmu_poll_timeout(gmu, REG_A6XX_GMU_HFI_CTRL_STATUS, val,
  217  DRM_DEV_ERROR(gmu->dev, "Unable to start the HFI queues\n");
  223  int a6xx_gmu_set_oob(struct a6xx_gmu *gmu, enum a6xx_gmu_oob_state state)
  251  gmu_write(gmu, REG_A6XX_GMU_HOST2GMU_INTR_SET, 1 << request);
  254  ret = gmu_poll_timeout(gmu, REG_A6XX_GMU_GMU2HOST_INTR_INFO, val,
  258  DRM_DEV_ERROR(gmu->dev,
  261  gmu_read(gmu, REG_A6XX_GMU_GMU2HOST_INTR_INFO));
  264  gmu_write(gmu, REG_A6XX_GMU_GMU2HOST_INTR_CLR, 1 << ack);
  270  void a6xx_gmu_clear_oob(struct a6xx_gmu *gmu, enum a6xx_gmu_oob_state state)
  274  gmu_write(gmu, REG_A6XX_GMU_HOST2GMU_INTR_SET,
  278  gmu_write(gmu, REG_A6XX_GMU_HOST2GMU_INTR_SET,
  282  gmu_write(gmu, REG_A6XX_GMU_HOST2GMU_INTR_SET,
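Lines 223-282 show the out-of-band (OOB) handshake that the DCVS and boot/slumber paths rely on: the host sets a request bit in HOST2GMU_INTR_SET, polls GMU2HOST_INTR_INFO for the matching ack bit, then clears the ack. A minimal sketch of that pattern; the bit positions and timing values are illustrative, not the driver's actual request/ack assignments:

static int example_oob_handshake(struct a6xx_gmu *gmu, int request, int ack)
{
	u32 val;
	int ret;

	/* raise the request bit for the GMU firmware (line 251) */
	gmu_write(gmu, REG_A6XX_GMU_HOST2GMU_INTR_SET, 1 << request);

	/* wait for the firmware to set the corresponding ack bit */
	ret = gmu_poll_timeout(gmu, REG_A6XX_GMU_GMU2HOST_INTR_INFO, val,
		val & (1 << ack), 100, 10000);
	if (ret)
		DRM_DEV_ERROR(gmu->dev, "OOB request %d timed out\n", request);

	/* consume the ack so the next handshake starts clean (line 264) */
	gmu_write(gmu, REG_A6XX_GMU_GMU2HOST_INTR_CLR, 1 << ack);

	return ret;
}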
  289  static int a6xx_sptprac_enable(struct a6xx_gmu *gmu)
  294  gmu_write(gmu, REG_A6XX_GMU_GX_SPTPRAC_POWER_CONTROL, 0x778000);
  296  ret = gmu_poll_timeout(gmu, REG_A6XX_GMU_SPTPRAC_PWR_CLK_STATUS, val,
  300  DRM_DEV_ERROR(gmu->dev, "Unable to power on SPTPRAC: 0x%x\n",
  301  gmu_read(gmu, REG_A6XX_GMU_SPTPRAC_PWR_CLK_STATUS));
  308  static void a6xx_sptprac_disable(struct a6xx_gmu *gmu)
  314  gmu_rmw(gmu, REG_A6XX_GPU_CC_GX_GDSCR, 0, (1 << 11));
  316  gmu_write(gmu, REG_A6XX_GMU_GX_SPTPRAC_POWER_CONTROL, 0x778001);
  318  ret = gmu_poll_timeout(gmu, REG_A6XX_GMU_SPTPRAC_PWR_CLK_STATUS, val,
  322  DRM_DEV_ERROR(gmu->dev, "failed to power off SPTPRAC: 0x%x\n",
  323  gmu_read(gmu, REG_A6XX_GMU_SPTPRAC_PWR_CLK_STATUS));
  327  static int a6xx_gmu_gfx_rail_on(struct a6xx_gmu *gmu)
  332  gmu_write(gmu, REG_A6XX_GMU_BOOT_SLUMBER_OPTION, 0);
  335  vote = gmu->gx_arc_votes[gmu->nr_gpu_freqs - 1];
  337  gmu_write(gmu, REG_A6XX_GMU_GX_VOTE_IDX, vote & 0xff);
  338  gmu_write(gmu, REG_A6XX_GMU_MX_VOTE_IDX, (vote >> 8) & 0xff);
  341  return a6xx_gmu_set_oob(gmu, GMU_OOB_BOOT_SLUMBER);
  345  static int a6xx_gmu_notify_slumber(struct a6xx_gmu *gmu)
  350  gmu_write(gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_ENABLE, 0);
  353  if (gmu->idle_level < GMU_IDLE_STATE_SPTP)
  354  a6xx_sptprac_disable(gmu);
  357  gmu_write(gmu, REG_A6XX_GMU_BOOT_SLUMBER_OPTION, 1);
  359  ret = a6xx_gmu_set_oob(gmu, GMU_OOB_BOOT_SLUMBER);
  360  a6xx_gmu_clear_oob(gmu, GMU_OOB_BOOT_SLUMBER);
  364  if (gmu_read(gmu, REG_A6XX_GPU_GMU_CX_GMU_RPMH_POWER_STATE)
  366  DRM_DEV_ERROR(gmu->dev, "The GMU did not go into slumber\n");
  372  gmu_write(gmu, REG_A6XX_GMU_AO_AHB_FENCE_CTRL, 0);
  376  static int a6xx_rpmh_start(struct a6xx_gmu *gmu)
  381  gmu_write(gmu, REG_A6XX_GMU_RSCC_CONTROL_REQ, 1 << 1);
  385  ret = gmu_poll_timeout(gmu, REG_A6XX_GMU_RSCC_CONTROL_ACK, val,
  388  DRM_DEV_ERROR(gmu->dev, "Unable to power on the GPU RSC\n");
  392  ret = gmu_poll_timeout(gmu, REG_A6XX_RSCC_SEQ_BUSY_DRV0, val,
  396  DRM_DEV_ERROR(gmu->dev, "GPU RSC sequence stuck while waking up the GPU\n");
  400  gmu_write(gmu, REG_A6XX_GMU_RSCC_CONTROL_REQ, 0);
  403  gmu_write(gmu, REG_A6XX_GPU_GMU_AO_GPU_CX_BUSY_MASK, 0xff000000);
  404  gmu_rmw(gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_SELECT_0, 0xff, 0x20);
  407  gmu_write(gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_ENABLE, 1);
  411  static void a6xx_rpmh_stop(struct a6xx_gmu *gmu)
  416  gmu_write(gmu, REG_A6XX_GMU_RSCC_CONTROL_REQ, 1);
  418  ret = gmu_poll_timeout(gmu, REG_A6XX_GPU_RSCC_RSC_STATUS0_DRV0,
  421  DRM_DEV_ERROR(gmu->dev, "Unable to power off the GPU RSC\n");
  423  gmu_write(gmu, REG_A6XX_GMU_RSCC_CONTROL_REQ, 0);
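a6xx_gmu_gfx_rail_on() (lines 327-341) votes for the highest power level by handing the GMU a packed RPMh ARC vote: the GX rail's level index sits in the low byte and the companion MX level in the byte above it. A minimal sketch, assuming only the packing visible at lines 335-338:

static void example_write_rail_vote(struct a6xx_gmu *gmu, u32 vote)
{
	/* low byte: GX (graphics rail) ARC vote index */
	gmu_write(gmu, REG_A6XX_GMU_GX_VOTE_IDX, vote & 0xff);
	/* next byte: the MX rail index that GX depends on */
	gmu_write(gmu, REG_A6XX_GMU_MX_VOTE_IDX, (vote >> 8) & 0xff);
}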
  434  static void a6xx_gmu_rpmh_init(struct a6xx_gmu *gmu)
  436  struct platform_device *pdev = to_platform_device(gmu->dev);
  444  gmu_write(gmu, REG_A6XX_GPU_RSCC_RSC_STATUS0_DRV0, BIT(24));
  447  gmu_write(gmu, REG_A6XX_RSCC_PDC_SLAVE_ID_DRV0, 1);
  448  gmu_write(gmu, REG_A6XX_RSCC_HIDDEN_TCS_CMD0_DATA, 0);
  449  gmu_write(gmu, REG_A6XX_RSCC_HIDDEN_TCS_CMD0_ADDR, 0);
  450  gmu_write(gmu, REG_A6XX_RSCC_HIDDEN_TCS_CMD0_DATA + 2, 0);
  451  gmu_write(gmu, REG_A6XX_RSCC_HIDDEN_TCS_CMD0_ADDR + 2, 0);
  452  gmu_write(gmu, REG_A6XX_RSCC_HIDDEN_TCS_CMD0_DATA + 4, 0x80000000);
  453  gmu_write(gmu, REG_A6XX_RSCC_HIDDEN_TCS_CMD0_ADDR + 4, 0);
  454  gmu_write(gmu, REG_A6XX_RSCC_OVERRIDE_START_ADDR, 0);
  455  gmu_write(gmu, REG_A6XX_RSCC_PDC_SEQ_START_ADDR, 0x4520);
  456  gmu_write(gmu, REG_A6XX_RSCC_PDC_MATCH_VALUE_LO, 0x4510);
  457  gmu_write(gmu, REG_A6XX_RSCC_PDC_MATCH_VALUE_HI, 0x4514);
  460  gmu_write(gmu, REG_A6XX_RSCC_SEQ_MEM_0_DRV0, 0xa7a506a0);
  461  gmu_write(gmu, REG_A6XX_RSCC_SEQ_MEM_0_DRV0 + 1, 0xa1e6a6e7);
  462  gmu_write(gmu, REG_A6XX_RSCC_SEQ_MEM_0_DRV0 + 2, 0xa2e081e1);
  463  gmu_write(gmu, REG_A6XX_RSCC_SEQ_MEM_0_DRV0 + 3, 0xe9a982e2);
  464  gmu_write(gmu, REG_A6XX_RSCC_SEQ_MEM_0_DRV0 + 4, 0x0020e8a8);
  522  static void a6xx_gmu_power_config(struct a6xx_gmu *gmu)
  525  gmu_write(gmu, REG_A6XX_GMU_SYS_BUS_CONFIG, 0x1);
  527  gmu_write(gmu, REG_A6XX_GMU_PWR_COL_INTER_FRAME_CTRL, 0x9c40400);
  529  switch (gmu->idle_level) {
  531  gmu_write(gmu, REG_A6XX_GMU_PWR_COL_INTER_FRAME_HYST,
  533  gmu_rmw(gmu, REG_A6XX_GMU_PWR_COL_INTER_FRAME_CTRL, 0,
  538  gmu_write(gmu, REG_A6XX_GMU_PWR_COL_SPTPRAC_HYST,
  540  gmu_rmw(gmu, REG_A6XX_GMU_PWR_COL_INTER_FRAME_CTRL, 0,
  546  gmu_rmw(gmu, REG_A6XX_GMU_RPMH_CTRL, 0,
  555  static int a6xx_gmu_fw_start(struct a6xx_gmu *gmu, unsigned int state)
  558  struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  565  ret = a6xx_rpmh_start(gmu);
  575  DRM_DEV_ERROR(gmu->dev,
  581  gmu_write(gmu, REG_A6XX_GMU_GENERAL_7, 1);
  585  a6xx_gmu_rpmh_init(gmu);
  588  ret = a6xx_rpmh_start(gmu);
  596  gmu_write(gmu, REG_A6XX_GMU_CM3_ITCM_START + i,
  600  gmu_write(gmu, REG_A6XX_GMU_CM3_FW_INIT_RESULT, 0);
  601  gmu_write(gmu, REG_A6XX_GMU_CM3_BOOT_CONFIG, 0x02);
  604  gmu_write(gmu, REG_A6XX_GMU_HFI_QTBL_ADDR, gmu->hfi->iova);
  605  gmu_write(gmu, REG_A6XX_GMU_HFI_QTBL_INFO, 1);
  607  gmu_write(gmu, REG_A6XX_GMU_AHB_FENCE_RANGE_0,
  615  gmu_write(gmu, REG_A6XX_GMU_HFI_SFR_ADDR, chipid);
  618  a6xx_gmu_power_config(gmu);
  620  ret = a6xx_gmu_start(gmu);
  624  ret = a6xx_gmu_gfx_rail_on(gmu);
  629  if (gmu->idle_level < GMU_IDLE_STATE_SPTP) {
  630  ret = a6xx_sptprac_enable(gmu);
  635  ret = a6xx_gmu_hfi_start(gmu);
  653  static void a6xx_gmu_irq_disable(struct a6xx_gmu *gmu)
  655  disable_irq(gmu->gmu_irq);
  656  disable_irq(gmu->hfi_irq);
  658  gmu_write(gmu, REG_A6XX_GMU_AO_HOST_INTERRUPT_MASK, ~0);
  659  gmu_write(gmu, REG_A6XX_GMU_GMU2HOST_INTR_MASK, ~0);
  662  static void a6xx_gmu_rpmh_off(struct a6xx_gmu *gmu)
  667  gmu_poll_timeout(gmu, REG_A6XX_RSCC_TCS0_DRV0_STATUS, val,
  669  gmu_poll_timeout(gmu, REG_A6XX_RSCC_TCS1_DRV0_STATUS, val,
  671  gmu_poll_timeout(gmu, REG_A6XX_RSCC_TCS2_DRV0_STATUS, val,
  673  gmu_poll_timeout(gmu, REG_A6XX_RSCC_TCS3_DRV0_STATUS, val,
  678  static void a6xx_gmu_force_off(struct a6xx_gmu *gmu)
  681  a6xx_hfi_stop(gmu);
  684  a6xx_gmu_irq_disable(gmu);
  687  a6xx_sptprac_disable(gmu);
  690  a6xx_gmu_rpmh_off(gmu);
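Line 596 sits inside a loop that copies the GMU firmware image into the Cortex-M3's ITCM one dword at a time through the register window; lines 600-601 then clear the init result and program the boot configuration. A sketch of that shape, with the fw buffer and length as hypothetical parameters:

static void example_load_gmu_fw(struct a6xx_gmu *gmu, const u32 *fw,
		size_t dwords)
{
	size_t i;

	/* the ITCM is exposed as a linear range of 32-bit registers */
	for (i = 0; i < dwords; i++)
		gmu_write(gmu, REG_A6XX_GMU_CM3_ITCM_START + i, fw[i]);

	/* clear the init result, set the boot config (lines 600-601) */
	gmu_write(gmu, REG_A6XX_GMU_CM3_FW_INIT_RESULT, 0);
	gmu_write(gmu, REG_A6XX_GMU_CM3_BOOT_CONFIG, 0x02);
}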
  697  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  700  if (WARN(!gmu->initialized, "The GMU is not set up yet\n"))
  703  gmu->hung = false;
  706  pm_runtime_get_sync(gmu->dev);
  709  clk_set_rate(gmu->core_clk, 200000000);
  710  ret = clk_bulk_prepare_enable(gmu->nr_clocks, gmu->clocks);
  712  pm_runtime_put(gmu->dev);
  720  gmu_write(gmu, REG_A6XX_GMU_AO_HOST_INTERRUPT_CLR, ~0);
  721  gmu_write(gmu, REG_A6XX_GMU_AO_HOST_INTERRUPT_MASK, ~A6XX_GMU_IRQ_MASK);
  722  enable_irq(gmu->gmu_irq);
  725  status = gmu_read(gmu, REG_A6XX_GMU_GENERAL_7) == 1 ?
  728  ret = a6xx_gmu_fw_start(gmu, status);
  732  ret = a6xx_hfi_start(gmu, status);
  740  gmu_write(gmu, REG_A6XX_GMU_GMU2HOST_INTR_CLR, ~0);
  741  gmu_write(gmu, REG_A6XX_GMU_GMU2HOST_INTR_MASK, ~A6XX_HFI_IRQ_MASK);
  742  enable_irq(gmu->hfi_irq);
  745  __a6xx_gmu_set_freq(gmu, gmu->nr_gpu_freqs - 1);
  752  if (!IS_ERR_OR_NULL(gmu->gxpd))
  753  pm_runtime_get(gmu->gxpd);
  758  disable_irq(gmu->gmu_irq);
  759  a6xx_rpmh_stop(gmu);
  760  pm_runtime_put(gmu->dev);
  766  bool a6xx_gmu_isidle(struct a6xx_gmu *gmu)
  770  if (!gmu->initialized)
  773  reg = gmu_read(gmu, REG_A6XX_GPU_GMU_AO_GPU_CX_BUSY_STATUS);
  782  static void a6xx_gmu_shutdown(struct a6xx_gmu *gmu)
  784  struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
  793  val = gmu_read(gmu, REG_A6XX_GPU_GMU_CX_GMU_RPMH_POWER_STATE);
  796  int ret = a6xx_gmu_wait_for_idle(gmu);
  800  a6xx_gmu_force_off(gmu);
  811  a6xx_gmu_notify_slumber(gmu);
  813  ret = gmu_poll_timeout(gmu,
  824  DRM_DEV_ERROR(gmu->dev,
  826  gmu_read(gmu,
  828  gmu_read(gmu,
  833  a6xx_hfi_stop(gmu);
  836  a6xx_gmu_irq_disable(gmu);
  839  a6xx_rpmh_stop(gmu);
  845  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  848  if (!pm_runtime_active(gmu->dev))
  855  if (gmu->hung)
  856  a6xx_gmu_force_off(gmu);
  858  a6xx_gmu_shutdown(gmu);
  868  if (!IS_ERR_OR_NULL(gmu->gxpd))
  869  pm_runtime_put_sync(gmu->gxpd);
  871  clk_bulk_disable_unprepare(gmu->nr_clocks, gmu->clocks);
  873  pm_runtime_put_sync(gmu->dev);
  878  static void a6xx_gmu_memory_free(struct a6xx_gmu *gmu, struct a6xx_gmu_bo *bo)
  890  iommu_unmap(gmu->domain, iova, PAGE_SIZE);
  898  static struct a6xx_gmu_bo *a6xx_gmu_memory_alloc(struct a6xx_gmu *gmu,
  924  bo->iova = gmu->uncached_iova_base;
  927  ret = iommu_map(gmu->domain,
  933  DRM_DEV_ERROR(gmu->dev, "Unable to map GMU buffer object\n");
  936  iommu_unmap(gmu->domain,
  950  gmu->uncached_iova_base += ALIGN(size, SZ_1M);
  966  static int a6xx_gmu_memory_probe(struct a6xx_gmu *gmu)
  975  gmu->uncached_iova_base = 0x60000000;
  978  gmu->domain = iommu_domain_alloc(&platform_bus_type);
  979  if (!gmu->domain)
  982  ret = iommu_attach_device(gmu->domain, gmu->dev);
  985  iommu_domain_free(gmu->domain);
  986  gmu->domain = NULL;
 1091  static int a6xx_gmu_rpmh_votes_init(struct a6xx_gmu *gmu)
 1093  struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
 1099  ret = a6xx_gmu_rpmh_arc_votes_init(&gpu->pdev->dev, gmu->gx_arc_votes,
 1100  gmu->gpu_freqs, gmu->nr_gpu_freqs, "gfx.lvl");
 1103  ret |= a6xx_gmu_rpmh_arc_votes_init(gmu->dev, gmu->cx_arc_votes,
 1104  gmu->gmu_freqs, gmu->nr_gmu_freqs, "cx.lvl");
 1141  static int a6xx_gmu_pwrlevels_probe(struct a6xx_gmu *gmu)
 1143  struct a6xx_gpu *a6xx_gpu = container_of(gmu, struct a6xx_gpu, gmu);
 1153  ret = dev_pm_opp_of_add_table(gmu->dev);
 1155  DRM_DEV_ERROR(gmu->dev, "Unable to set the OPP table for the GMU\n");
 1159  gmu->nr_gmu_freqs = a6xx_gmu_build_freq_table(gmu->dev,
 1160  gmu->gmu_freqs, ARRAY_SIZE(gmu->gmu_freqs));
 1166  gmu->nr_gpu_freqs = a6xx_gmu_build_freq_table(&gpu->pdev->dev,
 1167  gmu->gpu_freqs, ARRAY_SIZE(gmu->gpu_freqs));
 1170  return a6xx_gmu_rpmh_votes_init(gmu);
 1173  static int a6xx_gmu_clocks_probe(struct a6xx_gmu *gmu)
 1175  int ret = devm_clk_bulk_get_all(gmu->dev, &gmu->clocks);
 1180  gmu->nr_clocks = ret;
 1182  gmu->core_clk = msm_clk_bulk_get_clock(gmu->clocks,
 1183  gmu->nr_clocks, "gmu");
 1209  static int a6xx_gmu_get_irq(struct a6xx_gmu *gmu, struct platform_device *pdev,
 1216  ret = request_irq(irq, handler, IRQF_TRIGGER_HIGH, name, gmu);
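a6xx_gmu_memory_probe() (lines 966-986) gives the GMU its own IOMMU domain with an uncached IOVA window starting at 0x60000000. A condensed reconstruction from the occurrences above, using the same era's IOMMU API; the error codes are assumptions, since the index does not show them:

static int example_gmu_iommu_probe(struct a6xx_gmu *gmu)
{
	int ret;

	/* base of the GMU's uncached virtual address range (line 975) */
	gmu->uncached_iova_base = 0x60000000;

	gmu->domain = iommu_domain_alloc(&platform_bus_type);
	if (!gmu->domain)
		return -ENODEV;	/* assumed return value */

	ret = iommu_attach_device(gmu->domain, gmu->dev);
	if (ret) {
		iommu_domain_free(gmu->domain);
		gmu->domain = NULL;
	}

	return ret;
}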
 1230  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
 1232  if (!gmu->initialized)
 1235  pm_runtime_force_suspend(gmu->dev);
 1237  if (!IS_ERR_OR_NULL(gmu->gxpd)) {
 1238  pm_runtime_disable(gmu->gxpd);
 1239  dev_pm_domain_detach(gmu->gxpd, false);
 1242  iounmap(gmu->mmio);
 1243  gmu->mmio = NULL;
 1245  a6xx_gmu_memory_free(gmu, gmu->hfi);
 1247  iommu_detach_device(gmu->domain, gmu->dev);
 1249  iommu_domain_free(gmu->domain);
 1251  free_irq(gmu->gmu_irq, gmu);
 1252  free_irq(gmu->hfi_irq, gmu);
 1255  put_device(gmu->dev);
 1257  gmu->initialized = false;
 1262  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
 1269  gmu->dev = &pdev->dev;
 1271  of_dma_configure(gmu->dev, node, true);
 1274  gmu->idle_level = GMU_IDLE_STATE_ACTIVE;
 1276  pm_runtime_enable(gmu->dev);
 1279  ret = a6xx_gmu_clocks_probe(gmu);
 1284  ret = a6xx_gmu_memory_probe(gmu);
 1289  gmu->hfi = a6xx_gmu_memory_alloc(gmu, SZ_16K);
 1290  if (IS_ERR(gmu->hfi))
 1294  gmu->debug = a6xx_gmu_memory_alloc(gmu, SZ_16K);
 1295  if (IS_ERR(gmu->debug))
 1299  gmu->mmio = a6xx_gmu_get_mmio(pdev, "gmu");
 1300  if (IS_ERR(gmu->mmio))
 1304  gmu->hfi_irq = a6xx_gmu_get_irq(gmu, pdev, "hfi", a6xx_hfi_irq);
 1305  gmu->gmu_irq = a6xx_gmu_get_irq(gmu, pdev, "gmu", a6xx_gmu_irq);
 1307  if (gmu->hfi_irq < 0 || gmu->gmu_irq < 0)
 1314  gmu->gxpd = dev_pm_domain_attach_by_name(gmu->dev, "gx");
 1317  a6xx_gmu_pwrlevels_probe(gmu);
 1320  a6xx_hfi_init(gmu);
 1322  gmu->initialized = true;
 1327  iounmap(gmu->mmio);
 1328  free_irq(gmu->gmu_irq, gmu);
 1329  free_irq(gmu->hfi_irq, gmu);
 1331  a6xx_gmu_memory_free(gmu, gmu->hfi);
 1333  if (gmu->domain) {
 1334  iommu_detach_device(gmu->domain, gmu->dev);
 1336  iommu_domain_free(gmu->domain);
 1342  put_device(gmu->dev);
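struct a6xx_gmu is embedded directly in struct a6xx_gpu (a6xx_gpu.h line 23, below), which is why so many occurrences above recover the parent GPU with container_of(gmu, struct a6xx_gpu, gmu) -- lines 15, 105, 558, 784, 1093 and 1143. A self-contained illustration of the pointer arithmetic behind that idiom, using example types rather than the driver's:

#include <stddef.h>

struct gmu_ex { int hung; };
struct gpu_ex {
	int gpu_state;
	struct gmu_ex gmu;	/* embedded, as in struct a6xx_gpu */
};

/* container_of: subtract the member's offset within the enclosing type
 * from the member's address to get back to the enclosing structure */
#define container_of_ex(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static struct gpu_ex *gmu_to_gpu(struct gmu_ex *gmu)
{
	return container_of_ex(gmu, struct gpu_ex, gmu);
}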
drivers/gpu/drm/msm/adreno/a6xx_gmu.h

   82  static inline u32 gmu_read(struct a6xx_gmu *gmu, u32 offset)
   84  return msm_readl(gmu->mmio + (offset << 2));
   87  static inline void gmu_write(struct a6xx_gmu *gmu, u32 offset, u32 value)
   89  return msm_writel(value, gmu->mmio + (offset << 2));
   92  static inline void gmu_rmw(struct a6xx_gmu *gmu, u32 reg, u32 mask, u32 or)
   94  u32 val = gmu_read(gmu, reg);
   98  gmu_write(gmu, reg, val | or);
  101  static inline u64 gmu_read64(struct a6xx_gmu *gmu, u32 lo, u32 hi)
  105  val = (u64) msm_readl(gmu->mmio + (lo << 2));
  106  val |= ((u64) msm_readl(gmu->mmio + (hi << 2)) << 32);
  111  #define gmu_poll_timeout(gmu, addr, val, cond, interval, timeout) \
  112  readl_poll_timeout((gmu)->mmio + ((addr) << 2), val, cond, \
  161  void a6xx_hfi_init(struct a6xx_gmu *gmu);
  162  int a6xx_hfi_start(struct a6xx_gmu *gmu, int boot_state);
  163  void a6xx_hfi_stop(struct a6xx_gmu *gmu);
  165  bool a6xx_gmu_gx_is_on(struct a6xx_gmu *gmu);
  166  bool a6xx_gmu_sptprac_is_on(struct a6xx_gmu *gmu);
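The accessors above underpin nearly every occurrence in this index: registers are addressed by dword index (hence the offset << 2), gmu_rmw() clears `mask` and ORs in `or`, and gmu_poll_timeout() wraps readl_poll_timeout() over a GMU register. A usage sketch; the register, bits, and timing values here are illustrative only:

static int example_helper_usage(struct a6xx_gmu *gmu)
{
	u32 val;

	/* read-modify-write: clear bit 0, set bit 1 */
	gmu_rmw(gmu, REG_A6XX_GMU_RPMH_CTRL, BIT(0), BIT(1));

	/* poll every 100 us, for up to 10000 us, until bit 0 reads set;
	 * returns 0 on success or -ETIMEDOUT, like readl_poll_timeout() */
	return gmu_poll_timeout(gmu, REG_A6XX_GMU_RSCC_CONTROL_ACK, val,
		val & BIT(0), 100, 10000);
}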
drivers/gpu/drm/msm/adreno/a6xx_gpu.c

   21  if (!a6xx_gmu_isidle(&a6xx_gpu->gmu))
  148  gmu_read64(&a6xx_gpu->gmu, REG_A6XX_GMU_ALWAYS_ON_COUNTER_L,
  269  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  280  gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 1, 0);
  287  gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 0, 1);
  382  a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
  560  a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
  563  a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_BOOT_SLUMBER);
  597  gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 0);
  673  gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 1);
  767  a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
  772  a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
  806  busy_cycles = gmu_read64(&a6xx_gpu->gmu,

drivers/gpu/drm/msm/adreno/a6xx_gpu.h

   23  struct a6xx_gmu gmu;
   49  int a6xx_gmu_wait_for_idle(struct a6xx_gmu *gmu);
   51  bool a6xx_gmu_isidle(struct a6xx_gmu *gmu);
   53  int a6xx_gmu_set_oob(struct a6xx_gmu *gmu, enum a6xx_gmu_oob_state state);
   54  void a6xx_gmu_clear_oob(struct a6xx_gmu *gmu, enum a6xx_gmu_oob_state state);

drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c

  136  if (!a6xx_gmu_sptprac_is_on(&a6xx_gpu->gmu))
  724  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
  740  obj->data[index++] = gmu_read(gmu,
  763  if (!a6xx_gmu_gx_is_on(&a6xx_gpu->gmu))
  884  if (!a6xx_gmu_gx_is_on(&a6xx_gpu->gmu))

drivers/gpu/drm/msm/adreno/a6xx_hfi.c

   54  static int a6xx_hfi_queue_write(struct a6xx_gmu *gmu,
   78  gmu_write(gmu, REG_A6XX_GMU_HOST2GMU_INTR_SET, 0x01);
   82  static int a6xx_hfi_wait_for_ack(struct a6xx_gmu *gmu, u32 id, u32 seqnum,
   85  struct a6xx_hfi_queue *queue = &gmu->queues[HFI_RESPONSE_QUEUE];
   90  ret = gmu_poll_timeout(gmu, REG_A6XX_GMU_GMU2HOST_INTR_INFO, val,
   94  DRM_DEV_ERROR(gmu->dev,
  101  gmu_write(gmu, REG_A6XX_GMU_GMU2HOST_INTR_CLR,
  113  DRM_DEV_ERROR(gmu->dev,
  123  DRM_DEV_ERROR(gmu->dev, "GMU firmware error %d\n",
  129  DRM_DEV_ERROR(gmu->dev,
  136  DRM_DEV_ERROR(gmu->dev,
  151  static int a6xx_hfi_send_msg(struct a6xx_gmu *gmu, int id,
  154  struct a6xx_hfi_queue *queue = &gmu->queues[HFI_COMMAND_QUEUE];
  164  ret = a6xx_hfi_queue_write(gmu, queue, data, dwords);
  166  DRM_DEV_ERROR(gmu->dev, "Unable to send message %s id %d\n",
  171  return a6xx_hfi_wait_for_ack(gmu, id, seqnum, payload, payload_size);
  174  static int a6xx_hfi_send_gmu_init(struct a6xx_gmu *gmu, int boot_state)
  178  msg.dbg_buffer_addr = (u32) gmu->debug->iova;
  179  msg.dbg_buffer_size = (u32) gmu->debug->size;
  182  return a6xx_hfi_send_msg(gmu, HFI_H2F_MSG_INIT, &msg, sizeof(msg),
  186  static int a6xx_hfi_get_fw_version(struct a6xx_gmu *gmu, u32 *version)
  193  return a6xx_hfi_send_msg(gmu, HFI_H2F_MSG_FW_VERSION, &msg, sizeof(msg),
  197  static int a6xx_hfi_send_perf_table(struct a6xx_gmu *gmu)
  202  msg.num_gpu_levels = gmu->nr_gpu_freqs;
  203  msg.num_gmu_levels = gmu->nr_gmu_freqs;
  205  for (i = 0; i < gmu->nr_gpu_freqs; i++) {
  206  msg.gx_votes[i].vote = gmu->gx_arc_votes[i];
  207  msg.gx_votes[i].freq = gmu->gpu_freqs[i] / 1000;
  210  for (i = 0; i < gmu->nr_gmu_freqs; i++) {
  211  msg.cx_votes[i].vote = gmu->cx_arc_votes[i];
  212  msg.cx_votes[i].freq = gmu->gmu_freqs[i] / 1000;
  215  return a6xx_hfi_send_msg(gmu, HFI_H2F_MSG_PERF_TABLE, &msg, sizeof(msg),
  219  static int a6xx_hfi_send_bw_table(struct a6xx_gmu *gmu)
  262  return a6xx_hfi_send_msg(gmu, HFI_H2F_MSG_BW_TABLE, &msg, sizeof(msg),
  266  static int a6xx_hfi_send_test(struct a6xx_gmu *gmu)
  270  return a6xx_hfi_send_msg(gmu, HFI_H2F_MSG_TEST, &msg, sizeof(msg),
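Together these occurrences outline the HFI command path: a6xx_hfi_send_msg() stamps a sequence number into the message header, a6xx_hfi_queue_write() copies the message into the shared command queue and rings the doorbell by setting host-to-GMU interrupt bit 0 (line 78), and a6xx_hfi_wait_for_ack() polls GMU2HOST_INTR_INFO and matches id and seqnum against the response queue. A sketch of that shape; the header packing shown is hypothetical, not the driver's actual layout:

static int example_hfi_send(struct a6xx_gmu *gmu, u32 id,
		u32 *data, u32 dwords)
{
	static u32 seqnum;
	int ret;

	/* hypothetical header packing: seqnum | payload size | message id */
	data[0] = (++seqnum << 20) | (dwords << 8) | id;

	ret = a6xx_hfi_queue_write(gmu, &gmu->queues[HFI_COMMAND_QUEUE],
		data, dwords);
	if (ret) {
		DRM_DEV_ERROR(gmu->dev, "Unable to send message id %d\n", id);
		return ret;
	}

	/* block until the GMU posts a matching reply, no payload wanted */
	return a6xx_hfi_wait_for_ack(gmu, id, seqnum, NULL, 0);
}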
  274  int a6xx_hfi_start(struct a6xx_gmu *gmu, int boot_state)
  278  ret = a6xx_hfi_send_gmu_init(gmu, boot_state);
  282  ret = a6xx_hfi_get_fw_version(gmu, NULL);
  292  ret = a6xx_hfi_send_perf_table(gmu);
  296  ret = a6xx_hfi_send_bw_table(gmu);
  304  a6xx_hfi_send_test(gmu);
  309  void a6xx_hfi_stop(struct a6xx_gmu *gmu)
  313  for (i = 0; i < ARRAY_SIZE(gmu->queues); i++) {
  314  struct a6xx_hfi_queue *queue = &gmu->queues[i];
  320  DRM_DEV_ERROR(gmu->dev, "HFI queue %d is not empty\n", i);
  351  void a6xx_hfi_init(struct a6xx_gmu *gmu)
  353  struct a6xx_gmu_bo *hfi = gmu->hfi;
  364  table_size += (ARRAY_SIZE(gmu->queues) *
  372  table->num_queues = ARRAY_SIZE(gmu->queues);
  373  table->active_queues = ARRAY_SIZE(gmu->queues);
  377  a6xx_hfi_queue_init(&gmu->queues[0], &headers[0], hfi->virt + offset,
  382  a6xx_hfi_queue_init(&gmu->queues[1], &headers[1], hfi->virt + offset,
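For reference, a6xx_hfi_init() (lines 351-382) carves the single 16 KiB hfi buffer into a table header, the per-queue headers, and the queue data regions behind them; queue 0 is presumably the command queue and queue 1 the response queue, matching HFI_COMMAND_QUEUE and HFI_RESPONSE_QUEUE above. An approximate map, with the exact sizes and offsets assumed:

/*
 * gmu->hfi (SZ_16K), as laid out by a6xx_hfi_init():
 *
 *   hfi->virt + 0      queue table header
 *                      (num_queues = active_queues = ARRAY_SIZE(gmu->queues))
 *   + header size      queue headers[0 .. ARRAY_SIZE(gmu->queues) - 1]
 *   + offset           queue 0 data -> a6xx_hfi_queue_init(&gmu->queues[0], ...)
 *   + next offset      queue 1 data -> a6xx_hfi_queue_init(&gmu->queues[1], ...)
 */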