new_ps 1225 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(rps);
new_ps 1229 drivers/gpu/drm/amd/amdgpu/kv_dpm.c pi->current_ps = *new_ps;
new_ps 1237 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(rps);
new_ps 1241 drivers/gpu/drm/amd/amdgpu/kv_dpm.c pi->requested_ps = *new_ps;
new_ps 1774 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(new_rps);
new_ps 1782 drivers/gpu/drm/amd/amdgpu/kv_dpm.c if ((table->entries[i].clk >= new_ps->levels[0].sclk) ||
new_ps 1790 drivers/gpu/drm/amd/amdgpu/kv_dpm.c if (table->entries[i].clk <= new_ps->levels[new_ps->num_levels - 1].sclk)
new_ps 1796 drivers/gpu/drm/amd/amdgpu/kv_dpm.c if ((new_ps->levels[0].sclk - table->entries[pi->highest_valid].clk) >
new_ps 1797 drivers/gpu/drm/amd/amdgpu/kv_dpm.c (table->entries[pi->lowest_valid].clk - new_ps->levels[new_ps->num_levels - 1].sclk))
new_ps 1807 drivers/gpu/drm/amd/amdgpu/kv_dpm.c if (table->entries[i].sclk_frequency >= new_ps->levels[0].sclk ||
new_ps 1816 drivers/gpu/drm/amd/amdgpu/kv_dpm.c new_ps->levels[new_ps->num_levels - 1].sclk)
new_ps 1822 drivers/gpu/drm/amd/amdgpu/kv_dpm.c if ((new_ps->levels[0].sclk -
new_ps 1825 drivers/gpu/drm/amd/amdgpu/kv_dpm.c new_ps->levels[new_ps->num_levels -1].sclk))
new_ps 1836 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(new_rps);
new_ps 1842 drivers/gpu/drm/amd/amdgpu/kv_dpm.c clk_bypass_cntl = new_ps->need_dfs_bypass ?
new_ps 1909 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct amdgpu_ps *new_ps = &requested_ps;
new_ps 1911 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_update_requested_ps(adev, new_ps);
new_ps 1924 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct amdgpu_ps *new_ps = &pi->requested_rps;
new_ps 1938 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_set_valid_clock_range(adev, new_ps);
new_ps 1939 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_update_dfs_bypass_settings(adev, new_ps);
new_ps 1950 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_program_nbps_index_settings(adev, new_ps);
new_ps 1956 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ret = kv_update_vce_dpm(adev, new_ps, old_ps);
new_ps 1967 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_set_valid_clock_range(adev, new_ps);
new_ps 1968 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_update_dfs_bypass_settings(adev, new_ps);
new_ps 1978 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_program_nbps_index_settings(adev, new_ps);
new_ps 1981 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ret = kv_update_vce_dpm(adev, new_ps, old_ps);
new_ps 1999 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct amdgpu_ps *new_ps = &pi->requested_rps;
new_ps 2001 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_update_current_ps(adev, new_ps);
new_ps 2499 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(new_rps);
new_ps 2512 drivers/gpu/drm/amd/amdgpu/kv_dpm.c nbdpmconfig1 |= (new_ps->dpm0_pg_nb_ps_lo << NB_DPM_CONFIG_1__Dpm0PgNbPsLo__SHIFT) |
new_ps 2513 drivers/gpu/drm/amd/amdgpu/kv_dpm.c (new_ps->dpm0_pg_nb_ps_hi << NB_DPM_CONFIG_1__Dpm0PgNbPsHi__SHIFT) |
new_ps 2514 drivers/gpu/drm/amd/amdgpu/kv_dpm.c (new_ps->dpmx_nb_ps_lo << NB_DPM_CONFIG_1__DpmXNbPsLo__SHIFT) |
new_ps 2515 drivers/gpu/drm/amd/amdgpu/kv_dpm.c (new_ps->dpmx_nb_ps_hi << NB_DPM_CONFIG_1__DpmXNbPsHi__SHIFT);
new_ps 3149 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *new_ps = si_get_ps(rps);
new_ps 3154 drivers/gpu/drm/amd/amdgpu/si_dpm.c ni_pi->current_ps = *new_ps;
new_ps 3162 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *new_ps = si_get_ps(rps);
new_ps 3167 drivers/gpu/drm/amd/amdgpu/si_dpm.c ni_pi->requested_ps = *new_ps;
new_ps 3173 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct amdgpu_ps *new_ps,
new_ps 3176 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *new_state = si_get_ps(new_ps);
new_ps 3179 drivers/gpu/drm/amd/amdgpu/si_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 3180 drivers/gpu/drm/amd/amdgpu/si_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 3187 drivers/gpu/drm/amd/amdgpu/si_dpm.c amdgpu_asic_set_uvd_clocks(adev, new_ps->vclk, new_ps->dclk);
new_ps 3191 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct amdgpu_ps *new_ps,
new_ps 3194 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *new_state = si_get_ps(new_ps);
new_ps 3197 drivers/gpu/drm/amd/amdgpu/si_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 3198 drivers/gpu/drm/amd/amdgpu/si_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 3205 drivers/gpu/drm/amd/amdgpu/si_dpm.c amdgpu_asic_set_uvd_clocks(adev, new_ps->vclk, new_ps->dclk);
new_ps 6943 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct amdgpu_ps *new_ps = &requested_ps;
new_ps 6945 drivers/gpu/drm/amd/amdgpu/si_dpm.c ni_update_requested_ps(adev, new_ps);
new_ps 6953 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct amdgpu_ps *new_ps = adev->pm.dpm.requested_ps;
new_ps 6962 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_populate_smc_tdp_limits(adev, new_ps);
new_ps 6965 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_populate_smc_tdp_limits_2(adev, new_ps);
new_ps 6981 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct amdgpu_ps *new_ps = &eg_pi->requested_rps;
new_ps 6996 drivers/gpu/drm/amd/amdgpu/si_dpm.c si_request_link_speed_change_before_state_change(adev, new_ps, old_ps);
new_ps 6997 drivers/gpu/drm/amd/amdgpu/si_dpm.c ni_set_uvd_clock_before_set_eng_clock(adev, new_ps, old_ps);
new_ps 6998 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_enable_power_containment(adev, new_ps, false);
new_ps 7003 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_enable_smc_cac(adev, new_ps, false);
new_ps 7013 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_upload_sw_state(adev, new_ps);
new_ps 7029 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_upload_mc_reg_table(adev, new_ps);
new_ps 7035 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_program_memory_timing_parameters(adev, new_ps);
new_ps 7040 drivers/gpu/drm/amd/amdgpu/si_dpm.c si_set_pcie_lane_width_in_smc(adev, new_ps, old_ps);
new_ps 7052 drivers/gpu/drm/amd/amdgpu/si_dpm.c ni_set_uvd_clock_after_set_eng_clock(adev, new_ps, old_ps);
new_ps 7054 drivers/gpu/drm/amd/amdgpu/si_dpm.c si_notify_link_speed_change_after_state_change(adev, new_ps, old_ps);
new_ps 7055 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_set_power_state_conditionally_enable_ulv(adev, new_ps);
new_ps 7060 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_enable_smc_cac(adev, new_ps, true);
new_ps 7065 drivers/gpu/drm/amd/amdgpu/si_dpm.c ret = si_enable_power_containment(adev, new_ps, true);
new_ps 7084 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct amdgpu_ps *new_ps = &eg_pi->requested_rps;
new_ps 7086 drivers/gpu/drm/amd/amdgpu/si_dpm.c ni_update_current_ps(adev, new_ps);
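The amdgpu kv_dpm.c and si_dpm.c hits above all follow one bookkeeping pattern: the generic amdgpu_ps wrapper carries a driver-private state, a *_get_ps() helper unwraps it, and the *_update_requested_ps()/*_update_current_ps() helpers cache a copy by value in the power-info struct ("pi->requested_ps = *new_ps;"). The stand-alone C sketch below only illustrates that bookkeeping; the struct layouts, the simplified signatures, and the re-pointing of ps_priv at the cached copy are assumptions for this sketch, not the kernel definitions.

#include <stdio.h>

/* Simplified stand-ins for the kernel types; field layout is assumed. */
struct kv_ps {
	unsigned int sclk;                     /* one representative field */
};

struct amdgpu_ps {
	unsigned int vclk, dclk;               /* generic clocks */
	void *ps_priv;                         /* driver-private payload */
};

struct kv_power_info {
	struct kv_ps current_ps, requested_ps;
	struct amdgpu_ps current_rps, requested_rps;
};

static struct kv_ps *kv_get_ps(struct amdgpu_ps *rps)
{
	return rps->ps_priv;                   /* unwrap the private state */
}

/* Cache the requested state by value, as in "pi->requested_ps = *new_ps;". */
static void update_requested_ps(struct kv_power_info *pi, struct amdgpu_ps *rps)
{
	struct kv_ps *new_ps = kv_get_ps(rps);

	pi->requested_ps = *new_ps;
	pi->requested_rps = *rps;
	/* assumption: keep the cached wrapper pointing at the cached payload */
	pi->requested_rps.ps_priv = &pi->requested_ps;
}

int main(void)
{
	struct kv_ps boot = { .sclk = 200000 };
	struct amdgpu_ps rps = { .vclk = 0, .dclk = 0, .ps_priv = &boot };
	struct kv_power_info pi = { 0 };

	update_requested_ps(&pi, &rps);
	printf("requested sclk = %u\n", pi.requested_ps.sclk);
	return 0;
}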
new_ps 235 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c struct pp_power_state *new_ps)
new_ps 241 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c if (new_ps != NULL)
new_ps 242 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c requested = new_ps;
new_ps 260 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c struct pp_power_state *new_ps)
new_ps 269 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c power_state_management(hwmgr, new_ps);
new_ps 38 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.h struct pp_power_state *new_ps);
new_ps 2262 drivers/gpu/drm/radeon/btc_dpm.c struct rv7xx_ps *new_ps = rv770_get_ps(rps);
new_ps 2266 drivers/gpu/drm/radeon/btc_dpm.c eg_pi->current_ps = *new_ps;
new_ps 2273 drivers/gpu/drm/radeon/btc_dpm.c struct rv7xx_ps *new_ps = rv770_get_ps(rps);
new_ps 2277 drivers/gpu/drm/radeon/btc_dpm.c eg_pi->requested_ps = *new_ps;
new_ps 2295 drivers/gpu/drm/radeon/btc_dpm.c struct radeon_ps *new_ps = &requested_ps;
new_ps 2297 drivers/gpu/drm/radeon/btc_dpm.c btc_update_requested_ps(rdev, new_ps);
new_ps 2307 drivers/gpu/drm/radeon/btc_dpm.c struct radeon_ps *new_ps = &eg_pi->requested_rps;
new_ps 2319 drivers/gpu/drm/radeon/btc_dpm.c cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps);
new_ps 2321 drivers/gpu/drm/radeon/btc_dpm.c rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 2327 drivers/gpu/drm/radeon/btc_dpm.c btc_set_at_for_uvd(rdev, new_ps);
new_ps 2329 drivers/gpu/drm/radeon/btc_dpm.c btc_notify_uvd_to_smc(rdev, new_ps);
new_ps 2330 drivers/gpu/drm/radeon/btc_dpm.c ret = cypress_upload_sw_state(rdev, new_ps);
new_ps 2336 drivers/gpu/drm/radeon/btc_dpm.c ret = cypress_upload_mc_reg_table(rdev, new_ps);
new_ps 2343 drivers/gpu/drm/radeon/btc_dpm.c cypress_program_memory_timing_parameters(rdev, new_ps);
new_ps 2355 drivers/gpu/drm/radeon/btc_dpm.c rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
new_ps 2358 drivers/gpu/drm/radeon/btc_dpm.c cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);
new_ps 2360 drivers/gpu/drm/radeon/btc_dpm.c ret = btc_set_power_state_conditionally_enable_ulv(rdev, new_ps);
new_ps 2372 drivers/gpu/drm/radeon/btc_dpm.c struct radeon_ps *new_ps = &eg_pi->requested_rps;
new_ps 2374 drivers/gpu/drm/radeon/btc_dpm.c btc_update_current_ps(rdev, new_ps);
new_ps 5111 drivers/gpu/drm/radeon/ci_dpm.c struct ci_ps *new_ps = ci_get_ps(rps);
new_ps 5115 drivers/gpu/drm/radeon/ci_dpm.c pi->current_ps = *new_ps;
new_ps 5122 drivers/gpu/drm/radeon/ci_dpm.c struct ci_ps *new_ps = ci_get_ps(rps);
new_ps 5126 drivers/gpu/drm/radeon/ci_dpm.c pi->requested_ps = *new_ps;
new_ps 5134 drivers/gpu/drm/radeon/ci_dpm.c struct radeon_ps *new_ps = &requested_ps;
new_ps 5136 drivers/gpu/drm/radeon/ci_dpm.c ci_update_requested_ps(rdev, new_ps);
new_ps 5146 drivers/gpu/drm/radeon/ci_dpm.c struct radeon_ps *new_ps = &pi->requested_rps;
new_ps 5148 drivers/gpu/drm/radeon/ci_dpm.c ci_update_current_ps(rdev, new_ps);
new_ps 5354 drivers/gpu/drm/radeon/ci_dpm.c struct radeon_ps *new_ps = &pi->requested_rps;
new_ps 5358 drivers/gpu/drm/radeon/ci_dpm.c ci_find_dpm_states_clocks_in_dpm_table(rdev, new_ps);
new_ps 5360 drivers/gpu/drm/radeon/ci_dpm.c ci_request_link_speed_change_before_state_change(rdev, new_ps, old_ps);
new_ps 5366 drivers/gpu/drm/radeon/ci_dpm.c ret = ci_populate_and_upload_sclk_mclk_dpm_levels(rdev, new_ps);
new_ps 5371 drivers/gpu/drm/radeon/ci_dpm.c ret = ci_generate_dpm_level_enable_mask(rdev, new_ps);
new_ps 5377 drivers/gpu/drm/radeon/ci_dpm.c ret = ci_update_vce_dpm(rdev, new_ps, old_ps);
new_ps 5411 drivers/gpu/drm/radeon/ci_dpm.c ci_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);
new_ps 1958 drivers/gpu/drm/radeon/cypress_dpm.c struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
new_ps 1968 drivers/gpu/drm/radeon/cypress_dpm.c cypress_notify_link_speed_change_before_state_change(rdev, new_ps, old_ps);
new_ps 1970 drivers/gpu/drm/radeon/cypress_dpm.c rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 1976 drivers/gpu/drm/radeon/cypress_dpm.c ret = cypress_upload_sw_state(rdev, new_ps);
new_ps 1982 drivers/gpu/drm/radeon/cypress_dpm.c ret = cypress_upload_mc_reg_table(rdev, new_ps);
new_ps 1989 drivers/gpu/drm/radeon/cypress_dpm.c cypress_program_memory_timing_parameters(rdev, new_ps);
new_ps 2001 drivers/gpu/drm/radeon/cypress_dpm.c rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
new_ps 2004 drivers/gpu/drm/radeon/cypress_dpm.c cypress_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);
new_ps 1143 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(rps);
new_ps 1147 drivers/gpu/drm/radeon/kv_dpm.c pi->current_ps = *new_ps;
new_ps 1154 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(rps);
new_ps 1158 drivers/gpu/drm/radeon/kv_dpm.c pi->requested_ps = *new_ps;
new_ps 1710 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(new_rps);
new_ps 1718 drivers/gpu/drm/radeon/kv_dpm.c if ((table->entries[i].clk >= new_ps->levels[0].sclk) ||
new_ps 1726 drivers/gpu/drm/radeon/kv_dpm.c if (table->entries[i].clk <= new_ps->levels[new_ps->num_levels - 1].sclk)
new_ps 1732 drivers/gpu/drm/radeon/kv_dpm.c if ((new_ps->levels[0].sclk - table->entries[pi->highest_valid].clk) >
new_ps 1733 drivers/gpu/drm/radeon/kv_dpm.c (table->entries[pi->lowest_valid].clk - new_ps->levels[new_ps->num_levels - 1].sclk))
new_ps 1743 drivers/gpu/drm/radeon/kv_dpm.c if (table->entries[i].sclk_frequency >= new_ps->levels[0].sclk ||
new_ps 1752 drivers/gpu/drm/radeon/kv_dpm.c new_ps->levels[new_ps->num_levels - 1].sclk)
new_ps 1758 drivers/gpu/drm/radeon/kv_dpm.c if ((new_ps->levels[0].sclk -
new_ps 1761 drivers/gpu/drm/radeon/kv_dpm.c new_ps->levels[new_ps->num_levels -1].sclk))
new_ps 1772 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(new_rps);
new_ps 1778 drivers/gpu/drm/radeon/kv_dpm.c clk_bypass_cntl = new_ps->need_dfs_bypass ?
new_ps 1843 drivers/gpu/drm/radeon/kv_dpm.c struct radeon_ps *new_ps = &requested_ps;
new_ps 1845 drivers/gpu/drm/radeon/kv_dpm.c kv_update_requested_ps(rdev, new_ps);
new_ps 1857 drivers/gpu/drm/radeon/kv_dpm.c struct radeon_ps *new_ps = &pi->requested_rps;
new_ps 1871 drivers/gpu/drm/radeon/kv_dpm.c kv_set_valid_clock_range(rdev, new_ps);
new_ps 1872 drivers/gpu/drm/radeon/kv_dpm.c kv_update_dfs_bypass_settings(rdev, new_ps);
new_ps 1883 drivers/gpu/drm/radeon/kv_dpm.c kv_program_nbps_index_settings(rdev, new_ps);
new_ps 1889 drivers/gpu/drm/radeon/kv_dpm.c ret = kv_update_vce_dpm(rdev, new_ps, old_ps);
new_ps 1900 drivers/gpu/drm/radeon/kv_dpm.c kv_set_valid_clock_range(rdev, new_ps);
new_ps 1901 drivers/gpu/drm/radeon/kv_dpm.c kv_update_dfs_bypass_settings(rdev, new_ps);
new_ps 1911 drivers/gpu/drm/radeon/kv_dpm.c kv_program_nbps_index_settings(rdev, new_ps);
new_ps 1914 drivers/gpu/drm/radeon/kv_dpm.c ret = kv_update_vce_dpm(rdev, new_ps, old_ps);
new_ps 1931 drivers/gpu/drm/radeon/kv_dpm.c struct radeon_ps *new_ps = &pi->requested_rps;
new_ps 1933 drivers/gpu/drm/radeon/kv_dpm.c kv_update_current_ps(rdev, new_ps);
new_ps 2434 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *new_ps = kv_get_ps(new_rps);
new_ps 2445 drivers/gpu/drm/radeon/kv_dpm.c nbdpmconfig1 |= (Dpm0PgNbPsLo(new_ps->dpm0_pg_nb_ps_lo) |
new_ps 2446 drivers/gpu/drm/radeon/kv_dpm.c Dpm0PgNbPsHi(new_ps->dpm0_pg_nb_ps_hi) |
new_ps 2447 drivers/gpu/drm/radeon/kv_dpm.c DpmXNbPsLo(new_ps->dpmx_nb_ps_lo) |
new_ps 2448 drivers/gpu/drm/radeon/kv_dpm.c DpmXNbPsHi(new_ps->dpmx_nb_ps_hi));
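The radeon kv_dpm.c hits at lines 2445-2448 pack the same four northbridge P-state fields with the Dpm0PgNbPsLo()/DpmXNbPsHi() macros that the amdgpu copy packs with explicit __SHIFT constants (lines 2512-2515 in the first group). A minimal sketch of that shift-and-OR packing follows; the shift values and the struct are placeholders for illustration, the real definitions live in the driver's register headers.

#include <stdint.h>
#include <stdio.h>

/* Placeholder shifts; the real ones come from the register headers. */
#define DPM0_PG_NB_PS_LO_SHIFT 0
#define DPM0_PG_NB_PS_HI_SHIFT 4
#define DPMX_NB_PS_LO_SHIFT    8
#define DPMX_NB_PS_HI_SHIFT    12

struct kv_nb_ps_fields {
	uint32_t dpm0_pg_nb_ps_lo, dpm0_pg_nb_ps_hi;
	uint32_t dpmx_nb_ps_lo, dpmx_nb_ps_hi;
};

/* OR the four fields into the config word, mirroring the |= chains above. */
static uint32_t pack_nb_dpm_config(uint32_t nbdpmconfig1,
				   const struct kv_nb_ps_fields *new_ps)
{
	nbdpmconfig1 |= (new_ps->dpm0_pg_nb_ps_lo << DPM0_PG_NB_PS_LO_SHIFT) |
			(new_ps->dpm0_pg_nb_ps_hi << DPM0_PG_NB_PS_HI_SHIFT) |
			(new_ps->dpmx_nb_ps_lo << DPMX_NB_PS_LO_SHIFT) |
			(new_ps->dpmx_nb_ps_hi << DPMX_NB_PS_HI_SHIFT);
	return nbdpmconfig1;
}

int main(void)
{
	struct kv_nb_ps_fields f = { 1, 2, 1, 3 };

	printf("NB_DPM_CONFIG_1 = 0x%08x\n", pack_nb_dpm_config(0, &f));
	return 0;
}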
new_ps 3509 drivers/gpu/drm/radeon/ni_dpm.c struct radeon_ps *new_ps,
new_ps 3512 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *new_state = ni_get_ps(new_ps);
new_ps 3515 drivers/gpu/drm/radeon/ni_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 3516 drivers/gpu/drm/radeon/ni_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 3523 drivers/gpu/drm/radeon/ni_dpm.c radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
new_ps 3527 drivers/gpu/drm/radeon/ni_dpm.c struct radeon_ps *new_ps,
new_ps 3530 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *new_state = ni_get_ps(new_ps);
new_ps 3533 drivers/gpu/drm/radeon/ni_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 3534 drivers/gpu/drm/radeon/ni_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 3541 drivers/gpu/drm/radeon/ni_dpm.c radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
new_ps 3564 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *new_ps = ni_get_ps(rps);
new_ps 3569 drivers/gpu/drm/radeon/ni_dpm.c ni_pi->current_ps = *new_ps;
new_ps 3576 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *new_ps = ni_get_ps(rps);
new_ps 3581 drivers/gpu/drm/radeon/ni_dpm.c ni_pi->requested_ps = *new_ps;
new_ps 3743 drivers/gpu/drm/radeon/ni_dpm.c struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
new_ps 3752 drivers/gpu/drm/radeon/ni_dpm.c ret = ni_populate_smc_tdp_limits(rdev, new_ps);
new_ps 3769 drivers/gpu/drm/radeon/ni_dpm.c struct radeon_ps *new_ps = &requested_ps;
new_ps 3771 drivers/gpu/drm/radeon/ni_dpm.c ni_update_requested_ps(rdev, new_ps);
new_ps 3781 drivers/gpu/drm/radeon/ni_dpm.c struct radeon_ps *new_ps = &eg_pi->requested_rps;
new_ps 3790 drivers/gpu/drm/radeon/ni_dpm.c ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 3791 drivers/gpu/drm/radeon/ni_dpm.c ret = ni_enable_power_containment(rdev, new_ps, false);
new_ps 3796 drivers/gpu/drm/radeon/ni_dpm.c ret = ni_enable_smc_cac(rdev, new_ps, false);
new_ps 3807 drivers/gpu/drm/radeon/ni_dpm.c btc_notify_uvd_to_smc(rdev, new_ps);
new_ps 3808 drivers/gpu/drm/radeon/ni_dpm.c ret = ni_upload_sw_state(rdev, new_ps);
new_ps 3814 drivers/gpu/drm/radeon/ni_dpm.c ret = ni_upload_mc_reg_table(rdev, new_ps);
new_ps 3820 drivers/gpu/drm/radeon/ni_dpm.c ret = ni_program_memory_timing_parameters(rdev, new_ps);
new_ps 3835 drivers/gpu/drm/radeon/ni_dpm.c ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
new_ps 3836 drivers/gpu/drm/radeon/ni_dpm.c ret = ni_enable_smc_cac(rdev, new_ps, true);
new_ps 3841 drivers/gpu/drm/radeon/ni_dpm.c ret = ni_enable_power_containment(rdev, new_ps, true);
new_ps 3860 drivers/gpu/drm/radeon/ni_dpm.c struct radeon_ps *new_ps = &eg_pi->requested_rps;
new_ps 3862 drivers/gpu/drm/radeon/ni_dpm.c ni_update_current_ps(rdev, new_ps);
new_ps 242 drivers/gpu/drm/radeon/ni_dpm.h struct radeon_ps *new_ps,
new_ps 245 drivers/gpu/drm/radeon/ni_dpm.h struct radeon_ps *new_ps,
new_ps 428 drivers/gpu/drm/radeon/rs780_dpm.c struct radeon_ps *new_ps,
new_ps 432 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *new_state = rs780_get_ps(new_ps);
new_ps 476 drivers/gpu/drm/radeon/rs780_dpm.c struct radeon_ps *new_ps,
new_ps 479 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *new_state = rs780_get_ps(new_ps);
new_ps 495 drivers/gpu/drm/radeon/rs780_dpm.c struct radeon_ps *new_ps,
new_ps 498 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *new_state = rs780_get_ps(new_ps);
new_ps 525 drivers/gpu/drm/radeon/rs780_dpm.c struct radeon_ps *new_ps)
new_ps 527 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *new_state = rs780_get_ps(new_ps);
new_ps 566 drivers/gpu/drm/radeon/rs780_dpm.c struct radeon_ps *new_ps,
new_ps 569 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *new_state = rs780_get_ps(new_ps);
new_ps 572 drivers/gpu/drm/radeon/rs780_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 573 drivers/gpu/drm/radeon/rs780_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 579 drivers/gpu/drm/radeon/rs780_dpm.c radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
new_ps 583 drivers/gpu/drm/radeon/rs780_dpm.c struct radeon_ps *new_ps,
new_ps 586 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *new_state = rs780_get_ps(new_ps);
new_ps 589 drivers/gpu/drm/radeon/rs780_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 590 drivers/gpu/drm/radeon/rs780_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 596 drivers/gpu/drm/radeon/rs780_dpm.c radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
new_ps 654 drivers/gpu/drm/radeon/rs780_dpm.c struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
new_ps 660 drivers/gpu/drm/radeon/rs780_dpm.c rs780_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 667 drivers/gpu/drm/radeon/rs780_dpm.c ret = rs780_set_engine_clock_scaling(rdev, new_ps, old_ps);
new_ps 670 drivers/gpu/drm/radeon/rs780_dpm.c rs780_set_engine_clock_spc(rdev, new_ps, old_ps);
new_ps 672 drivers/gpu/drm/radeon/rs780_dpm.c rs780_activate_engine_clk_scaling(rdev, new_ps, old_ps);
new_ps 675 drivers/gpu/drm/radeon/rs780_dpm.c rs780_enable_voltage_scaling(rdev, new_ps);
new_ps 677 drivers/gpu/drm/radeon/rs780_dpm.c rs780_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
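One guard repeats through the rs780, rv6xx, rv770, ni and si hits: the set_uvd_clock_before/after_set_eng_clock helpers compare the requested UVD clocks against the current ones and skip reprogramming when nothing changed. The sketch below shows just that guard with simplified stand-in types; the before/after ordering decisions the real helpers make around the engine-clock switch are not reproduced.

#include <stdio.h>

struct ps_clocks {
	unsigned int vclk;   /* UVD video clock */
	unsigned int dclk;   /* UVD decode clock */
};

/* Stand-in for radeon_set_uvd_clocks()/amdgpu_asic_set_uvd_clocks(). */
static void set_uvd_clocks(unsigned int vclk, unsigned int dclk)
{
	printf("reprogram UVD: vclk=%u dclk=%u\n", vclk, dclk);
}

static void set_uvd_clock_around_eng_clock(const struct ps_clocks *new_ps,
					   const struct ps_clocks *old_ps)
{
	if (new_ps->vclk == old_ps->vclk && new_ps->dclk == old_ps->dclk)
		return;                /* clocks unchanged, leave UVD alone */

	set_uvd_clocks(new_ps->vclk, new_ps->dclk);
}

int main(void)
{
	struct ps_clocks cur = { 54000, 40000 };
	struct ps_clocks req = { 70000, 56000 };

	set_uvd_clock_around_eng_clock(&req, &cur);
	return 0;
}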
new_ps 966 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 969 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1046 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps)
new_ps 1048 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1199 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1202 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1229 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1232 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1243 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1246 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1305 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1308 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1320 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1323 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1418 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1421 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1432 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps)
new_ps 1434 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1452 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps)
new_ps 1454 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1494 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1497 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1512 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1515 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1518 drivers/gpu/drm/radeon/rv6xx_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 1519 drivers/gpu/drm/radeon/rv6xx_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 1525 drivers/gpu/drm/radeon/rv6xx_dpm.c radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
new_ps 1529 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps,
new_ps 1532 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *new_state = rv6xx_get_ps(new_ps);
new_ps 1535 drivers/gpu/drm/radeon/rv6xx_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 1536 drivers/gpu/drm/radeon/rv6xx_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 1542 drivers/gpu/drm/radeon/rv6xx_dpm.c radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
new_ps 1659 drivers/gpu/drm/radeon/rv6xx_dpm.c struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
new_ps 1665 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 1678 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_generate_transition_stepping(rdev, new_ps, old_ps);
new_ps 1682 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_set_sw_voltage_to_safe(rdev, new_ps, old_ps);
new_ps 1688 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_set_safe_backbias(rdev, new_ps, old_ps);
new_ps 1691 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_set_safe_pcie_gen2(rdev, new_ps, old_ps);
new_ps 1701 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_step_voltage_if_increasing(rdev, new_ps, old_ps);
new_ps 1709 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_generate_low_step(rdev, new_ps);
new_ps 1711 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_calculate_stepping_parameters(rdev, new_ps);
new_ps 1721 drivers/gpu/drm/radeon/rv6xx_dpm.c ret = rv6xx_step_voltage_if_decreasing(rdev, new_ps, old_ps);
new_ps 1732 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_enable_dynamic_pcie_gen2(rdev, new_ps, true);
new_ps 1736 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_generate_stepping_table(rdev, new_ps);
new_ps 1749 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
new_ps 1432 drivers/gpu/drm/radeon/rv770_dpm.c struct radeon_ps *new_ps,
new_ps 1435 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
new_ps 1438 drivers/gpu/drm/radeon/rv770_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 1439 drivers/gpu/drm/radeon/rv770_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 1445 drivers/gpu/drm/radeon/rv770_dpm.c radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
new_ps 1449 drivers/gpu/drm/radeon/rv770_dpm.c struct radeon_ps *new_ps,
new_ps 1452 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
new_ps 1455 drivers/gpu/drm/radeon/rv770_dpm.c if ((new_ps->vclk == old_ps->vclk) &&
new_ps 1456 drivers/gpu/drm/radeon/rv770_dpm.c (new_ps->dclk == old_ps->dclk))
new_ps 1462 drivers/gpu/drm/radeon/rv770_dpm.c radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
new_ps 2039 drivers/gpu/drm/radeon/rv770_dpm.c struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
new_ps 2048 drivers/gpu/drm/radeon/rv770_dpm.c rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 2054 drivers/gpu/drm/radeon/rv770_dpm.c ret = rv770_upload_sw_state(rdev, new_ps);
new_ps 2059 drivers/gpu/drm/radeon/rv770_dpm.c r7xx_program_memory_timing_parameters(rdev, new_ps);
new_ps 2061 drivers/gpu/drm/radeon/rv770_dpm.c rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
new_ps 2073 drivers/gpu/drm/radeon/rv770_dpm.c rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
new_ps 2074 drivers/gpu/drm/radeon/rv770_dpm.c rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
new_ps 274 drivers/gpu/drm/radeon/rv770_dpm.h struct radeon_ps *new_ps,
new_ps 277 drivers/gpu/drm/radeon/rv770_dpm.h struct radeon_ps *new_ps,
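The ni_dpm.c hits above and the radeon si_dpm.c hits below share the same bracketing around the state switch: power containment and SMC CAC are disabled against the new state (the `false` calls), the new state is uploaded and programmed, then CAC and power containment are re-enabled (the `true` calls). Below is a hedged sketch of only that ordering; the stub functions, their signatures, and the error handling are illustrative, not the driver's.

#include <stdio.h>

struct ps { unsigned int sclk; };       /* stand-in power state */

/* Stubs standing in for the driver hooks; they only trace the ordering. */
static int enable_power_containment(struct ps *new_ps, int enable)
{
	(void)new_ps;
	printf("power containment %s\n", enable ? "on" : "off");
	return 0;
}

static int enable_smc_cac(struct ps *new_ps, int enable)
{
	(void)new_ps;
	printf("smc cac %s\n", enable ? "on" : "off");
	return 0;
}

static int upload_sw_state(struct ps *new_ps)
{
	printf("upload sw state (sclk=%u)\n", new_ps->sclk);
	return 0;
}

/* Disable, switch, re-enable, mirroring the call order in the listing. */
static int set_power_state(struct ps *new_ps)
{
	int ret;

	ret = enable_power_containment(new_ps, 0);
	if (ret)
		return ret;
	ret = enable_smc_cac(new_ps, 0);
	if (ret)
		return ret;

	ret = upload_sw_state(new_ps);  /* the actual state switch */
	if (ret)
		return ret;

	ret = enable_smc_cac(new_ps, 1);
	if (ret)
		return ret;
	return enable_power_containment(new_ps, 1);
}

int main(void)
{
	struct ps req = { .sclk = 800000 };

	return set_power_state(&req);
}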
new_ps 6524 drivers/gpu/drm/radeon/si_dpm.c struct radeon_ps *new_ps = &requested_ps;
new_ps 6526 drivers/gpu/drm/radeon/si_dpm.c ni_update_requested_ps(rdev, new_ps);
new_ps 6535 drivers/gpu/drm/radeon/si_dpm.c struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
new_ps 6544 drivers/gpu/drm/radeon/si_dpm.c ret = si_populate_smc_tdp_limits(rdev, new_ps);
new_ps 6547 drivers/gpu/drm/radeon/si_dpm.c ret = si_populate_smc_tdp_limits_2(rdev, new_ps);
new_ps 6562 drivers/gpu/drm/radeon/si_dpm.c struct radeon_ps *new_ps = &eg_pi->requested_rps;
new_ps 6577 drivers/gpu/drm/radeon/si_dpm.c si_request_link_speed_change_before_state_change(rdev, new_ps, old_ps);
new_ps 6578 drivers/gpu/drm/radeon/si_dpm.c ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 6579 drivers/gpu/drm/radeon/si_dpm.c ret = si_enable_power_containment(rdev, new_ps, false);
new_ps 6584 drivers/gpu/drm/radeon/si_dpm.c ret = si_enable_smc_cac(rdev, new_ps, false);
new_ps 6594 drivers/gpu/drm/radeon/si_dpm.c ret = si_upload_sw_state(rdev, new_ps);
new_ps 6610 drivers/gpu/drm/radeon/si_dpm.c ret = si_upload_mc_reg_table(rdev, new_ps);
new_ps 6616 drivers/gpu/drm/radeon/si_dpm.c ret = si_program_memory_timing_parameters(rdev, new_ps);
new_ps 6621 drivers/gpu/drm/radeon/si_dpm.c si_set_pcie_lane_width_in_smc(rdev, new_ps, old_ps);
new_ps 6633 drivers/gpu/drm/radeon/si_dpm.c ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
new_ps 6634 drivers/gpu/drm/radeon/si_dpm.c si_set_vce_clock(rdev, new_ps, old_ps);
new_ps 6636 drivers/gpu/drm/radeon/si_dpm.c si_notify_link_speed_change_after_state_change(rdev, new_ps, old_ps);
new_ps 6637 drivers/gpu/drm/radeon/si_dpm.c ret = si_set_power_state_conditionally_enable_ulv(rdev, new_ps);
new_ps 6642 drivers/gpu/drm/radeon/si_dpm.c ret = si_enable_smc_cac(rdev, new_ps, true);
new_ps 6647 drivers/gpu/drm/radeon/si_dpm.c ret = si_enable_power_containment(rdev, new_ps, true);
new_ps 6665 drivers/gpu/drm/radeon/si_dpm.c struct radeon_ps *new_ps = &eg_pi->requested_rps;
new_ps 6667 drivers/gpu/drm/radeon/si_dpm.c ni_update_current_ps(rdev, new_ps);
new_ps 667 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(rps);
new_ps 669 drivers/gpu/drm/radeon/sumo_dpm.c if (new_ps->flags & SUMO_POWERSTATE_FLAGS_BOOST_STATE) {
new_ps 670 drivers/gpu/drm/radeon/sumo_dpm.c pi->boost_pl = new_ps->levels[new_ps->num_levels - 1];
new_ps 681 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(new_rps);
new_ps 689 drivers/gpu/drm/radeon/sumo_dpm.c nbps1_new = (new_ps->flags & SUMO_POWERSTATE_FLAGS_FORCE_NBPS1_STATE) ? 1 : 0;
new_ps 699 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(new_rps);
new_ps 707 drivers/gpu/drm/radeon/sumo_dpm.c nbps1_new = (new_ps->flags & SUMO_POWERSTATE_FLAGS_FORCE_NBPS1_STATE)? 1 : 0;
new_ps 717 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(rps);
new_ps 720 drivers/gpu/drm/radeon/sumo_dpm.c if (new_ps->flags & SUMO_POWERSTATE_FLAGS_BOOST_STATE)
new_ps 739 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(rps);
new_ps 743 drivers/gpu/drm/radeon/sumo_dpm.c dpm_ctrl4 |= (1 << (new_ps->num_levels - 1));
new_ps 745 drivers/gpu/drm/radeon/sumo_dpm.c if (new_ps->flags & SUMO_POWERSTATE_FLAGS_BOOST_STATE)
new_ps 756 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(new_rps);
new_ps 761 drivers/gpu/drm/radeon/sumo_dpm.c for (i = 0; i < new_ps->num_levels; i++) {
new_ps 762 drivers/gpu/drm/radeon/sumo_dpm.c sumo_program_power_level(rdev, &new_ps->levels[i], i);
new_ps 766 drivers/gpu/drm/radeon/sumo_dpm.c for (i = new_ps->num_levels; i < n_current_state_levels; i++)
new_ps 769 drivers/gpu/drm/radeon/sumo_dpm.c if (new_ps->flags & SUMO_POWERSTATE_FLAGS_BOOST_STATE)
new_ps 837 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(new_rps);
new_ps 844 drivers/gpu/drm/radeon/sumo_dpm.c if (new_ps->levels[new_ps->num_levels - 1].sclk >=
new_ps 855 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(new_rps);
new_ps 862 drivers/gpu/drm/radeon/sumo_dpm.c if (new_ps->levels[new_ps->num_levels - 1].sclk <
new_ps 988 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(rps);
new_ps 991 drivers/gpu/drm/radeon/sumo_dpm.c if (new_ps->flags & SUMO_POWERSTATE_FLAGS_FORCE_NBPS1_STATE)
new_ps 1183 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(rps);
new_ps 1187 drivers/gpu/drm/radeon/sumo_dpm.c pi->current_ps = *new_ps;
new_ps 1194 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *new_ps = sumo_get_ps(rps);
new_ps 1198 drivers/gpu/drm/radeon/sumo_dpm.c pi->requested_ps = *new_ps;
new_ps 1284 drivers/gpu/drm/radeon/sumo_dpm.c struct radeon_ps *new_ps = &requested_ps;
new_ps 1286 drivers/gpu/drm/radeon/sumo_dpm.c sumo_update_requested_ps(rdev, new_ps);
new_ps 1299 drivers/gpu/drm/radeon/sumo_dpm.c struct radeon_ps *new_ps = &pi->requested_rps;
new_ps 1303 drivers/gpu/drm/radeon/sumo_dpm.c sumo_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 1305 drivers/gpu/drm/radeon/sumo_dpm.c sumo_enable_boost(rdev, new_ps, false);
new_ps 1306 drivers/gpu/drm/radeon/sumo_dpm.c sumo_patch_boost_state(rdev, new_ps);
new_ps 1309 drivers/gpu/drm/radeon/sumo_dpm.c sumo_pre_notify_alt_vddnb_change(rdev, new_ps, old_ps);
new_ps 1314 drivers/gpu/drm/radeon/sumo_dpm.c sumo_program_power_levels_0_to_n(rdev, new_ps, old_ps);
new_ps 1315 drivers/gpu/drm/radeon/sumo_dpm.c sumo_program_wl(rdev, new_ps);
new_ps 1316 drivers/gpu/drm/radeon/sumo_dpm.c sumo_program_bsp(rdev, new_ps);
new_ps 1317 drivers/gpu/drm/radeon/sumo_dpm.c sumo_program_at(rdev, new_ps);
new_ps 1318 drivers/gpu/drm/radeon/sumo_dpm.c sumo_force_nbp_state(rdev, new_ps);
new_ps 1322 drivers/gpu/drm/radeon/sumo_dpm.c sumo_post_notify_alt_vddnb_change(rdev, new_ps, old_ps);
new_ps 1325 drivers/gpu/drm/radeon/sumo_dpm.c sumo_enable_boost(rdev, new_ps, true);
new_ps 1327 drivers/gpu/drm/radeon/sumo_dpm.c sumo_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
new_ps 1335 drivers/gpu/drm/radeon/sumo_dpm.c struct radeon_ps *new_ps = &pi->requested_rps;
new_ps 1337 drivers/gpu/drm/radeon/sumo_dpm.c sumo_update_current_ps(rdev, new_ps);
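The sumo_dpm.c hits at lines 756-766 (and the trinity_dpm.c hits at 844-854 below) show how the new state's performance levels are written: every level of the new state is programmed, then any level indices still in use by the larger old state are disabled. The sketch below models that loop with simplified level/state types; whether each level is also explicitly enabled inside the first loop is an assumption here, not something the listing shows.

#include <stdio.h>

#define MAX_LEVELS 8

struct power_level { unsigned int sclk; };

struct sumo_like_ps {
	unsigned int num_levels;
	struct power_level levels[MAX_LEVELS];
};

static void program_power_level(const struct power_level *pl, unsigned int idx)
{
	printf("level %u: sclk=%u\n", idx, pl->sclk);
}

static void enable_power_level(unsigned int idx, int enable)
{
	printf("level %u: %s\n", idx, enable ? "enabled" : "disabled");
}

/*
 * Program levels 0..n-1 of the new state, then disable the indices the old
 * (current) state was still using beyond that, as in the listing's
 * "for (i = new_ps->num_levels; i < n_current_state_levels; i++)" loop.
 */
static void program_power_levels_0_to_n(const struct sumo_like_ps *new_ps,
					unsigned int n_current_state_levels)
{
	unsigned int i;

	for (i = 0; i < new_ps->num_levels; i++) {
		program_power_level(&new_ps->levels[i], i);
		enable_power_level(i, 1);      /* assumed enable step */
	}
	for (i = new_ps->num_levels; i < n_current_state_levels; i++)
		enable_power_level(i, 0);
}

int main(void)
{
	struct sumo_like_ps req = { .num_levels = 2,
				    .levels = { { 300000 }, { 600000 } } };

	program_power_levels_0_to_n(&req, 4);  /* old state had 4 levels */
	return 0;
}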
new_ps 844 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *new_ps = trinity_get_ps(new_rps);
new_ps 849 drivers/gpu/drm/radeon/trinity_dpm.c for (i = 0; i < new_ps->num_levels; i++) {
new_ps 850 drivers/gpu/drm/radeon/trinity_dpm.c trinity_program_power_level(rdev, &new_ps->levels[i], i);
new_ps 854 drivers/gpu/drm/radeon/trinity_dpm.c for (i = new_ps->num_levels; i < n_current_state_levels; i++)
new_ps 967 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *new_ps = trinity_get_ps(new_rps);
new_ps 970 drivers/gpu/drm/radeon/trinity_dpm.c if (new_ps->levels[new_ps->num_levels - 1].sclk >=
new_ps 981 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *new_ps = trinity_get_ps(new_rps);
new_ps 984 drivers/gpu/drm/radeon/trinity_dpm.c if (new_ps->levels[new_ps->num_levels - 1].sclk <
new_ps 1071 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *new_ps = trinity_get_ps(rps);
new_ps 1075 drivers/gpu/drm/radeon/trinity_dpm.c pi->current_ps = *new_ps;
new_ps 1082 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *new_ps = trinity_get_ps(rps);
new_ps 1086 drivers/gpu/drm/radeon/trinity_dpm.c pi->requested_ps = *new_ps;
new_ps 1188 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *new_ps = trinity_get_ps(rps);
new_ps 1194 drivers/gpu/drm/radeon/trinity_dpm.c nbpsconfig |= (Dpm0PgNbPsLo(new_ps->Dpm0PgNbPsLo) |
new_ps 1195 drivers/gpu/drm/radeon/trinity_dpm.c Dpm0PgNbPsHi(new_ps->Dpm0PgNbPsHi) |
new_ps 1196 drivers/gpu/drm/radeon/trinity_dpm.c DpmXNbPsLo(new_ps->DpmXNbPsLo) |
new_ps 1197 drivers/gpu/drm/radeon/trinity_dpm.c DpmXNbPsHi(new_ps->DpmXNbPsHi));
new_ps 1237 drivers/gpu/drm/radeon/trinity_dpm.c struct radeon_ps *new_ps = &requested_ps;
new_ps 1239 drivers/gpu/drm/radeon/trinity_dpm.c trinity_update_requested_ps(rdev, new_ps);
new_ps 1251 drivers/gpu/drm/radeon/trinity_dpm.c struct radeon_ps *new_ps = &pi->requested_rps;
new_ps 1258 drivers/gpu/drm/radeon/trinity_dpm.c trinity_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
new_ps 1262 drivers/gpu/drm/radeon/trinity_dpm.c trinity_setup_nbp_sim(rdev, new_ps);
new_ps 1263 drivers/gpu/drm/radeon/trinity_dpm.c trinity_program_power_levels_0_to_n(rdev, new_ps, old_ps);
new_ps 1266 drivers/gpu/drm/radeon/trinity_dpm.c trinity_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
new_ps 1267 drivers/gpu/drm/radeon/trinity_dpm.c trinity_set_vce_clock(rdev, new_ps, old_ps);
new_ps 1277 drivers/gpu/drm/radeon/trinity_dpm.c struct radeon_ps *new_ps = &pi->requested_rps;
new_ps 1279 drivers/gpu/drm/radeon/trinity_dpm.c trinity_update_current_ps(rdev, new_ps);
new_ps 79 drivers/net/wireless/ath/carl9170/rx.c bool new_ps;
new_ps 83 drivers/net/wireless/ath/carl9170/rx.c new_ps = (ps & CARL9170_PSM_COUNTER) != CARL9170_PSM_WAKE;
new_ps 84 drivers/net/wireless/ath/carl9170/rx.c if (ar->ps.state != new_ps) {
new_ps 85 drivers/net/wireless/ath/carl9170/rx.c if (!new_ps) {
new_ps 92 drivers/net/wireless/ath/carl9170/rx.c ar->ps.state = new_ps;
new_ps 230 drivers/net/wireless/ath/dfs_pri_detector.c struct pri_sequence ps, *new_ps;
new_ps 286 drivers/net/wireless/ath/dfs_pri_detector.c new_ps = pool_get_pseq_elem();
new_ps 287 drivers/net/wireless/ath/dfs_pri_detector.c if (new_ps == NULL) {
new_ps 288 drivers/net/wireless/ath/dfs_pri_detector.c new_ps = kmalloc(sizeof(*new_ps), GFP_ATOMIC);
new_ps 289 drivers/net/wireless/ath/dfs_pri_detector.c if (new_ps == NULL) {
new_ps 296 drivers/net/wireless/ath/dfs_pri_detector.c memcpy(new_ps, &ps, sizeof(ps));
new_ps 297 drivers/net/wireless/ath/dfs_pri_detector.c INIT_LIST_HEAD(&new_ps->head);
new_ps 298 drivers/net/wireless/ath/dfs_pri_detector.c list_add(&new_ps->head, &pde->sequences);
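The dfs_pri_detector.c hits show the allocation pattern for a newly detected PRI sequence: take an element from a preallocated pool, fall back to an atomic kmalloc() when the pool is empty, copy the stack-local candidate into it, and link it into the detector's sequence list. The user-space sketch below models that pool-first, heap-fallback scheme; the pri_sequence fields and the singly linked list standing in for the kernel's list_head are simplifications for illustration.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct pri_sequence {
	int pri;                        /* representative fields only */
	int count;
	struct pri_sequence *next;      /* stand-in for struct list_head head */
};

#define POOL_SIZE 4
static struct pri_sequence pool[POOL_SIZE];
static int pool_used;

/* Stand-in for pool_get_pseq_elem(): hand out preallocated elements first. */
static struct pri_sequence *pool_get_pseq_elem(void)
{
	return pool_used < POOL_SIZE ? &pool[pool_used++] : NULL;
}

/* Copy the on-stack candidate "ps" into storage and link it into the list. */
static int add_sequence(struct pri_sequence **sequences,
			const struct pri_sequence *ps)
{
	struct pri_sequence *new_ps = pool_get_pseq_elem();

	if (new_ps == NULL) {
		new_ps = malloc(sizeof(*new_ps)); /* kmalloc(..., GFP_ATOMIC) in the driver */
		if (new_ps == NULL)
			return -1;                /* give up on this sequence */
	}

	memcpy(new_ps, ps, sizeof(*ps));
	new_ps->next = *sequences;                /* list_add() equivalent */
	*sequences = new_ps;
	return 0;
}

int main(void)
{
	struct pri_sequence *sequences = NULL;
	struct pri_sequence candidate = { .pri = 1428, .count = 3, .next = NULL };

	add_sequence(&sequences, &candidate);
	printf("head pri=%d count=%d\n", sequences->pri, sequences->count);
	return 0;
}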