Lines Matching refs:rdev

114 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh);
115 static void si_pcie_gen3_enable(struct radeon_device *rdev);
116 static void si_program_aspm(struct radeon_device *rdev);
117 extern void sumo_rlc_fini(struct radeon_device *rdev);
118 extern int sumo_rlc_init(struct radeon_device *rdev);
119 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
120 extern void r600_ih_ring_fini(struct radeon_device *rdev);
121 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
122 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
123 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
124 extern u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev);
125 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
126 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
127 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
129 static void si_init_pg(struct radeon_device *rdev);
130 static void si_init_cg(struct radeon_device *rdev);
131 static void si_fini_pg(struct radeon_device *rdev);
132 static void si_fini_cg(struct radeon_device *rdev);
133 static void si_rlc_stop(struct radeon_device *rdev);
1198 static void si_init_golden_registers(struct radeon_device *rdev) in si_init_golden_registers() argument
1200 switch (rdev->family) { in si_init_golden_registers()
1202 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1205 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1208 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1211 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1216 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1219 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1222 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1227 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1230 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1233 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1236 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1241 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1244 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1247 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1252 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1255 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1258 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1277 int si_get_allowed_info_register(struct radeon_device *rdev, in si_get_allowed_info_register() argument
1308 u32 si_get_xclk(struct radeon_device *rdev) in si_get_xclk() argument
1310 u32 reference_clock = rdev->clock.spll.reference_freq; in si_get_xclk()
1325 int si_get_temp(struct radeon_device *rdev) in si_get_temp() argument
1541 int si_mc_load_microcode(struct radeon_device *rdev) in si_mc_load_microcode() argument
1550 if (!rdev->mc_fw) in si_mc_load_microcode()
1553 if (rdev->new_fw) { in si_mc_load_microcode()
1555 (const struct mc_firmware_header_v1_0 *)rdev->mc_fw->data; in si_mc_load_microcode()
1560 (rdev->mc_fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); in si_mc_load_microcode()
1563 (rdev->mc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in si_mc_load_microcode()
1565 ucode_size = rdev->mc_fw->size / 4; in si_mc_load_microcode()
1567 switch (rdev->family) { in si_mc_load_microcode()
1590 fw_data = (const __be32 *)rdev->mc_fw->data; in si_mc_load_microcode()
1607 if (rdev->new_fw) { in si_mc_load_microcode()
1617 if (rdev->new_fw) in si_mc_load_microcode()
1629 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
1634 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
1647 static int si_init_microcode(struct radeon_device *rdev) in si_init_microcode() argument
1659 switch (rdev->family) { in si_init_microcode()
1719 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in si_init_microcode()
1722 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in si_init_microcode()
1725 if (rdev->pfp_fw->size != pfp_req_size) { in si_init_microcode()
1728 rdev->pfp_fw->size, fw_name); in si_init_microcode()
1733 err = radeon_ucode_validate(rdev->pfp_fw); in si_init_microcode()
1745 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in si_init_microcode()
1748 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in si_init_microcode()
1751 if (rdev->me_fw->size != me_req_size) { in si_init_microcode()
1754 rdev->me_fw->size, fw_name); in si_init_microcode()
1758 err = radeon_ucode_validate(rdev->me_fw); in si_init_microcode()
1770 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); in si_init_microcode()
1773 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); in si_init_microcode()
1776 if (rdev->ce_fw->size != ce_req_size) { in si_init_microcode()
1779 rdev->ce_fw->size, fw_name); in si_init_microcode()
1783 err = radeon_ucode_validate(rdev->ce_fw); in si_init_microcode()
1795 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in si_init_microcode()
1798 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in si_init_microcode()
1801 if (rdev->rlc_fw->size != rlc_req_size) { in si_init_microcode()
1804 rdev->rlc_fw->size, fw_name); in si_init_microcode()
1808 err = radeon_ucode_validate(rdev->rlc_fw); in si_init_microcode()
1820 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1823 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1826 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1830 if ((rdev->mc_fw->size != mc_req_size) && in si_init_microcode()
1831 (rdev->mc_fw->size != mc2_req_size)) { in si_init_microcode()
1834 rdev->mc_fw->size, fw_name); in si_init_microcode()
1837 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size); in si_init_microcode()
1839 err = radeon_ucode_validate(rdev->mc_fw); in si_init_microcode()
1851 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in si_init_microcode()
1854 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in si_init_microcode()
1859 release_firmware(rdev->smc_fw); in si_init_microcode()
1860 rdev->smc_fw = NULL; in si_init_microcode()
1862 } else if (rdev->smc_fw->size != smc_req_size) { in si_init_microcode()
1865 rdev->smc_fw->size, fw_name); in si_init_microcode()
1869 err = radeon_ucode_validate(rdev->smc_fw); in si_init_microcode()
1881 rdev->new_fw = false; in si_init_microcode()
1886 rdev->new_fw = true; in si_init_microcode()
1894 release_firmware(rdev->pfp_fw); in si_init_microcode()
1895 rdev->pfp_fw = NULL; in si_init_microcode()
1896 release_firmware(rdev->me_fw); in si_init_microcode()
1897 rdev->me_fw = NULL; in si_init_microcode()
1898 release_firmware(rdev->ce_fw); in si_init_microcode()
1899 rdev->ce_fw = NULL; in si_init_microcode()
1900 release_firmware(rdev->rlc_fw); in si_init_microcode()
1901 rdev->rlc_fw = NULL; in si_init_microcode()
1902 release_firmware(rdev->mc_fw); in si_init_microcode()
1903 rdev->mc_fw = NULL; in si_init_microcode()
1904 release_firmware(rdev->smc_fw); in si_init_microcode()
1905 rdev->smc_fw = NULL; in si_init_microcode()
1911 static u32 dce6_line_buffer_adjust(struct radeon_device *rdev, in dce6_line_buffer_adjust() argument
1949 for (i = 0; i < rdev->usec_timeout; i++) { in dce6_line_buffer_adjust()
1970 static u32 si_get_number_of_dram_channels(struct radeon_device *rdev) in si_get_number_of_dram_channels() argument
2251 static void dce6_program_watermarks(struct radeon_device *rdev, in dce6_program_watermarks() argument
2273 if (rdev->family == CHIP_ARUBA) in dce6_program_watermarks()
2274 dram_channels = evergreen_get_number_of_dram_channels(rdev); in dce6_program_watermarks()
2276 dram_channels = si_get_number_of_dram_channels(rdev); in dce6_program_watermarks()
2279 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in dce6_program_watermarks()
2281 radeon_dpm_get_mclk(rdev, false) * 10; in dce6_program_watermarks()
2283 radeon_dpm_get_sclk(rdev, false) * 10; in dce6_program_watermarks()
2285 wm_high.yclk = rdev->pm.current_mclk * 10; in dce6_program_watermarks()
2286 wm_high.sclk = rdev->pm.current_sclk * 10; in dce6_program_watermarks()
2306 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in dce6_program_watermarks()
2308 radeon_dpm_get_mclk(rdev, true) * 10; in dce6_program_watermarks()
2310 radeon_dpm_get_sclk(rdev, true) * 10; in dce6_program_watermarks()
2312 wm_low.yclk = rdev->pm.current_mclk * 10; in dce6_program_watermarks()
2313 wm_low.sclk = rdev->pm.current_sclk * 10; in dce6_program_watermarks()
2342 (rdev->disp_priority == 2)) { in dce6_program_watermarks()
2350 (rdev->disp_priority == 2)) { in dce6_program_watermarks()
2411 void dce6_bandwidth_update(struct radeon_device *rdev) in dce6_bandwidth_update() argument
2418 if (!rdev->mode_info.mode_config_initialized) in dce6_bandwidth_update()
2421 radeon_update_display_priority(rdev); in dce6_bandwidth_update()
2423 for (i = 0; i < rdev->num_crtc; i++) { in dce6_bandwidth_update()
2424 if (rdev->mode_info.crtcs[i]->base.enabled) in dce6_bandwidth_update()
2427 for (i = 0; i < rdev->num_crtc; i += 2) { in dce6_bandwidth_update()
2428 mode0 = &rdev->mode_info.crtcs[i]->base.mode; in dce6_bandwidth_update()
2429 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode; in dce6_bandwidth_update()
2430 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1); in dce6_bandwidth_update()
2431 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads); in dce6_bandwidth_update()
2432 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0); in dce6_bandwidth_update()
2433 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads); in dce6_bandwidth_update()
2440 static void si_tiling_mode_table_init(struct radeon_device *rdev) in si_tiling_mode_table_init() argument
2445 switch (rdev->config.si.mem_row_size_in_kb) { in si_tiling_mode_table_init()
2458 if ((rdev->family == CHIP_TAHITI) || in si_tiling_mode_table_init()
2459 (rdev->family == CHIP_PITCAIRN)) { in si_tiling_mode_table_init()
2696 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden; in si_tiling_mode_table_init()
2699 } else if ((rdev->family == CHIP_VERDE) || in si_tiling_mode_table_init()
2700 (rdev->family == CHIP_OLAND) || in si_tiling_mode_table_init()
2701 (rdev->family == CHIP_HAINAN)) { in si_tiling_mode_table_init()
2938 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden; in si_tiling_mode_table_init()
2942 DRM_ERROR("unknown asic: 0x%x\n", rdev->family); in si_tiling_mode_table_init()
2945 static void si_select_se_sh(struct radeon_device *rdev, in si_select_se_sh() argument
2972 static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh) in si_get_cu_enabled() argument
2990 static void si_setup_spi(struct radeon_device *rdev, in si_setup_spi() argument
2999 si_select_se_sh(rdev, i, j); in si_setup_spi()
3001 active_cu = si_get_cu_enabled(rdev, cu_per_sh); in si_setup_spi()
3014 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_spi()
3017 static u32 si_get_rb_disabled(struct radeon_device *rdev, in si_get_rb_disabled() argument
3037 static void si_setup_rb(struct radeon_device *rdev, in si_setup_rb() argument
3048 si_select_se_sh(rdev, i, j); in si_setup_rb()
3049 data = si_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se); in si_setup_rb()
3053 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_rb()
3062 rdev->config.si.backend_enable_mask = enabled_rbs; in si_setup_rb()
3065 si_select_se_sh(rdev, i, 0xffffffff); in si_setup_rb()
3084 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_rb()
3087 static void si_gpu_init(struct radeon_device *rdev) in si_gpu_init() argument
3096 switch (rdev->family) { in si_gpu_init()
3098 rdev->config.si.max_shader_engines = 2; in si_gpu_init()
3099 rdev->config.si.max_tile_pipes = 12; in si_gpu_init()
3100 rdev->config.si.max_cu_per_sh = 8; in si_gpu_init()
3101 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3102 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3103 rdev->config.si.max_texture_channel_caches = 12; in si_gpu_init()
3104 rdev->config.si.max_gprs = 256; in si_gpu_init()
3105 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3106 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3108 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3109 rdev->config.si.sc_prim_fifo_size_backend = 0x100; in si_gpu_init()
3110 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3111 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3115 rdev->config.si.max_shader_engines = 2; in si_gpu_init()
3116 rdev->config.si.max_tile_pipes = 8; in si_gpu_init()
3117 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3118 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3119 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3120 rdev->config.si.max_texture_channel_caches = 8; in si_gpu_init()
3121 rdev->config.si.max_gprs = 256; in si_gpu_init()
3122 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3123 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3125 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3126 rdev->config.si.sc_prim_fifo_size_backend = 0x100; in si_gpu_init()
3127 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3128 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3133 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3134 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3135 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3136 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3137 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3138 rdev->config.si.max_texture_channel_caches = 4; in si_gpu_init()
3139 rdev->config.si.max_gprs = 256; in si_gpu_init()
3140 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3141 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3143 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3144 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3145 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3146 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3150 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3151 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3152 rdev->config.si.max_cu_per_sh = 6; in si_gpu_init()
3153 rdev->config.si.max_sh_per_se = 1; in si_gpu_init()
3154 rdev->config.si.max_backends_per_se = 2; in si_gpu_init()
3155 rdev->config.si.max_texture_channel_caches = 4; in si_gpu_init()
3156 rdev->config.si.max_gprs = 256; in si_gpu_init()
3157 rdev->config.si.max_gs_threads = 16; in si_gpu_init()
3158 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3160 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3161 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3162 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3163 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3167 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3168 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3169 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3170 rdev->config.si.max_sh_per_se = 1; in si_gpu_init()
3171 rdev->config.si.max_backends_per_se = 1; in si_gpu_init()
3172 rdev->config.si.max_texture_channel_caches = 2; in si_gpu_init()
3173 rdev->config.si.max_gprs = 256; in si_gpu_init()
3174 rdev->config.si.max_gs_threads = 16; in si_gpu_init()
3175 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3177 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3178 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3179 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3180 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3198 evergreen_fix_pci_max_read_req_size(rdev); in si_gpu_init()
3205 rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes; in si_gpu_init()
3206 rdev->config.si.mem_max_burst_length_bytes = 256; in si_gpu_init()
3208 rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; in si_gpu_init()
3209 if (rdev->config.si.mem_row_size_in_kb > 4) in si_gpu_init()
3210 rdev->config.si.mem_row_size_in_kb = 4; in si_gpu_init()
3212 rdev->config.si.shader_engine_tile_size = 32; in si_gpu_init()
3213 rdev->config.si.num_gpus = 1; in si_gpu_init()
3214 rdev->config.si.multi_gpu_tile_size = 64; in si_gpu_init()
3218 switch (rdev->config.si.mem_row_size_in_kb) { in si_gpu_init()
3238 rdev->config.si.tile_config = 0; in si_gpu_init()
3239 switch (rdev->config.si.num_tile_pipes) { in si_gpu_init()
3241 rdev->config.si.tile_config |= (0 << 0); in si_gpu_init()
3244 rdev->config.si.tile_config |= (1 << 0); in si_gpu_init()
3247 rdev->config.si.tile_config |= (2 << 0); in si_gpu_init()
3252 rdev->config.si.tile_config |= (3 << 0); in si_gpu_init()
3257 rdev->config.si.tile_config |= 0 << 4; in si_gpu_init()
3260 rdev->config.si.tile_config |= 1 << 4; in si_gpu_init()
3264 rdev->config.si.tile_config |= 2 << 4; in si_gpu_init()
3267 rdev->config.si.tile_config |= in si_gpu_init()
3269 rdev->config.si.tile_config |= in si_gpu_init()
3278 if (rdev->has_uvd) { in si_gpu_init()
3284 si_tiling_mode_table_init(rdev); in si_gpu_init()
3286 si_setup_rb(rdev, rdev->config.si.max_shader_engines, in si_gpu_init()
3287 rdev->config.si.max_sh_per_se, in si_gpu_init()
3288 rdev->config.si.max_backends_per_se); in si_gpu_init()
3290 si_setup_spi(rdev, rdev->config.si.max_shader_engines, in si_gpu_init()
3291 rdev->config.si.max_sh_per_se, in si_gpu_init()
3292 rdev->config.si.max_cu_per_sh); in si_gpu_init()
3294 rdev->config.si.active_cus = 0; in si_gpu_init()
3295 for (i = 0; i < rdev->config.si.max_shader_engines; i++) { in si_gpu_init()
3296 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) { in si_gpu_init()
3297 rdev->config.si.active_cus += in si_gpu_init()
3298 hweight32(si_get_cu_active_bitmap(rdev, i, j)); in si_gpu_init()
3312 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) | in si_gpu_init()
3313 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) | in si_gpu_init()
3314 SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) | in si_gpu_init()
3315 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size))); in si_gpu_init()
3356 static void si_scratch_init(struct radeon_device *rdev) in si_scratch_init() argument
3360 rdev->scratch.num_reg = 7; in si_scratch_init()
3361 rdev->scratch.reg_base = SCRATCH_REG0; in si_scratch_init()
3362 for (i = 0; i < rdev->scratch.num_reg; i++) { in si_scratch_init()
3363 rdev->scratch.free[i] = true; in si_scratch_init()
3364 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4); in si_scratch_init()
3368 void si_fence_ring_emit(struct radeon_device *rdev, in si_fence_ring_emit() argument
3371 struct radeon_ring *ring = &rdev->ring[fence->ring]; in si_fence_ring_emit()
3372 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in si_fence_ring_emit()
3398 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in si_ring_ib_execute() argument
3400 struct radeon_ring *ring = &rdev->ring[ib->ring]; in si_ring_ib_execute()
3418 } else if (rdev->wb.enabled) { in si_ring_ib_execute()
3458 static void si_cp_enable(struct radeon_device *rdev, bool enable) in si_cp_enable() argument
3463 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in si_cp_enable()
3464 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); in si_cp_enable()
3467 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in si_cp_enable()
3468 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_enable()
3469 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_enable()
3474 static int si_cp_load_microcode(struct radeon_device *rdev) in si_cp_load_microcode() argument
3478 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw) in si_cp_load_microcode()
3481 si_cp_enable(rdev, false); in si_cp_load_microcode()
3483 if (rdev->new_fw) { in si_cp_load_microcode()
3485 (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data; in si_cp_load_microcode()
3487 (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data; in si_cp_load_microcode()
3489 (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data; in si_cp_load_microcode()
3499 (rdev->pfp_fw->data + le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3508 (rdev->ce_fw->data + le32_to_cpu(ce_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3517 (rdev->me_fw->data + le32_to_cpu(me_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3527 fw_data = (const __be32 *)rdev->pfp_fw->data; in si_cp_load_microcode()
3534 fw_data = (const __be32 *)rdev->ce_fw->data; in si_cp_load_microcode()
3541 fw_data = (const __be32 *)rdev->me_fw->data; in si_cp_load_microcode()
3555 static int si_cp_start(struct radeon_device *rdev) in si_cp_start() argument
3557 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_start()
3560 r = radeon_ring_lock(rdev, ring, 7 + 4); in si_cp_start()
3569 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1); in si_cp_start()
3579 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3581 si_cp_enable(rdev, true); in si_cp_start()
3583 r = radeon_ring_lock(rdev, ring, si_default_size + 10); in si_cp_start()
3608 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3611 ring = &rdev->ring[i]; in si_cp_start()
3612 r = radeon_ring_lock(rdev, ring, 2); in si_cp_start()
3618 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3624 static void si_cp_fini(struct radeon_device *rdev) in si_cp_fini() argument
3627 si_cp_enable(rdev, false); in si_cp_fini()
3629 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_fini()
3630 radeon_ring_fini(rdev, ring); in si_cp_fini()
3631 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3633 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_cp_fini()
3634 radeon_ring_fini(rdev, ring); in si_cp_fini()
3635 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3637 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_cp_fini()
3638 radeon_ring_fini(rdev, ring); in si_cp_fini()
3639 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3642 static int si_cp_resume(struct radeon_device *rdev) in si_cp_resume() argument
3649 si_enable_gui_idle_interrupt(rdev, false); in si_cp_resume()
3658 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in si_cp_resume()
3662 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_resume()
3676 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3677 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3679 if (rdev->wb.enabled) in si_cp_resume()
3693 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_cp_resume()
3707 WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3708 WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3717 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_cp_resume()
3731 WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3732 WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3740 si_cp_start(rdev); in si_cp_resume()
3741 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; in si_cp_resume()
3742 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true; in si_cp_resume()
3743 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true; in si_cp_resume()
3744 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); in si_cp_resume()
3746 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in si_cp_resume()
3747 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_resume()
3748 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_resume()
3751 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]); in si_cp_resume()
3753 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_resume()
3755 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]); in si_cp_resume()
3757 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_resume()
3760 si_enable_gui_idle_interrupt(rdev, true); in si_cp_resume()
3762 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in si_cp_resume()
3763 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); in si_cp_resume()
3768 u32 si_gpu_check_soft_reset(struct radeon_device *rdev) in si_gpu_check_soft_reset() argument
3832 if (evergreen_is_display_hung(rdev)) in si_gpu_check_soft_reset()
3849 static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) in si_gpu_soft_reset() argument
3858 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); in si_gpu_soft_reset()
3860 evergreen_print_gpu_status_regs(rdev); in si_gpu_soft_reset()
3861 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in si_gpu_soft_reset()
3863 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in si_gpu_soft_reset()
3867 si_fini_pg(rdev); in si_gpu_soft_reset()
3868 si_fini_cg(rdev); in si_gpu_soft_reset()
3871 si_rlc_stop(rdev); in si_gpu_soft_reset()
3891 evergreen_mc_stop(rdev, &save); in si_gpu_soft_reset()
3892 if (evergreen_mc_wait_for_idle(rdev)) { in si_gpu_soft_reset()
3893 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in si_gpu_soft_reset()
3947 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); in si_gpu_soft_reset()
3961 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in si_gpu_soft_reset()
3975 evergreen_mc_resume(rdev, &save); in si_gpu_soft_reset()
3978 evergreen_print_gpu_status_regs(rdev); in si_gpu_soft_reset()
3981 static void si_set_clk_bypass_mode(struct radeon_device *rdev) in si_set_clk_bypass_mode() argument
3993 for (i = 0; i < rdev->usec_timeout; i++) { in si_set_clk_bypass_mode()
4008 static void si_spll_powerdown(struct radeon_device *rdev) in si_spll_powerdown() argument
4029 static void si_gpu_pci_config_reset(struct radeon_device *rdev) in si_gpu_pci_config_reset() argument
4034 dev_info(rdev->dev, "GPU pci config reset\n"); in si_gpu_pci_config_reset()
4039 si_fini_pg(rdev); in si_gpu_pci_config_reset()
4040 si_fini_cg(rdev); in si_gpu_pci_config_reset()
4055 si_rlc_stop(rdev); in si_gpu_pci_config_reset()
4060 evergreen_mc_stop(rdev, &save); in si_gpu_pci_config_reset()
4061 if (evergreen_mc_wait_for_idle(rdev)) { in si_gpu_pci_config_reset()
4062 dev_warn(rdev->dev, "Wait for MC idle timed out !\n"); in si_gpu_pci_config_reset()
4066 si_set_clk_bypass_mode(rdev); in si_gpu_pci_config_reset()
4068 si_spll_powerdown(rdev); in si_gpu_pci_config_reset()
4070 pci_clear_master(rdev->pdev); in si_gpu_pci_config_reset()
4072 radeon_pci_config_reset(rdev); in si_gpu_pci_config_reset()
4074 for (i = 0; i < rdev->usec_timeout; i++) { in si_gpu_pci_config_reset()
4081 int si_asic_reset(struct radeon_device *rdev) in si_asic_reset() argument
4085 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4088 r600_set_bios_scratch_engine_hung(rdev, true); in si_asic_reset()
4091 si_gpu_soft_reset(rdev, reset_mask); in si_asic_reset()
4093 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4097 si_gpu_pci_config_reset(rdev); in si_asic_reset()
4099 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4102 r600_set_bios_scratch_engine_hung(rdev, false); in si_asic_reset()
4116 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) in si_gfx_is_lockup() argument
4118 u32 reset_mask = si_gpu_check_soft_reset(rdev); in si_gfx_is_lockup()
4123 radeon_ring_lockup_update(rdev, ring); in si_gfx_is_lockup()
4126 return radeon_ring_test_lockup(rdev, ring); in si_gfx_is_lockup()
4130 static void si_mc_program(struct radeon_device *rdev) in si_mc_program() argument
4146 evergreen_mc_stop(rdev, &save); in si_mc_program()
4147 if (radeon_mc_wait_for_idle(rdev)) { in si_mc_program()
4148 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in si_mc_program()
4150 if (!ASIC_IS_NODCE(rdev)) in si_mc_program()
4155 rdev->mc.vram_start >> 12); in si_mc_program()
4157 rdev->mc.vram_end >> 12); in si_mc_program()
4159 rdev->vram_scratch.gpu_addr >> 12); in si_mc_program()
4160 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; in si_mc_program()
4161 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); in si_mc_program()
4164 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); in si_mc_program()
4170 if (radeon_mc_wait_for_idle(rdev)) { in si_mc_program()
4171 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in si_mc_program()
4173 evergreen_mc_resume(rdev, &save); in si_mc_program()
4174 if (!ASIC_IS_NODCE(rdev)) { in si_mc_program()
4177 rv515_vga_render_disable(rdev); in si_mc_program()
4181 void si_vram_gtt_location(struct radeon_device *rdev, in si_vram_gtt_location() argument
4186 dev_warn(rdev->dev, "limiting VRAM\n"); in si_vram_gtt_location()
4190 radeon_vram_location(rdev, &rdev->mc, 0); in si_vram_gtt_location()
4191 rdev->mc.gtt_base_align = 0; in si_vram_gtt_location()
4192 radeon_gtt_location(rdev, mc); in si_vram_gtt_location()
4195 static int si_mc_init(struct radeon_device *rdev) in si_mc_init() argument
4201 rdev->mc.vram_is_ddr = true; in si_mc_init()
4241 rdev->mc.vram_width = numchan * chansize; in si_mc_init()
4243 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); in si_mc_init()
4244 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); in si_mc_init()
4253 rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL; in si_mc_init()
4254 rdev->mc.real_vram_size = rdev->mc.mc_vram_size; in si_mc_init()
4255 rdev->mc.visible_vram_size = rdev->mc.aper_size; in si_mc_init()
4256 si_vram_gtt_location(rdev, &rdev->mc); in si_mc_init()
4257 radeon_update_bandwidth_info(rdev); in si_mc_init()
4265 void si_pcie_gart_tlb_flush(struct radeon_device *rdev) in si_pcie_gart_tlb_flush() argument
4274 static int si_pcie_gart_enable(struct radeon_device *rdev) in si_pcie_gart_enable() argument
4278 if (rdev->gart.robj == NULL) { in si_pcie_gart_enable()
4279 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); in si_pcie_gart_enable()
4282 r = radeon_gart_table_vram_pin(rdev); in si_pcie_gart_enable()
4305 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); in si_pcie_gart_enable()
4306 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); in si_pcie_gart_enable()
4307 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in si_pcie_gart_enable()
4309 (u32)(rdev->dummy_page.addr >> 12)); in si_pcie_gart_enable()
4321 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1); in si_pcie_gart_enable()
4329 rdev->vm_manager.saved_table_addr[i]); in si_pcie_gart_enable()
4332 rdev->vm_manager.saved_table_addr[i]); in si_pcie_gart_enable()
4337 (u32)(rdev->dummy_page.addr >> 12)); in si_pcie_gart_enable()
4354 si_pcie_gart_tlb_flush(rdev); in si_pcie_gart_enable()
4356 (unsigned)(rdev->mc.gtt_size >> 20), in si_pcie_gart_enable()
4357 (unsigned long long)rdev->gart.table_addr); in si_pcie_gart_enable()
4358 rdev->gart.ready = true; in si_pcie_gart_enable()
4362 static void si_pcie_gart_disable(struct radeon_device *rdev) in si_pcie_gart_disable() argument
4372 rdev->vm_manager.saved_table_addr[i] = RREG32(reg); in si_pcie_gart_disable()
4389 radeon_gart_table_vram_unpin(rdev); in si_pcie_gart_disable()
4392 static void si_pcie_gart_fini(struct radeon_device *rdev) in si_pcie_gart_fini() argument
4394 si_pcie_gart_disable(rdev); in si_pcie_gart_fini()
4395 radeon_gart_table_vram_free(rdev); in si_pcie_gart_fini()
4396 radeon_gart_fini(rdev); in si_pcie_gart_fini()
4441 static int si_vm_packet3_ce_check(struct radeon_device *rdev, in si_vm_packet3_ce_check() argument
4514 static int si_vm_packet3_gfx_check(struct radeon_device *rdev, in si_vm_packet3_gfx_check() argument
4632 static int si_vm_packet3_compute_check(struct radeon_device *rdev, in si_vm_packet3_compute_check() argument
4720 int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib) in si_ib_parse() argument
4733 dev_err(rdev->dev, "Packet0 not allowed!\n"); in si_ib_parse()
4742 ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4746 ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4750 ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4753 dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring); in si_ib_parse()
4761 dev_err(rdev->dev, "Unknown packet type %d !\n", pkt.type); in si_ib_parse()
4782 int si_vm_init(struct radeon_device *rdev) in si_vm_init() argument
4785 rdev->vm_manager.nvm = 16; in si_vm_init()
4787 rdev->vm_manager.vram_base_offset = 0; in si_vm_init()
4792 void si_vm_fini(struct radeon_device *rdev) in si_vm_fini() argument
4805 static void si_vm_decode_fault(struct radeon_device *rdev, in si_vm_decode_fault() argument
4813 if (rdev->family == CHIP_TAHITI) { in si_vm_decode_fault()
5060 void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, in si_vm_flush() argument
5112 static void si_wait_for_rlc_serdes(struct radeon_device *rdev) in si_wait_for_rlc_serdes() argument
5116 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5122 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5129 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev, in si_enable_gui_idle_interrupt() argument
5147 for (i = 0; i < rdev->usec_timeout; i++) { in si_enable_gui_idle_interrupt()
5155 static void si_set_uvd_dcm(struct radeon_device *rdev, in si_set_uvd_dcm() argument
5176 void si_init_uvd_internal_cg(struct radeon_device *rdev) in si_init_uvd_internal_cg() argument
5181 si_set_uvd_dcm(rdev, false); in si_init_uvd_internal_cg()
5189 static u32 si_halt_rlc(struct radeon_device *rdev) in si_halt_rlc() argument
5199 si_wait_for_rlc_serdes(rdev); in si_halt_rlc()
5205 static void si_update_rlc(struct radeon_device *rdev, u32 rlc) in si_update_rlc() argument
5214 static void si_enable_dma_pg(struct radeon_device *rdev, bool enable) in si_enable_dma_pg() argument
5219 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA)) in si_enable_dma_pg()
5227 static void si_init_dma_pg(struct radeon_device *rdev) in si_init_dma_pg() argument
5238 static void si_enable_gfx_cgpg(struct radeon_device *rdev, in si_enable_gfx_cgpg() argument
5243 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) { in si_enable_gfx_cgpg()
5263 static void si_init_gfx_cgpg(struct radeon_device *rdev) in si_init_gfx_cgpg() argument
5267 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_gfx_cgpg()
5273 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_gfx_cgpg()
5283 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh) in si_get_cu_active_bitmap() argument
5288 si_select_se_sh(rdev, se, sh); in si_get_cu_active_bitmap()
5291 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_get_cu_active_bitmap()
5298 for (i = 0; i < rdev->config.si.max_cu_per_sh; i ++) { in si_get_cu_active_bitmap()
5306 static void si_init_ao_cu_mask(struct radeon_device *rdev) in si_init_ao_cu_mask() argument
5312 for (i = 0; i < rdev->config.si.max_shader_engines; i++) { in si_init_ao_cu_mask()
5313 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) { in si_init_ao_cu_mask()
5317 for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) { in si_init_ao_cu_mask()
5318 if (si_get_cu_active_bitmap(rdev, i, j) & mask) { in si_init_ao_cu_mask()
5339 static void si_enable_cgcg(struct radeon_device *rdev, in si_enable_cgcg() argument
5346 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) { in si_enable_cgcg()
5347 si_enable_gui_idle_interrupt(rdev, true); in si_enable_cgcg()
5351 tmp = si_halt_rlc(rdev); in si_enable_cgcg()
5357 si_wait_for_rlc_serdes(rdev); in si_enable_cgcg()
5359 si_update_rlc(rdev, tmp); in si_enable_cgcg()
5365 si_enable_gui_idle_interrupt(rdev, false); in si_enable_cgcg()
5379 static void si_enable_mgcg(struct radeon_device *rdev, in si_enable_mgcg() argument
5384 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) { in si_enable_mgcg()
5390 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) { in si_enable_mgcg()
5402 tmp = si_halt_rlc(rdev); in si_enable_mgcg()
5408 si_update_rlc(rdev, tmp); in si_enable_mgcg()
5425 tmp = si_halt_rlc(rdev); in si_enable_mgcg()
5431 si_update_rlc(rdev, tmp); in si_enable_mgcg()
5435 static void si_enable_uvd_mgcg(struct radeon_device *rdev, in si_enable_uvd_mgcg() argument
5440 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) { in si_enable_uvd_mgcg()
5480 static void si_enable_mc_ls(struct radeon_device *rdev, in si_enable_mc_ls() argument
5488 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS)) in si_enable_mc_ls()
5497 static void si_enable_mc_mgcg(struct radeon_device *rdev, in si_enable_mc_mgcg() argument
5505 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG)) in si_enable_mc_mgcg()
5514 static void si_enable_dma_mgcg(struct radeon_device *rdev, in si_enable_dma_mgcg() argument
5520 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) { in si_enable_dma_mgcg()
5551 static void si_enable_bif_mgls(struct radeon_device *rdev, in si_enable_bif_mgls() argument
5558 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS)) in si_enable_bif_mgls()
5569 static void si_enable_hdp_mgcg(struct radeon_device *rdev, in si_enable_hdp_mgcg() argument
5576 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG)) in si_enable_hdp_mgcg()
5585 static void si_enable_hdp_ls(struct radeon_device *rdev, in si_enable_hdp_ls() argument
5592 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS)) in si_enable_hdp_ls()
5601 static void si_update_cg(struct radeon_device *rdev, in si_update_cg() argument
5605 si_enable_gui_idle_interrupt(rdev, false); in si_update_cg()
5608 si_enable_mgcg(rdev, true); in si_update_cg()
5609 si_enable_cgcg(rdev, true); in si_update_cg()
5611 si_enable_cgcg(rdev, false); in si_update_cg()
5612 si_enable_mgcg(rdev, false); in si_update_cg()
5614 si_enable_gui_idle_interrupt(rdev, true); in si_update_cg()
5618 si_enable_mc_mgcg(rdev, enable); in si_update_cg()
5619 si_enable_mc_ls(rdev, enable); in si_update_cg()
5623 si_enable_dma_mgcg(rdev, enable); in si_update_cg()
5627 si_enable_bif_mgls(rdev, enable); in si_update_cg()
5631 if (rdev->has_uvd) { in si_update_cg()
5632 si_enable_uvd_mgcg(rdev, enable); in si_update_cg()
5637 si_enable_hdp_mgcg(rdev, enable); in si_update_cg()
5638 si_enable_hdp_ls(rdev, enable); in si_update_cg()
5642 static void si_init_cg(struct radeon_device *rdev) in si_init_cg() argument
5644 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX | in si_init_cg()
5649 if (rdev->has_uvd) { in si_init_cg()
5650 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, true); in si_init_cg()
5651 si_init_uvd_internal_cg(rdev); in si_init_cg()
5655 static void si_fini_cg(struct radeon_device *rdev) in si_fini_cg() argument
5657 if (rdev->has_uvd) { in si_fini_cg()
5658 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, false); in si_fini_cg()
5660 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX | in si_fini_cg()
5667 u32 si_get_csb_size(struct radeon_device *rdev) in si_get_csb_size() argument
5673 if (rdev->rlc.cs_data == NULL) in si_get_csb_size()
5681 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { in si_get_csb_size()
5699 void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer) in si_get_csb_buffer() argument
5705 if (rdev->rlc.cs_data == NULL) in si_get_csb_buffer()
5717 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { in si_get_csb_buffer()
5733 switch (rdev->family) { in si_get_csb_buffer()
5759 static void si_init_pg(struct radeon_device *rdev) in si_init_pg() argument
5761 if (rdev->pg_flags) { in si_init_pg()
5762 if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) { in si_init_pg()
5763 si_init_dma_pg(rdev); in si_init_pg()
5765 si_init_ao_cu_mask(rdev); in si_init_pg()
5766 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) { in si_init_pg()
5767 si_init_gfx_cgpg(rdev); in si_init_pg()
5769 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_pg()
5770 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_pg()
5772 si_enable_dma_pg(rdev, true); in si_init_pg()
5773 si_enable_gfx_cgpg(rdev, true); in si_init_pg()
5775 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_pg()
5776 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_pg()
5780 static void si_fini_pg(struct radeon_device *rdev) in si_fini_pg() argument
5782 if (rdev->pg_flags) { in si_fini_pg()
5783 si_enable_dma_pg(rdev, false); in si_fini_pg()
5784 si_enable_gfx_cgpg(rdev, false); in si_fini_pg()
5791 void si_rlc_reset(struct radeon_device *rdev) in si_rlc_reset() argument
5803 static void si_rlc_stop(struct radeon_device *rdev) in si_rlc_stop() argument
5807 si_enable_gui_idle_interrupt(rdev, false); in si_rlc_stop()
5809 si_wait_for_rlc_serdes(rdev); in si_rlc_stop()
5812 static void si_rlc_start(struct radeon_device *rdev) in si_rlc_start() argument
5816 si_enable_gui_idle_interrupt(rdev, true); in si_rlc_start()
5821 static bool si_lbpw_supported(struct radeon_device *rdev) in si_lbpw_supported() argument
5832 static void si_enable_lbpw(struct radeon_device *rdev, bool enable) in si_enable_lbpw() argument
5844 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_enable_lbpw()
5849 static int si_rlc_resume(struct radeon_device *rdev) in si_rlc_resume() argument
5853 if (!rdev->rlc_fw) in si_rlc_resume()
5856 si_rlc_stop(rdev); in si_rlc_resume()
5858 si_rlc_reset(rdev); in si_rlc_resume()
5860 si_init_pg(rdev); in si_rlc_resume()
5862 si_init_cg(rdev); in si_rlc_resume()
5874 if (rdev->new_fw) { in si_rlc_resume()
5876 (const struct rlc_firmware_header_v1_0 *)rdev->rlc_fw->data; in si_rlc_resume()
5879 (rdev->rlc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in si_rlc_resume()
5889 (const __be32 *)rdev->rlc_fw->data; in si_rlc_resume()
5897 si_enable_lbpw(rdev, si_lbpw_supported(rdev)); in si_rlc_resume()
5899 si_rlc_start(rdev); in si_rlc_resume()
5904 static void si_enable_interrupts(struct radeon_device *rdev) in si_enable_interrupts() argument
5913 rdev->ih.enabled = true; in si_enable_interrupts()
5916 static void si_disable_interrupts(struct radeon_device *rdev) in si_disable_interrupts() argument
5928 rdev->ih.enabled = false; in si_disable_interrupts()
5929 rdev->ih.rptr = 0; in si_disable_interrupts()
5932 static void si_disable_interrupt_state(struct radeon_device *rdev) in si_disable_interrupt_state() argument
5947 if (rdev->num_crtc >= 2) { in si_disable_interrupt_state()
5951 if (rdev->num_crtc >= 4) { in si_disable_interrupt_state()
5955 if (rdev->num_crtc >= 6) { in si_disable_interrupt_state()
5960 if (rdev->num_crtc >= 2) { in si_disable_interrupt_state()
5964 if (rdev->num_crtc >= 4) { in si_disable_interrupt_state()
5968 if (rdev->num_crtc >= 6) { in si_disable_interrupt_state()
5973 if (!ASIC_IS_NODCE(rdev)) { in si_disable_interrupt_state()
5991 static int si_irq_init(struct radeon_device *rdev) in si_irq_init() argument
5998 ret = r600_ih_ring_alloc(rdev); in si_irq_init()
6003 si_disable_interrupts(rdev); in si_irq_init()
6006 ret = si_rlc_resume(rdev); in si_irq_init()
6008 r600_ih_ring_fini(rdev); in si_irq_init()
6014 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8); in si_irq_init()
6024 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8); in si_irq_init()
6025 rb_bufsz = order_base_2(rdev->ih.ring_size / 4); in si_irq_init()
6031 if (rdev->wb.enabled) in si_irq_init()
6035 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC); in si_irq_init()
6036 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF); in si_irq_init()
6047 if (rdev->msi_enabled) in si_irq_init()
6052 si_disable_interrupt_state(rdev); in si_irq_init()
6054 pci_set_master(rdev->pdev); in si_irq_init()
6057 si_enable_interrupts(rdev); in si_irq_init()
6062 int si_irq_set(struct radeon_device *rdev) in si_irq_set() argument
6072 if (!rdev->irq.installed) { in si_irq_set()
6077 if (!rdev->ih.enabled) { in si_irq_set()
6078 si_disable_interrupts(rdev); in si_irq_set()
6080 si_disable_interrupt_state(rdev); in si_irq_set()
6087 if (!ASIC_IS_NODCE(rdev)) { in si_irq_set()
6103 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { in si_irq_set()
6107 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) { in si_irq_set()
6111 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) { in si_irq_set()
6115 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { in si_irq_set()
6120 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) { in si_irq_set()
6124 if (rdev->irq.crtc_vblank_int[0] || in si_irq_set()
6125 atomic_read(&rdev->irq.pflip[0])) { in si_irq_set()
6129 if (rdev->irq.crtc_vblank_int[1] || in si_irq_set()
6130 atomic_read(&rdev->irq.pflip[1])) { in si_irq_set()
6134 if (rdev->irq.crtc_vblank_int[2] || in si_irq_set()
6135 atomic_read(&rdev->irq.pflip[2])) { in si_irq_set()
6139 if (rdev->irq.crtc_vblank_int[3] || in si_irq_set()
6140 atomic_read(&rdev->irq.pflip[3])) { in si_irq_set()
6144 if (rdev->irq.crtc_vblank_int[4] || in si_irq_set()
6145 atomic_read(&rdev->irq.pflip[4])) { in si_irq_set()
6149 if (rdev->irq.crtc_vblank_int[5] || in si_irq_set()
6150 atomic_read(&rdev->irq.pflip[5])) { in si_irq_set()
6154 if (rdev->irq.hpd[0]) { in si_irq_set()
6158 if (rdev->irq.hpd[1]) { in si_irq_set()
6162 if (rdev->irq.hpd[2]) { in si_irq_set()
6166 if (rdev->irq.hpd[3]) { in si_irq_set()
6170 if (rdev->irq.hpd[4]) { in si_irq_set()
6174 if (rdev->irq.hpd[5]) { in si_irq_set()
6188 if (rdev->irq.dpm_thermal) { in si_irq_set()
6193 if (rdev->num_crtc >= 2) { in si_irq_set()
6197 if (rdev->num_crtc >= 4) { in si_irq_set()
6201 if (rdev->num_crtc >= 6) { in si_irq_set()
6206 if (rdev->num_crtc >= 2) { in si_irq_set()
6212 if (rdev->num_crtc >= 4) { in si_irq_set()
6218 if (rdev->num_crtc >= 6) { in si_irq_set()
6225 if (!ASIC_IS_NODCE(rdev)) { in si_irq_set()
6242 static inline void si_irq_ack(struct radeon_device *rdev) in si_irq_ack() argument
6246 if (ASIC_IS_NODCE(rdev)) in si_irq_ack()
6249 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS); in si_irq_ack()
6250 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); in si_irq_ack()
6251 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2); in si_irq_ack()
6252 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3); in si_irq_ack()
6253 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4); in si_irq_ack()
6254 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5); in si_irq_ack()
6255 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET); in si_irq_ack()
6256 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET); in si_irq_ack()
6257 if (rdev->num_crtc >= 4) { in si_irq_ack()
6258 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET); in si_irq_ack()
6259 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET); in si_irq_ack()
6261 if (rdev->num_crtc >= 6) { in si_irq_ack()
6262 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET); in si_irq_ack()
6263 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET); in si_irq_ack()
6266 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6268 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6270 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) in si_irq_ack()
6272 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) in si_irq_ack()
6274 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) in si_irq_ack()
6276 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) in si_irq_ack()
6279 if (rdev->num_crtc >= 4) { in si_irq_ack()
6280 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6282 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6284 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) in si_irq_ack()
6286 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) in si_irq_ack()
6288 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) in si_irq_ack()
6290 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) in si_irq_ack()
6294 if (rdev->num_crtc >= 6) { in si_irq_ack()
6295 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6297 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6299 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) in si_irq_ack()
6301 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) in si_irq_ack()
6303 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) in si_irq_ack()
6305 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) in si_irq_ack()
6309 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) { in si_irq_ack()
6314 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) { in si_irq_ack()
6319 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) { in si_irq_ack()
6324 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) { in si_irq_ack()
6329 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) { in si_irq_ack()
6334 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) { in si_irq_ack()
6340 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) { in si_irq_ack()
6345 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) { in si_irq_ack()
6350 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) { in si_irq_ack()
6355 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) { in si_irq_ack()
6360 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) { in si_irq_ack()
6365 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) { in si_irq_ack()
6372 static void si_irq_disable(struct radeon_device *rdev) in si_irq_disable() argument
6374 si_disable_interrupts(rdev); in si_irq_disable()
6377 si_irq_ack(rdev); in si_irq_disable()
6378 si_disable_interrupt_state(rdev); in si_irq_disable()
6381 static void si_irq_suspend(struct radeon_device *rdev) in si_irq_suspend() argument
6383 si_irq_disable(rdev); in si_irq_suspend()
6384 si_rlc_stop(rdev); in si_irq_suspend()
6387 static void si_irq_fini(struct radeon_device *rdev) in si_irq_fini() argument
6389 si_irq_suspend(rdev); in si_irq_fini()
6390 r600_ih_ring_fini(rdev); in si_irq_fini()
6393 static inline u32 si_get_ih_wptr(struct radeon_device *rdev) in si_get_ih_wptr() argument
6397 if (rdev->wb.enabled) in si_get_ih_wptr()
6398 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); in si_get_ih_wptr()
6408 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", in si_get_ih_wptr()
6409 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); in si_get_ih_wptr()
6410 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; in si_get_ih_wptr()
6415 return (wptr & rdev->ih.ptr_mask); in si_get_ih_wptr()
6428 int si_irq_process(struct radeon_device *rdev) in si_irq_process() argument
6439 if (!rdev->ih.enabled || rdev->shutdown) in si_irq_process()
6442 wptr = si_get_ih_wptr(rdev); in si_irq_process()
6446 if (atomic_xchg(&rdev->ih.lock, 1)) in si_irq_process()
6449 rptr = rdev->ih.rptr; in si_irq_process()
6456 si_irq_ack(rdev); in si_irq_process()
6461 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; in si_irq_process()
6462 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; in si_irq_process()
6463 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff; in si_irq_process()
6469 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)) in si_irq_process()
6472 if (rdev->irq.crtc_vblank_int[0]) { in si_irq_process()
6473 drm_handle_vblank(rdev->ddev, 0); in si_irq_process()
6474 rdev->pm.vblank_sync = true; in si_irq_process()
6475 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6477 if (atomic_read(&rdev->irq.pflip[0])) in si_irq_process()
6478 radeon_crtc_handle_vblank(rdev, 0); in si_irq_process()
6479 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT; in si_irq_process()
6484 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)) in si_irq_process()
6487 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT; in si_irq_process()
6499 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)) in si_irq_process()
6502 if (rdev->irq.crtc_vblank_int[1]) { in si_irq_process()
6503 drm_handle_vblank(rdev->ddev, 1); in si_irq_process()
6504 rdev->pm.vblank_sync = true; in si_irq_process()
6505 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6507 if (atomic_read(&rdev->irq.pflip[1])) in si_irq_process()
6508 radeon_crtc_handle_vblank(rdev, 1); in si_irq_process()
6509 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT; in si_irq_process()
6514 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)) in si_irq_process()
6517 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT; in si_irq_process()
6529 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)) in si_irq_process()
6532 if (rdev->irq.crtc_vblank_int[2]) { in si_irq_process()
6533 drm_handle_vblank(rdev->ddev, 2); in si_irq_process()
6534 rdev->pm.vblank_sync = true; in si_irq_process()
6535 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6537 if (atomic_read(&rdev->irq.pflip[2])) in si_irq_process()
6538 radeon_crtc_handle_vblank(rdev, 2); in si_irq_process()
6539 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT; in si_irq_process()
6544 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)) in si_irq_process()
6547 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT; in si_irq_process()
6559 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)) in si_irq_process()
6562 if (rdev->irq.crtc_vblank_int[3]) { in si_irq_process()
6563 drm_handle_vblank(rdev->ddev, 3); in si_irq_process()
6564 rdev->pm.vblank_sync = true; in si_irq_process()
6565 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6567 if (atomic_read(&rdev->irq.pflip[3])) in si_irq_process()
6568 radeon_crtc_handle_vblank(rdev, 3); in si_irq_process()
6569 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT; in si_irq_process()
6574 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)) in si_irq_process()
6577 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT; in si_irq_process()
6589 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)) in si_irq_process()
6592 if (rdev->irq.crtc_vblank_int[4]) { in si_irq_process()
6593 drm_handle_vblank(rdev->ddev, 4); in si_irq_process()
6594 rdev->pm.vblank_sync = true; in si_irq_process()
6595 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6597 if (atomic_read(&rdev->irq.pflip[4])) in si_irq_process()
6598 radeon_crtc_handle_vblank(rdev, 4); in si_irq_process()
6599 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT; in si_irq_process()
6604 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)) in si_irq_process()
6607 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT; in si_irq_process()
6619 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)) in si_irq_process()
6622 if (rdev->irq.crtc_vblank_int[5]) { in si_irq_process()
6623 drm_handle_vblank(rdev->ddev, 5); in si_irq_process()
6624 rdev->pm.vblank_sync = true; in si_irq_process()
6625 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6627 if (atomic_read(&rdev->irq.pflip[5])) in si_irq_process()
6628 radeon_crtc_handle_vblank(rdev, 5); in si_irq_process()
6629 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT; in si_irq_process()
6634 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)) in si_irq_process()
6637 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT; in si_irq_process()
6654 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1); in si_irq_process()
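The six vblank/vline cases above (6469-6637) differ only in the CRTC index and in which cached status word (disp_int through disp_int_cont5) they test and clear. A minimal sketch of the repeated pattern, factored into a hypothetical helper that is not part of si.c:

static void si_handle_crtc_vblank(struct radeon_device *rdev, int crtc,
                                  u32 *disp_int, u32 vblank_bit)
{
        if (!(*disp_int & vblank_bit))
                return;                         /* not latched for this CRTC */
        if (rdev->irq.crtc_vblank_int[crtc]) {
                drm_handle_vblank(rdev->ddev, crtc);
                rdev->pm.vblank_sync = true;
                wake_up(&rdev->irq.vblank_queue);
        }
        if (atomic_read(&rdev->irq.pflip[crtc]))
                radeon_crtc_handle_vblank(rdev, crtc);
        *disp_int &= ~vblank_bit;               /* ack the cached status bit */
}

The pageflip case at 6654 recovers the CRTC index from the IH source ID with (src_id - 8) >> 1, since consecutive source-ID pairs belong to one CRTC.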
6659 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT)) in si_irq_process()
6662 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT; in si_irq_process()
6668 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT)) in si_irq_process()
6671 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT; in si_irq_process()
6677 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT)) in si_irq_process()
6680 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT; in si_irq_process()
6686 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT)) in si_irq_process()
6689 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT; in si_irq_process()
6695 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT)) in si_irq_process()
6698 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT; in si_irq_process()
6704 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT)) in si_irq_process()
6707 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT; in si_irq_process()
6713 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT)) in si_irq_process()
6716 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT; in si_irq_process()
6722 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT)) in si_irq_process()
6725 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT; in si_irq_process()
6731 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT)) in si_irq_process()
6734 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT; in si_irq_process()
6740 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT)) in si_irq_process()
6743 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT; in si_irq_process()
6749 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT)) in si_irq_process()
6752 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT; in si_irq_process()
6758 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT)) in si_irq_process()
6761 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT; in si_irq_process()
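Each HPD case (6659-6707) and HPD_RX case (6713-6761) follows the same test-and-clear shape; the refs listing hides the lines that set the local work flags. A hedged sketch for pad 1, where queue_hotplug and queue_dp are the locals that later gate schedule_work() at 6849/6851:

if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
        rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
        queue_hotplug = true;           /* long pulse: connector plug/unplug */
}
if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
        rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
        queue_dp = true;                /* short pulse: DP sink IRQ */
}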
6777 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX); in si_irq_process()
6787 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data); in si_irq_process()
6788 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in si_irq_process()
6790 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in si_irq_process()
6792 si_vm_decode_fault(rdev, status, addr); in si_irq_process()
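The GPU fault path (6787-6792) reads and immediately re-arms the VM protection fault registers before decoding. A hedged reconstruction of the elided body, following the evergreen/NI handlers (WREG32_P writes 1 under mask ~1 to clear the latched fault):

addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);     /* reset addr/status capture */
dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
si_vm_decode_fault(rdev, status, addr);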
6795 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_irq_process()
6798 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_irq_process()
6801 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_irq_process()
6807 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_irq_process()
6810 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_irq_process()
6813 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_irq_process()
6819 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX); in si_irq_process()
6823 rdev->pm.dpm.thermal.high_to_low = false; in si_irq_process()
6828 rdev->pm.dpm.thermal.high_to_low = true; in si_irq_process()
6836 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); in si_irq_process()
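The fence and thermal cases from 6795 onward dispatch on IH source ID. A compressed sketch of that part of the switch, hedged from the r600-family IH source-ID conventions (the ring_id demux for the CP EOP event at 6807-6813 is elided here):

switch (src_id) {
case 176: radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); break;   /* RINGID0 CP_INT */
case 177: radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); break;   /* RINGID1 CP_INT */
case 178: radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); break;   /* RINGID2 CP_INT */
case 224: radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX); break;     /* DMA trap */
case 230: rdev->pm.dpm.thermal.high_to_low = false; queue_thermal = true; break; /* thermal low->high */
case 231: rdev->pm.dpm.thermal.high_to_low = true;  queue_thermal = true; break; /* thermal high->low */
case 244: radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); break;  /* DMA1 trap */
}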
6845 rptr &= rdev->ih.ptr_mask; in si_irq_process()
6849 schedule_work(&rdev->dp_work); in si_irq_process()
6851 schedule_work(&rdev->hotplug_work); in si_irq_process()
6852 if (queue_thermal && rdev->pm.dpm_enabled) in si_irq_process()
6853 schedule_work(&rdev->pm.dpm.thermal.work); in si_irq_process()
6854 rdev->ih.rptr = rptr; in si_irq_process()
6855 atomic_set(&rdev->ih.lock, 0); in si_irq_process()
6858 wptr = si_get_ih_wptr(rdev); in si_irq_process()
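The tail at 6845-6858 is the standard IH consume-and-recheck idiom. A hedged reconstruction including the plumbing the listing omits:

/* per-vector advance inside the drain loop */
rptr += 16;                             /* each IH vector is 16 bytes */
rptr &= rdev->ih.ptr_mask;              /* wrap at the ring size */

/* after draining: schedule deferred work, publish rptr, re-check wptr */
rdev->ih.rptr = rptr;
WREG32(IH_RB_RPTR, rdev->ih.rptr);      /* tell the IH block what was consumed */
atomic_set(&rdev->ih.lock, 0);
wptr = si_get_ih_wptr(rdev);            /* anything arrive while processing? */
if (wptr != rptr)
        goto restart_ih;                /* label sits in the elided code */
return IRQ_HANDLED;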
6868 static int si_startup(struct radeon_device *rdev) in si_startup() argument
6874 si_pcie_gen3_enable(rdev); in si_startup()
6876 si_program_aspm(rdev); in si_startup()
6879 r = r600_vram_scratch_init(rdev); in si_startup()
6883 si_mc_program(rdev); in si_startup()
6885 if (!rdev->pm.dpm_enabled) { in si_startup()
6886 r = si_mc_load_microcode(rdev); in si_startup()
6893 r = si_pcie_gart_enable(rdev); in si_startup()
6896 si_gpu_init(rdev); in si_startup()
6899 if (rdev->family == CHIP_VERDE) { in si_startup()
6900 rdev->rlc.reg_list = verde_rlc_save_restore_register_list; in si_startup()
6901 rdev->rlc.reg_list_size = in si_startup()
6904 rdev->rlc.cs_data = si_cs_data; in si_startup()
6905 r = sumo_rlc_init(rdev); in si_startup()
6912 r = radeon_wb_init(rdev); in si_startup()
6916 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_startup()
6918 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6922 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_startup()
6924 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6928 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_startup()
6930 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6934 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); in si_startup()
6936 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in si_startup()
6940 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); in si_startup()
6942 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in si_startup()
6946 if (rdev->has_uvd) { in si_startup()
6947 r = uvd_v2_2_resume(rdev); in si_startup()
6949 r = radeon_fence_driver_start_ring(rdev, in si_startup()
6952 dev_err(rdev->dev, "UVD fences init error (%d).\n", r); in si_startup()
6955 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in si_startup()
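UVD startup (6946-6955) is deliberately non-fatal: on failure the ring is disabled by zeroing its size rather than aborting si_startup(). A hedged reconstruction of the control flow the listing flattens:

if (rdev->has_uvd) {
        r = uvd_v2_2_resume(rdev);
        if (!r) {
                r = radeon_fence_driver_start_ring(rdev,
                                                   R600_RING_TYPE_UVD_INDEX);
                if (r)
                        dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
        }
        if (r)
                rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; /* disable, don't fail */
}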
6959 if (!rdev->irq.installed) { in si_startup()
6960 r = radeon_irq_kms_init(rdev); in si_startup()
6965 r = si_irq_init(rdev); in si_startup()
6968 radeon_irq_kms_fini(rdev); in si_startup()
6971 si_irq_set(rdev); in si_startup()
6973 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_startup()
6974 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in si_startup()
6979 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_startup()
6980 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET, in si_startup()
6985 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_startup()
6986 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET, in si_startup()
6991 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in si_startup()
6992 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in si_startup()
6997 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in si_startup()
6998 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, in si_startup()
7003 r = si_cp_load_microcode(rdev); in si_startup()
7006 r = si_cp_resume(rdev); in si_startup()
7010 r = cayman_dma_resume(rdev); in si_startup()
7014 if (rdev->has_uvd) { in si_startup()
7015 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in si_startup()
7017 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, in si_startup()
7020 r = uvd_v1_0_init(rdev); in si_startup()
7026 r = radeon_ib_pool_init(rdev); in si_startup()
7028 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); in si_startup()
7032 r = radeon_vm_manager_init(rdev); in si_startup()
7034 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); in si_startup()
7038 r = radeon_audio_init(rdev); in si_startup()
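The refs listing strips si_startup()'s error plumbing; every step follows the same ladder, so the first failure propagates to the caller, which then tears the device down. A sketch (not the literal source) of the final steps:

r = radeon_ib_pool_init(rdev);
if (r) {
        dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
        return r;
}
r = radeon_vm_manager_init(rdev);
if (r) {
        dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
        return r;
}
return radeon_audio_init(rdev);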
7045 int si_resume(struct radeon_device *rdev) in si_resume() argument
7054 atom_asic_init(rdev->mode_info.atom_context); in si_resume()
7057 si_init_golden_registers(rdev); in si_resume()
7059 if (rdev->pm.pm_method == PM_METHOD_DPM) in si_resume()
7060 radeon_pm_resume(rdev); in si_resume()
7062 rdev->accel_working = true; in si_resume()
7063 r = si_startup(rdev); in si_resume()
7066 rdev->accel_working = false; in si_resume()
7074 int si_suspend(struct radeon_device *rdev) in si_suspend() argument
7076 radeon_pm_suspend(rdev); in si_suspend()
7077 radeon_audio_fini(rdev); in si_suspend()
7078 radeon_vm_manager_fini(rdev); in si_suspend()
7079 si_cp_enable(rdev, false); in si_suspend()
7080 cayman_dma_stop(rdev); in si_suspend()
7081 if (rdev->has_uvd) { in si_suspend()
7082 uvd_v1_0_fini(rdev); in si_suspend()
7083 radeon_uvd_suspend(rdev); in si_suspend()
7085 si_fini_pg(rdev); in si_suspend()
7086 si_fini_cg(rdev); in si_suspend()
7087 si_irq_suspend(rdev); in si_suspend()
7088 radeon_wb_disable(rdev); in si_suspend()
7089 si_pcie_gart_disable(rdev); in si_suspend()
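si_suspend() (7076-7089) unwinds si_startup() roughly in reverse. A hedged pairing of each teardown step with the startup call that redoes it on resume (clock/power gating torn down by si_fini_pg()/si_fini_cg() is re-established during startup; the exact call site is elided in this listing):

/* suspend step              redone on resume via si_resume() -> si_startup()
 * si_cp_enable(rdev, false) -> si_cp_load_microcode() + si_cp_resume()
 * cayman_dma_stop()         -> cayman_dma_resume()
 * uvd_v1_0_fini() +
 * radeon_uvd_suspend()      -> uvd_v2_2_resume() + uvd_v1_0_init()
 * si_irq_suspend()          -> si_irq_init() + si_irq_set()
 * radeon_wb_disable()       -> radeon_wb_init()
 * si_pcie_gart_disable()    -> si_pcie_gart_enable()
 */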
7099 int si_init(struct radeon_device *rdev) in si_init() argument
7101 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_init()
7105 if (!radeon_get_bios(rdev)) { in si_init()
7106 if (ASIC_IS_AVIVO(rdev)) in si_init()
7110 if (!rdev->is_atom_bios) { in si_init()
7111 dev_err(rdev->dev, "Expecting atombios for SI GPU\n"); in si_init()
7114 r = radeon_atombios_init(rdev); in si_init()
7119 if (!radeon_card_posted(rdev)) { in si_init()
7120 if (!rdev->bios) { in si_init()
7121 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); in si_init()
7125 atom_asic_init(rdev->mode_info.atom_context); in si_init()
7128 si_init_golden_registers(rdev); in si_init()
7130 si_scratch_init(rdev); in si_init()
7132 radeon_surface_init(rdev); in si_init()
7134 radeon_get_clock_info(rdev->ddev); in si_init()
7137 r = radeon_fence_driver_init(rdev); in si_init()
7142 r = si_mc_init(rdev); in si_init()
7146 r = radeon_bo_init(rdev); in si_init()
7150 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw || in si_init()
7151 !rdev->rlc_fw || !rdev->mc_fw) { in si_init()
7152 r = si_init_microcode(rdev); in si_init()
7160 radeon_pm_init(rdev); in si_init()
7162 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_init()
7164 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7166 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_init()
7168 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7170 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_init()
7172 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7174 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in si_init()
7176 r600_ring_init(rdev, ring, 64 * 1024); in si_init()
7178 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in si_init()
7180 r600_ring_init(rdev, ring, 64 * 1024); in si_init()
7182 if (rdev->has_uvd) { in si_init()
7183 r = radeon_uvd_init(rdev); in si_init()
7185 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in si_init()
7187 r600_ring_init(rdev, ring, 4096); in si_init()
7191 rdev->ih.ring_obj = NULL; in si_init()
7192 r600_ih_ring_init(rdev, 64 * 1024); in si_init()
7194 r = r600_pcie_gart_init(rdev); in si_init()
7198 rdev->accel_working = true; in si_init()
7199 r = si_startup(rdev); in si_init()
7201 dev_err(rdev->dev, "disabling GPU acceleration\n"); in si_init()
7202 si_cp_fini(rdev); in si_init()
7203 cayman_dma_fini(rdev); in si_init()
7204 si_irq_fini(rdev); in si_init()
7205 sumo_rlc_fini(rdev); in si_init()
7206 radeon_wb_fini(rdev); in si_init()
7207 radeon_ib_pool_fini(rdev); in si_init()
7208 radeon_vm_manager_fini(rdev); in si_init()
7209 radeon_irq_kms_fini(rdev); in si_init()
7210 si_pcie_gart_fini(rdev); in si_init()
7211 rdev->accel_working = false; in si_init()
7218 if (!rdev->mc_fw) { in si_init()
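si_init() refuses to complete without MC microcode (7218): before the ucode is loaded, the memory controller's default clocks and voltages are not sufficient for normal operation. A hedged reconstruction of the elided body (the DRM_ERROR text is not shown in the listing, so it is omitted rather than guessed):

if (!rdev->mc_fw) {
        /* default MC clocks/voltages are unusable without the ucode */
        return -EINVAL;
}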
7226 void si_fini(struct radeon_device *rdev) in si_fini() argument
7228 radeon_pm_fini(rdev); in si_fini()
7229 si_cp_fini(rdev); in si_fini()
7230 cayman_dma_fini(rdev); in si_fini()
7231 si_fini_pg(rdev); in si_fini()
7232 si_fini_cg(rdev); in si_fini()
7233 si_irq_fini(rdev); in si_fini()
7234 sumo_rlc_fini(rdev); in si_fini()
7235 radeon_wb_fini(rdev); in si_fini()
7236 radeon_vm_manager_fini(rdev); in si_fini()
7237 radeon_ib_pool_fini(rdev); in si_fini()
7238 radeon_irq_kms_fini(rdev); in si_fini()
7239 if (rdev->has_uvd) { in si_fini()
7240 uvd_v1_0_fini(rdev); in si_fini()
7241 radeon_uvd_fini(rdev); in si_fini()
7243 si_pcie_gart_fini(rdev); in si_fini()
7244 r600_vram_scratch_fini(rdev); in si_fini()
7245 radeon_gem_fini(rdev); in si_fini()
7246 radeon_fence_driver_fini(rdev); in si_fini()
7247 radeon_bo_fini(rdev); in si_fini()
7248 radeon_atombios_fini(rdev); in si_fini()
7249 kfree(rdev->bios); in si_fini()
7250 rdev->bios = NULL; in si_fini()
7261 uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev) in si_get_gpu_clock_counter() argument
7265 mutex_lock(&rdev->gpu_clock_mutex); in si_get_gpu_clock_counter()
7269 mutex_unlock(&rdev->gpu_clock_mutex); in si_get_gpu_clock_counter()
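si_get_gpu_clock_counter() (7261-7269) serializes on gpu_clock_mutex because the 64-bit counter is latched once and then read back as two 32-bit halves. A hedged reconstruction, with register names per sid.h:

mutex_lock(&rdev->gpu_clock_mutex);
WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);         /* latch the counter */
clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
        ((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
mutex_unlock(&rdev->gpu_clock_mutex);
return clock;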
7273 int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) in si_set_uvd_clocks() argument
7291 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000, in si_set_uvd_clocks()
7311 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in si_set_uvd_clocks()
7348 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in si_set_uvd_clocks()
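si_set_uvd_clocks() (7273-7348) follows the common radeon UPLL bring-up: compute dividers, park the PLL in bypass, program it, and hand control requests to the SMC. A sketch of the divider call at 7291 with argument meanings inferred from radeon_uvd.c (treat the annotations as assumptions):

unsigned fb_div, vclk_div, dclk_div;

r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk,
                                  125000, 250000,       /* VCO min/max, 10 kHz units */
                                  16384, 0x03FFFFFF,    /* fb-divider scale and mask */
                                  0, 128, 5,            /* post-divider bounds; meanings inferred */
                                  &fb_div, &vclk_div, &dclk_div);
if (r)
        return r;

The two radeon_uvd_send_upll_ctlreq() calls (7311, 7348) each ask the SMC to latch the new PLL state and poll CG_UPLL_FUNC_CNTL until the request is acknowledged.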
7362 static void si_pcie_gen3_enable(struct radeon_device *rdev) in si_pcie_gen3_enable() argument
7364 struct pci_dev *root = rdev->pdev->bus->self; in si_pcie_gen3_enable()
7370 if (pci_is_root_bus(rdev->pdev->bus)) in si_pcie_gen3_enable()
7376 if (rdev->flags & RADEON_IS_IGP) in si_pcie_gen3_enable()
7379 if (!(rdev->flags & RADEON_IS_PCIE)) in si_pcie_gen3_enable()
7382 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask); in si_pcie_gen3_enable()
7410 gpu_pos = pci_pcie_cap(rdev->pdev); in si_pcie_gen3_enable()
7422 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg); in si_pcie_gen3_enable()
7428 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16); in si_pcie_gen3_enable()
7446 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16); in si_pcie_gen3_enable()
7451 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg); in si_pcie_gen3_enable()
7454 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2); in si_pcie_gen3_enable()
7472 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16); in si_pcie_gen3_enable()
7475 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16); in si_pcie_gen3_enable()
7483 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16); in si_pcie_gen3_enable()
7486 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16); in si_pcie_gen3_enable()
7500 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16); in si_pcie_gen3_enable()
7508 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16); in si_pcie_gen3_enable()
7514 for (i = 0; i < rdev->usec_timeout; i++) { in si_pcie_gen3_enable()
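The gen3 sequence ends by programming the standard PCIe target-link-speed field and letting the link retrain; the usec_timeout poll at 7514 then waits for the LC speed-change handshake to clear. A hedged reconstruction of the LNKCTL2 update around 7500-7508 (the [3:0] encoding is standard PCIe):

pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
tmp16 &= ~0xf;                          /* clear target link speed */
if (mask & DRM_PCIE_SPEED_80)
        tmp16 |= 3;                     /* 8.0 GT/s (gen3) */
else if (mask & DRM_PCIE_SPEED_50)
        tmp16 |= 2;                     /* 5.0 GT/s (gen2) */
else
        tmp16 |= 1;                     /* 2.5 GT/s (gen1) */
pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);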
7522 static void si_program_aspm(struct radeon_device *rdev) in si_program_aspm() argument
7531 if (!(rdev->flags & RADEON_IS_PCIE)) in si_program_aspm()
7589 if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) { in si_program_aspm()
7638 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN)) in si_program_aspm()
7645 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN)) in si_program_aspm()
7651 !pci_is_root_bus(rdev->pdev->bus)) { in si_program_aspm()
7652 struct pci_dev *root = rdev->pdev->bus->self; in si_program_aspm()