Lines matching refs: rdev (drivers/gpu/drm/radeon/si.c)
114 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh);
115 static void si_pcie_gen3_enable(struct radeon_device *rdev);
116 static void si_program_aspm(struct radeon_device *rdev);
117 extern void sumo_rlc_fini(struct radeon_device *rdev);
118 extern int sumo_rlc_init(struct radeon_device *rdev);
119 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
120 extern void r600_ih_ring_fini(struct radeon_device *rdev);
121 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
122 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
123 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
124 extern u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev);
125 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
126 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
127 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
129 static void si_init_pg(struct radeon_device *rdev);
130 static void si_init_cg(struct radeon_device *rdev);
131 static void si_fini_pg(struct radeon_device *rdev);
132 static void si_fini_cg(struct radeon_device *rdev);
133 static void si_rlc_stop(struct radeon_device *rdev);
1198 static void si_init_golden_registers(struct radeon_device *rdev) in si_init_golden_registers() argument
1200 switch (rdev->family) { in si_init_golden_registers()
1202 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1205 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1208 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1211 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1216 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1219 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1222 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1227 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1230 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1233 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1236 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1241 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1244 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1247 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1252 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1255 radeon_program_register_sequence(rdev, in si_init_golden_registers()
1258 radeon_program_register_sequence(rdev, in si_init_golden_registers()
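
The golden-register tables fed to all the radeon_program_register_sequence() calls above are flat u32 arrays of (offset, and-mask, or-value) triples. For context, a condensed sketch of the consumer, which lives in radeon_device.c (body recalled from this era of the driver, so treat it as a sketch rather than a verbatim quote):

    void radeon_program_register_sequence(struct radeon_device *rdev,
                                          const u32 *registers,
                                          const u32 array_size)
    {
            u32 tmp, reg, and_mask, or_mask;
            int i;

            if (array_size % 3)
                    return;                 /* table must be whole triples */

            for (i = 0; i < array_size; i += 3) {
                    reg = registers[i + 0];
                    and_mask = registers[i + 1];
                    or_mask = registers[i + 2];

                    if (and_mask == 0xffffffff) {
                            tmp = or_mask;          /* full overwrite */
                    } else {
                            tmp = RREG32(reg);      /* read-modify-write */
                            tmp &= ~and_mask;
                            tmp |= or_mask;
                    }
                    WREG32(reg, tmp);
            }
    }
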
1277 int si_get_allowed_info_register(struct radeon_device *rdev, in si_get_allowed_info_register() argument
1308 u32 si_get_xclk(struct radeon_device *rdev) in si_get_xclk() argument
1310 u32 reference_clock = rdev->clock.spll.reference_freq; in si_get_xclk()
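
The remainder of si_get_xclk() is short: the SPLL reference frequency is returned as-is unless XCLK is muxed to TCLK or the crystal input is divided by four. Sketch of the body (CG_CLKPIN_CNTL* bit names recalled from sid.h):

    u32 si_get_xclk(struct radeon_device *rdev)
    {
            u32 reference_clock = rdev->clock.spll.reference_freq;
            u32 tmp;

            tmp = RREG32(CG_CLKPIN_CNTL_2);
            if (tmp & MUX_TCLK_TO_XCLK)
                    return TCLK;

            tmp = RREG32(CG_CLKPIN_CNTL);
            if (tmp & XTALIN_DIVIDE)
                    return reference_clock / 4;

            return reference_clock;
    }
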
1325 int si_get_temp(struct radeon_device *rdev) in si_get_temp() argument
1541 int si_mc_load_microcode(struct radeon_device *rdev) in si_mc_load_microcode() argument
1550 if (!rdev->mc_fw) in si_mc_load_microcode()
1553 if (rdev->new_fw) { in si_mc_load_microcode()
1555 (const struct mc_firmware_header_v1_0 *)rdev->mc_fw->data; in si_mc_load_microcode()
1560 (rdev->mc_fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); in si_mc_load_microcode()
1563 (rdev->mc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in si_mc_load_microcode()
1565 ucode_size = rdev->mc_fw->size / 4; in si_mc_load_microcode()
1567 switch (rdev->family) { in si_mc_load_microcode()
1590 fw_data = (const __be32 *)rdev->mc_fw->data; in si_mc_load_microcode()
1607 if (rdev->new_fw) { in si_mc_load_microcode()
1617 if (rdev->new_fw) in si_mc_load_microcode()
1629 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
1634 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
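
Between the header parsing above and the two usec_timeout polls on lines 1629/1634 (which wait for the memory-training TRAIN_DONE bits for each channel), the load itself is a register-banging sequence. A condensed sketch, with the MC_SEQ_* names and magic values recalled from the ni.c/si.c code of this era (assumptions, not a verbatim quote):

    /* reset the engine and set to writable */
    WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
    WREG32(MC_SEQ_SUP_CNTL, 0x00000010);

    /* load mc io regs as (index, data) pairs */
    for (i = 0; i < regs_size; i++) {
            WREG32(MC_SEQ_IO_DEBUG_INDEX, io_mc_regs[(i << 1)]);
            WREG32(MC_SEQ_IO_DEBUG_DATA, io_mc_regs[(i << 1) + 1]);
    }

    /* load the MC ucode; the legacy blob is big-endian,
     * the new_fw path uses le32_to_cpup() instead
     */
    for (i = 0; i < ucode_size; i++)
            WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));

    /* put the engine back into the active state */
    WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
    WREG32(MC_SEQ_SUP_CNTL, 0x00000004);
    WREG32(MC_SEQ_SUP_CNTL, 0x00000001);
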
1647 static int si_init_microcode(struct radeon_device *rdev) in si_init_microcode() argument
1659 switch (rdev->family) { in si_init_microcode()
1719 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in si_init_microcode()
1722 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in si_init_microcode()
1725 if (rdev->pfp_fw->size != pfp_req_size) { in si_init_microcode()
1728 rdev->pfp_fw->size, fw_name); in si_init_microcode()
1733 err = radeon_ucode_validate(rdev->pfp_fw); in si_init_microcode()
1745 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in si_init_microcode()
1748 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in si_init_microcode()
1751 if (rdev->me_fw->size != me_req_size) { in si_init_microcode()
1754 rdev->me_fw->size, fw_name); in si_init_microcode()
1758 err = radeon_ucode_validate(rdev->me_fw); in si_init_microcode()
1770 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); in si_init_microcode()
1773 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev); in si_init_microcode()
1776 if (rdev->ce_fw->size != ce_req_size) { in si_init_microcode()
1779 rdev->ce_fw->size, fw_name); in si_init_microcode()
1783 err = radeon_ucode_validate(rdev->ce_fw); in si_init_microcode()
1795 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in si_init_microcode()
1798 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in si_init_microcode()
1801 if (rdev->rlc_fw->size != rlc_req_size) { in si_init_microcode()
1804 rdev->rlc_fw->size, fw_name); in si_init_microcode()
1808 err = radeon_ucode_validate(rdev->rlc_fw); in si_init_microcode()
1820 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1823 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1826 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in si_init_microcode()
1830 if ((rdev->mc_fw->size != mc_req_size) && in si_init_microcode()
1831 (rdev->mc_fw->size != mc2_req_size)) { in si_init_microcode()
1834 rdev->mc_fw->size, fw_name); in si_init_microcode()
1837 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size); in si_init_microcode()
1839 err = radeon_ucode_validate(rdev->mc_fw); in si_init_microcode()
1851 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in si_init_microcode()
1854 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in si_init_microcode()
1859 release_firmware(rdev->smc_fw); in si_init_microcode()
1860 rdev->smc_fw = NULL; in si_init_microcode()
1862 } else if (rdev->smc_fw->size != smc_req_size) { in si_init_microcode()
1865 rdev->smc_fw->size, fw_name); in si_init_microcode()
1869 err = radeon_ucode_validate(rdev->smc_fw); in si_init_microcode()
1881 rdev->new_fw = false; in si_init_microcode()
1886 rdev->new_fw = true; in si_init_microcode()
1894 release_firmware(rdev->pfp_fw); in si_init_microcode()
1895 rdev->pfp_fw = NULL; in si_init_microcode()
1896 release_firmware(rdev->me_fw); in si_init_microcode()
1897 rdev->me_fw = NULL; in si_init_microcode()
1898 release_firmware(rdev->ce_fw); in si_init_microcode()
1899 rdev->ce_fw = NULL; in si_init_microcode()
1900 release_firmware(rdev->rlc_fw); in si_init_microcode()
1901 rdev->rlc_fw = NULL; in si_init_microcode()
1902 release_firmware(rdev->mc_fw); in si_init_microcode()
1903 rdev->mc_fw = NULL; in si_init_microcode()
1904 release_firmware(rdev->smc_fw); in si_init_microcode()
1905 rdev->smc_fw = NULL; in si_init_microcode()
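
Every doubled request_firmware() pair above (e.g. lines 1719/1722) is the same pattern: try the new unified firmware name first, fall back to the legacy per-ASIC name, and only run radeon_ucode_validate() on the unified image; rdev->new_fw ends up true (lines 1881/1886) only if every image came in the new format. Sketched for the PFP image, with the error handling condensed:

    snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", new_chip_name);
    err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
    if (err) {
            /* fall back to the legacy firmware name */
            snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
            err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
            if (err)
                    goto out;
            if (rdev->pfp_fw->size != pfp_req_size) {
                    err = -EINVAL;          /* bogus length */
                    goto out;
            }
    } else {
            err = radeon_ucode_validate(rdev->pfp_fw);
            if (err)
                    goto out;
            new_fw++;       /* one more image found in the new format */
    }
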
1911 static u32 dce6_line_buffer_adjust(struct radeon_device *rdev, in dce6_line_buffer_adjust() argument
1949 for (i = 0; i < rdev->usec_timeout; i++) { in dce6_line_buffer_adjust()
1970 static u32 si_get_number_of_dram_channels(struct radeon_device *rdev) in si_get_number_of_dram_channels() argument
2251 static void dce6_program_watermarks(struct radeon_device *rdev, in dce6_program_watermarks() argument
2273 if (rdev->family == CHIP_ARUBA) in dce6_program_watermarks()
2274 dram_channels = evergreen_get_number_of_dram_channels(rdev); in dce6_program_watermarks()
2276 dram_channels = si_get_number_of_dram_channels(rdev); in dce6_program_watermarks()
2279 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in dce6_program_watermarks()
2281 radeon_dpm_get_mclk(rdev, false) * 10; in dce6_program_watermarks()
2283 radeon_dpm_get_sclk(rdev, false) * 10; in dce6_program_watermarks()
2285 wm_high.yclk = rdev->pm.current_mclk * 10; in dce6_program_watermarks()
2286 wm_high.sclk = rdev->pm.current_sclk * 10; in dce6_program_watermarks()
2306 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in dce6_program_watermarks()
2308 radeon_dpm_get_mclk(rdev, true) * 10; in dce6_program_watermarks()
2310 radeon_dpm_get_sclk(rdev, true) * 10; in dce6_program_watermarks()
2312 wm_low.yclk = rdev->pm.current_mclk * 10; in dce6_program_watermarks()
2313 wm_low.sclk = rdev->pm.current_sclk * 10; in dce6_program_watermarks()
2342 (rdev->disp_priority == 2)) { in dce6_program_watermarks()
2350 (rdev->disp_priority == 2)) { in dce6_program_watermarks()
2414 void dce6_bandwidth_update(struct radeon_device *rdev) in dce6_bandwidth_update() argument
2421 if (!rdev->mode_info.mode_config_initialized) in dce6_bandwidth_update()
2424 radeon_update_display_priority(rdev); in dce6_bandwidth_update()
2426 for (i = 0; i < rdev->num_crtc; i++) { in dce6_bandwidth_update()
2427 if (rdev->mode_info.crtcs[i]->base.enabled) in dce6_bandwidth_update()
2430 for (i = 0; i < rdev->num_crtc; i += 2) { in dce6_bandwidth_update()
2431 mode0 = &rdev->mode_info.crtcs[i]->base.mode; in dce6_bandwidth_update()
2432 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode; in dce6_bandwidth_update()
2433 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1); in dce6_bandwidth_update()
2434 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads); in dce6_bandwidth_update()
2435 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0); in dce6_bandwidth_update()
2436 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads); in dce6_bandwidth_update()
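
The i += 2 stride here is deliberate: on DCE6 each line buffer is shared by a pair of display controllers, so dce6_line_buffer_adjust() is given both modes of the pair to decide the split, and the watermarks for each half are then programmed from the resulting lb_size.
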
2443 static void si_tiling_mode_table_init(struct radeon_device *rdev) in si_tiling_mode_table_init() argument
2448 switch (rdev->config.si.mem_row_size_in_kb) { in si_tiling_mode_table_init()
2461 if ((rdev->family == CHIP_TAHITI) || in si_tiling_mode_table_init()
2462 (rdev->family == CHIP_PITCAIRN)) { in si_tiling_mode_table_init()
2699 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden; in si_tiling_mode_table_init()
2702 } else if ((rdev->family == CHIP_VERDE) || in si_tiling_mode_table_init()
2703 (rdev->family == CHIP_OLAND) || in si_tiling_mode_table_init()
2704 (rdev->family == CHIP_HAINAN)) { in si_tiling_mode_table_init()
2941 rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden; in si_tiling_mode_table_init()
2945 DRM_ERROR("unknown asic: 0x%x\n", rdev->family); in si_tiling_mode_table_init()
2948 static void si_select_se_sh(struct radeon_device *rdev, in si_select_se_sh() argument
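
si_select_se_sh() is the steering primitive used by almost every function below: it writes GRBM_GFX_INDEX so that subsequent register accesses hit one shader engine / shader array, with 0xffffffff meaning broadcast. Sketch of the body (bit-field names recalled from sid.h):

    static void si_select_se_sh(struct radeon_device *rdev,
                                u32 se_num, u32 sh_num)
    {
            u32 data = INSTANCE_BROADCAST_WRITES;

            if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))
                    data |= SH_BROADCAST_WRITES | SE_BROADCAST_WRITES;
            else if (se_num == 0xffffffff)
                    data |= SE_BROADCAST_WRITES | SH_INDEX(sh_num);
            else if (sh_num == 0xffffffff)
                    data |= SH_BROADCAST_WRITES | SE_INDEX(se_num);
            else
                    data |= SH_INDEX(sh_num) | SE_INDEX(se_num);
            WREG32(GRBM_GFX_INDEX, data);
    }
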
2975 static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh) in si_get_cu_enabled() argument
2993 static void si_setup_spi(struct radeon_device *rdev, in si_setup_spi() argument
3002 si_select_se_sh(rdev, i, j); in si_setup_spi()
3004 active_cu = si_get_cu_enabled(rdev, cu_per_sh); in si_setup_spi()
3017 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_spi()
3020 static u32 si_get_rb_disabled(struct radeon_device *rdev, in si_get_rb_disabled() argument
3040 static void si_setup_rb(struct radeon_device *rdev, in si_setup_rb() argument
3051 si_select_se_sh(rdev, i, j); in si_setup_rb()
3052 data = si_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se); in si_setup_rb()
3056 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_rb()
3065 rdev->config.si.backend_enable_mask = enabled_rbs; in si_setup_rb()
3068 si_select_se_sh(rdev, i, 0xffffffff); in si_setup_rb()
3087 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_setup_rb()
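
Inside si_setup_rb(), between the harvest reads (line 3052) and the backend_enable_mask assignment (line 3065), the per-array disabled-RB bits are inverted into one enabled mask, roughly:

    mask = 1;
    for (i = 0; i < max_rb_num_per_se * se_num; i++) {
            if (!(disabled_rbs & mask))
                    enabled_rbs |= mask;
            mask <<= 1;
    }

The per-SE loop at line 3068 then consumes enabled_rbs two bits at a time to build each engine's PA_SC_RASTER_CONFIG value.
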
3090 static void si_gpu_init(struct radeon_device *rdev) in si_gpu_init() argument
3099 switch (rdev->family) { in si_gpu_init()
3101 rdev->config.si.max_shader_engines = 2; in si_gpu_init()
3102 rdev->config.si.max_tile_pipes = 12; in si_gpu_init()
3103 rdev->config.si.max_cu_per_sh = 8; in si_gpu_init()
3104 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3105 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3106 rdev->config.si.max_texture_channel_caches = 12; in si_gpu_init()
3107 rdev->config.si.max_gprs = 256; in si_gpu_init()
3108 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3109 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3111 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3112 rdev->config.si.sc_prim_fifo_size_backend = 0x100; in si_gpu_init()
3113 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3114 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3118 rdev->config.si.max_shader_engines = 2; in si_gpu_init()
3119 rdev->config.si.max_tile_pipes = 8; in si_gpu_init()
3120 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3121 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3122 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3123 rdev->config.si.max_texture_channel_caches = 8; in si_gpu_init()
3124 rdev->config.si.max_gprs = 256; in si_gpu_init()
3125 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3126 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3128 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3129 rdev->config.si.sc_prim_fifo_size_backend = 0x100; in si_gpu_init()
3130 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3131 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3136 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3137 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3138 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3139 rdev->config.si.max_sh_per_se = 2; in si_gpu_init()
3140 rdev->config.si.max_backends_per_se = 4; in si_gpu_init()
3141 rdev->config.si.max_texture_channel_caches = 4; in si_gpu_init()
3142 rdev->config.si.max_gprs = 256; in si_gpu_init()
3143 rdev->config.si.max_gs_threads = 32; in si_gpu_init()
3144 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3146 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3147 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3148 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3149 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3153 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3154 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3155 rdev->config.si.max_cu_per_sh = 6; in si_gpu_init()
3156 rdev->config.si.max_sh_per_se = 1; in si_gpu_init()
3157 rdev->config.si.max_backends_per_se = 2; in si_gpu_init()
3158 rdev->config.si.max_texture_channel_caches = 4; in si_gpu_init()
3159 rdev->config.si.max_gprs = 256; in si_gpu_init()
3160 rdev->config.si.max_gs_threads = 16; in si_gpu_init()
3161 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3163 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3164 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3165 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3166 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3170 rdev->config.si.max_shader_engines = 1; in si_gpu_init()
3171 rdev->config.si.max_tile_pipes = 4; in si_gpu_init()
3172 rdev->config.si.max_cu_per_sh = 5; in si_gpu_init()
3173 rdev->config.si.max_sh_per_se = 1; in si_gpu_init()
3174 rdev->config.si.max_backends_per_se = 1; in si_gpu_init()
3175 rdev->config.si.max_texture_channel_caches = 2; in si_gpu_init()
3176 rdev->config.si.max_gprs = 256; in si_gpu_init()
3177 rdev->config.si.max_gs_threads = 16; in si_gpu_init()
3178 rdev->config.si.max_hw_contexts = 8; in si_gpu_init()
3180 rdev->config.si.sc_prim_fifo_size_frontend = 0x20; in si_gpu_init()
3181 rdev->config.si.sc_prim_fifo_size_backend = 0x40; in si_gpu_init()
3182 rdev->config.si.sc_hiz_tile_fifo_size = 0x30; in si_gpu_init()
3183 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130; in si_gpu_init()
3201 evergreen_fix_pci_max_read_req_size(rdev); in si_gpu_init()
3208 rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes; in si_gpu_init()
3209 rdev->config.si.mem_max_burst_length_bytes = 256; in si_gpu_init()
3211 rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; in si_gpu_init()
3212 if (rdev->config.si.mem_row_size_in_kb > 4) in si_gpu_init()
3213 rdev->config.si.mem_row_size_in_kb = 4; in si_gpu_init()
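
The expression on line 3211 decodes a DRAM geometry field (tmp, read from MC_ARB_RAMCFG just above): row size in bytes is 4 * 2^(8 + tmp), so tmp = 0, 1, 2 gives 1 KB, 2 KB, 4 KB, and the clamp on lines 3212/3213 caps the result at 4 KB.
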
3215 rdev->config.si.shader_engine_tile_size = 32; in si_gpu_init()
3216 rdev->config.si.num_gpus = 1; in si_gpu_init()
3217 rdev->config.si.multi_gpu_tile_size = 64; in si_gpu_init()
3221 switch (rdev->config.si.mem_row_size_in_kb) { in si_gpu_init()
3241 rdev->config.si.tile_config = 0; in si_gpu_init()
3242 switch (rdev->config.si.num_tile_pipes) { in si_gpu_init()
3244 rdev->config.si.tile_config |= (0 << 0); in si_gpu_init()
3247 rdev->config.si.tile_config |= (1 << 0); in si_gpu_init()
3250 rdev->config.si.tile_config |= (2 << 0); in si_gpu_init()
3255 rdev->config.si.tile_config |= (3 << 0); in si_gpu_init()
3260 rdev->config.si.tile_config |= 0 << 4; in si_gpu_init()
3263 rdev->config.si.tile_config |= 1 << 4; in si_gpu_init()
3267 rdev->config.si.tile_config |= 2 << 4; in si_gpu_init()
3270 rdev->config.si.tile_config |= in si_gpu_init()
3272 rdev->config.si.tile_config |= in si_gpu_init()
3281 if (rdev->has_uvd) { in si_gpu_init()
3287 si_tiling_mode_table_init(rdev); in si_gpu_init()
3289 si_setup_rb(rdev, rdev->config.si.max_shader_engines, in si_gpu_init()
3290 rdev->config.si.max_sh_per_se, in si_gpu_init()
3291 rdev->config.si.max_backends_per_se); in si_gpu_init()
3293 si_setup_spi(rdev, rdev->config.si.max_shader_engines, in si_gpu_init()
3294 rdev->config.si.max_sh_per_se, in si_gpu_init()
3295 rdev->config.si.max_cu_per_sh); in si_gpu_init()
3297 rdev->config.si.active_cus = 0; in si_gpu_init()
3298 for (i = 0; i < rdev->config.si.max_shader_engines; i++) { in si_gpu_init()
3299 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) { in si_gpu_init()
3300 rdev->config.si.active_cus += in si_gpu_init()
3301 hweight32(si_get_cu_active_bitmap(rdev, i, j)); in si_gpu_init()
3315 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) | in si_gpu_init()
3316 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) | in si_gpu_init()
3317 SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) | in si_gpu_init()
3318 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size))); in si_gpu_init()
3359 static void si_scratch_init(struct radeon_device *rdev) in si_scratch_init() argument
3363 rdev->scratch.num_reg = 7; in si_scratch_init()
3364 rdev->scratch.reg_base = SCRATCH_REG0; in si_scratch_init()
3365 for (i = 0; i < rdev->scratch.num_reg; i++) { in si_scratch_init()
3366 rdev->scratch.free[i] = true; in si_scratch_init()
3367 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4); in si_scratch_init()
3371 void si_fence_ring_emit(struct radeon_device *rdev, in si_fence_ring_emit() argument
3374 struct radeon_ring *ring = &rdev->ring[fence->ring]; in si_fence_ring_emit()
3375 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in si_fence_ring_emit()
3401 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in si_ring_ib_execute() argument
3403 struct radeon_ring *ring = &rdev->ring[ib->ring]; in si_ring_ib_execute()
3421 } else if (rdev->wb.enabled) { in si_ring_ib_execute()
3461 static void si_cp_enable(struct radeon_device *rdev, bool enable) in si_cp_enable() argument
3466 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in si_cp_enable()
3467 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); in si_cp_enable()
3470 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in si_cp_enable()
3471 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_enable()
3472 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_enable()
3477 static int si_cp_load_microcode(struct radeon_device *rdev) in si_cp_load_microcode() argument
3481 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw) in si_cp_load_microcode()
3484 si_cp_enable(rdev, false); in si_cp_load_microcode()
3486 if (rdev->new_fw) { in si_cp_load_microcode()
3488 (const struct gfx_firmware_header_v1_0 *)rdev->pfp_fw->data; in si_cp_load_microcode()
3490 (const struct gfx_firmware_header_v1_0 *)rdev->ce_fw->data; in si_cp_load_microcode()
3492 (const struct gfx_firmware_header_v1_0 *)rdev->me_fw->data; in si_cp_load_microcode()
3502 (rdev->pfp_fw->data + le32_to_cpu(pfp_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3511 (rdev->ce_fw->data + le32_to_cpu(ce_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3520 (rdev->me_fw->data + le32_to_cpu(me_hdr->header.ucode_array_offset_bytes)); in si_cp_load_microcode()
3530 fw_data = (const __be32 *)rdev->pfp_fw->data; in si_cp_load_microcode()
3537 fw_data = (const __be32 *)rdev->ce_fw->data; in si_cp_load_microcode()
3544 fw_data = (const __be32 *)rdev->me_fw->data; in si_cp_load_microcode()
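
In this legacy (big-endian blob) branch, each engine's ucode is streamed through an address/data register pair, resetting the address register to 0 before and after the burst. Sketch for PFP; CE and ME repeat it with CP_CE_UCODE_ADDR/DATA and CP_ME_RAM_WADDR/CP_ME_RAM_DATA (register names recalled from sid.h):

    fw_data = (const __be32 *)rdev->pfp_fw->data;
    WREG32(CP_PFP_UCODE_ADDR, 0);
    for (i = 0; i < SI_PFP_UCODE_SIZE; i++)
            WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
    WREG32(CP_PFP_UCODE_ADDR, 0);
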
3558 static int si_cp_start(struct radeon_device *rdev) in si_cp_start() argument
3560 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_start()
3563 r = radeon_ring_lock(rdev, ring, 7 + 4); in si_cp_start()
3572 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1); in si_cp_start()
3582 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3584 si_cp_enable(rdev, true); in si_cp_start()
3586 r = radeon_ring_lock(rdev, ring, si_default_size + 10); in si_cp_start()
3611 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3614 ring = &rdev->ring[i]; in si_cp_start()
3615 r = radeon_ring_lock(rdev, ring, 2); in si_cp_start()
3621 radeon_ring_unlock_commit(rdev, ring, false); in si_cp_start()
3627 static void si_cp_fini(struct radeon_device *rdev) in si_cp_fini() argument
3630 si_cp_enable(rdev, false); in si_cp_fini()
3632 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_fini()
3633 radeon_ring_fini(rdev, ring); in si_cp_fini()
3634 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3636 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_cp_fini()
3637 radeon_ring_fini(rdev, ring); in si_cp_fini()
3638 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3640 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_cp_fini()
3641 radeon_ring_fini(rdev, ring); in si_cp_fini()
3642 radeon_scratch_free(rdev, ring->rptr_save_reg); in si_cp_fini()
3645 static int si_cp_resume(struct radeon_device *rdev) in si_cp_resume() argument
3652 si_enable_gui_idle_interrupt(rdev, false); in si_cp_resume()
3661 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in si_cp_resume()
3665 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_cp_resume()
3679 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3680 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3682 if (rdev->wb.enabled) in si_cp_resume()
3696 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_cp_resume()
3710 WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3711 WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3720 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_cp_resume()
3734 WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC); in si_cp_resume()
3735 WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF); in si_cp_resume()
3743 si_cp_start(rdev); in si_cp_resume()
3744 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; in si_cp_resume()
3745 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true; in si_cp_resume()
3746 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true; in si_cp_resume()
3747 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); in si_cp_resume()
3749 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in si_cp_resume()
3750 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_resume()
3751 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_resume()
3754 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]); in si_cp_resume()
3756 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in si_cp_resume()
3758 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]); in si_cp_resume()
3760 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in si_cp_resume()
3763 si_enable_gui_idle_interrupt(rdev, true); in si_cp_resume()
3765 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in si_cp_resume()
3766 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); in si_cp_resume()
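
The per-ring programming elided between lines 3665 and 3679 sizes the ring and points the hardware at it; rings 1 and 2 repeat it with the CP_RB1_*/CP_RB2_* registers. A sketch for ring 0, recalled from this era of the driver:

    /* ring size field is log2 of the size in 8-byte quanta */
    rb_bufsz = order_base_2(ring->ring_size / 8);
    tmp = (order_base_2(RADEON_GPU_PAGE_SIZE / 8) << 8) | rb_bufsz;
    WREG32(CP_RB0_CNTL, tmp);

    /* initialize the ring buffer's read and write pointers */
    WREG32(CP_RB0_CNTL, tmp | RB_RPTR_WR_ENA);
    ring->wptr = 0;
    WREG32(CP_RB0_WPTR, ring->wptr);

    /* ring base is 256-byte aligned, hence the >> 8 */
    WREG32(CP_RB0_BASE, ring->gpu_addr >> 8);
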
3771 u32 si_gpu_check_soft_reset(struct radeon_device *rdev) in si_gpu_check_soft_reset() argument
3835 if (evergreen_is_display_hung(rdev)) in si_gpu_check_soft_reset()
3852 static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) in si_gpu_soft_reset() argument
3861 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); in si_gpu_soft_reset()
3863 evergreen_print_gpu_status_regs(rdev); in si_gpu_soft_reset()
3864 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in si_gpu_soft_reset()
3866 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in si_gpu_soft_reset()
3870 si_fini_pg(rdev); in si_gpu_soft_reset()
3871 si_fini_cg(rdev); in si_gpu_soft_reset()
3874 si_rlc_stop(rdev); in si_gpu_soft_reset()
3894 evergreen_mc_stop(rdev, &save); in si_gpu_soft_reset()
3895 if (evergreen_mc_wait_for_idle(rdev)) { in si_gpu_soft_reset()
3896 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in si_gpu_soft_reset()
3950 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); in si_gpu_soft_reset()
3964 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in si_gpu_soft_reset()
3978 evergreen_mc_resume(rdev, &save); in si_gpu_soft_reset()
3981 evergreen_print_gpu_status_regs(rdev); in si_gpu_soft_reset()
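
Both reset writes logged above (lines 3950 and 3964) follow the same choreography: OR the reset bits in, read back to post the write, hold for about 50 us, then clear and read back again. Sketch for the GRBM block; SRBM is handled identically:

    if (grbm_soft_reset) {
            tmp = RREG32(GRBM_SOFT_RESET);
            tmp |= grbm_soft_reset;
            dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
            WREG32(GRBM_SOFT_RESET, tmp);
            tmp = RREG32(GRBM_SOFT_RESET);  /* read back to post the write */

            udelay(50);

            tmp &= ~grbm_soft_reset;
            WREG32(GRBM_SOFT_RESET, tmp);
            tmp = RREG32(GRBM_SOFT_RESET);
    }
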
3984 static void si_set_clk_bypass_mode(struct radeon_device *rdev) in si_set_clk_bypass_mode() argument
3996 for (i = 0; i < rdev->usec_timeout; i++) { in si_set_clk_bypass_mode()
4011 static void si_spll_powerdown(struct radeon_device *rdev) in si_spll_powerdown() argument
4032 static void si_gpu_pci_config_reset(struct radeon_device *rdev) in si_gpu_pci_config_reset() argument
4037 dev_info(rdev->dev, "GPU pci config reset\n"); in si_gpu_pci_config_reset()
4042 si_fini_pg(rdev); in si_gpu_pci_config_reset()
4043 si_fini_cg(rdev); in si_gpu_pci_config_reset()
4058 si_rlc_stop(rdev); in si_gpu_pci_config_reset()
4063 evergreen_mc_stop(rdev, &save); in si_gpu_pci_config_reset()
4064 if (evergreen_mc_wait_for_idle(rdev)) { in si_gpu_pci_config_reset()
4065 dev_warn(rdev->dev, "Wait for MC idle timed out !\n"); in si_gpu_pci_config_reset()
4069 si_set_clk_bypass_mode(rdev); in si_gpu_pci_config_reset()
4071 si_spll_powerdown(rdev); in si_gpu_pci_config_reset()
4073 pci_clear_master(rdev->pdev); in si_gpu_pci_config_reset()
4075 radeon_pci_config_reset(rdev); in si_gpu_pci_config_reset()
4077 for (i = 0; i < rdev->usec_timeout; i++) { in si_gpu_pci_config_reset()
4084 int si_asic_reset(struct radeon_device *rdev) in si_asic_reset() argument
4088 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4091 r600_set_bios_scratch_engine_hung(rdev, true); in si_asic_reset()
4094 si_gpu_soft_reset(rdev, reset_mask); in si_asic_reset()
4096 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4100 si_gpu_pci_config_reset(rdev); in si_asic_reset()
4102 reset_mask = si_gpu_check_soft_reset(rdev); in si_asic_reset()
4105 r600_set_bios_scratch_engine_hung(rdev, false); in si_asic_reset()
4119 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) in si_gfx_is_lockup() argument
4121 u32 reset_mask = si_gpu_check_soft_reset(rdev); in si_gfx_is_lockup()
4126 radeon_ring_lockup_update(rdev, ring); in si_gfx_is_lockup()
4129 return radeon_ring_test_lockup(rdev, ring); in si_gfx_is_lockup()
4133 static void si_mc_program(struct radeon_device *rdev) in si_mc_program() argument
4149 evergreen_mc_stop(rdev, &save); in si_mc_program()
4150 if (radeon_mc_wait_for_idle(rdev)) { in si_mc_program()
4151 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in si_mc_program()
4153 if (!ASIC_IS_NODCE(rdev)) in si_mc_program()
4158 rdev->mc.vram_start >> 12); in si_mc_program()
4160 rdev->mc.vram_end >> 12); in si_mc_program()
4162 rdev->vram_scratch.gpu_addr >> 12); in si_mc_program()
4163 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; in si_mc_program()
4164 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); in si_mc_program()
4167 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); in si_mc_program()
4173 if (radeon_mc_wait_for_idle(rdev)) { in si_mc_program()
4174 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in si_mc_program()
4176 evergreen_mc_resume(rdev, &save); in si_mc_program()
4177 if (!ASIC_IS_NODCE(rdev)) { in si_mc_program()
4180 rv515_vga_render_disable(rdev); in si_mc_program()
4184 void si_vram_gtt_location(struct radeon_device *rdev, in si_vram_gtt_location() argument
4189 dev_warn(rdev->dev, "limiting VRAM\n"); in si_vram_gtt_location()
4193 radeon_vram_location(rdev, &rdev->mc, 0); in si_vram_gtt_location()
4194 rdev->mc.gtt_base_align = 0; in si_vram_gtt_location()
4195 radeon_gtt_location(rdev, mc); in si_vram_gtt_location()
4198 static int si_mc_init(struct radeon_device *rdev) in si_mc_init() argument
4204 rdev->mc.vram_is_ddr = true; in si_mc_init()
4244 rdev->mc.vram_width = numchan * chansize; in si_mc_init()
4246 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); in si_mc_init()
4247 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); in si_mc_init()
4256 rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL; in si_mc_init()
4257 rdev->mc.real_vram_size = rdev->mc.mc_vram_size; in si_mc_init()
4258 rdev->mc.visible_vram_size = rdev->mc.aper_size; in si_mc_init()
4259 si_vram_gtt_location(rdev, &rdev->mc); in si_mc_init()
4260 radeon_update_bandwidth_info(rdev); in si_mc_init()
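
The size used on line 4256 comes straight from a config register that the VBIOS programs in megabytes; roughly (register name recalled from sid.h):

    tmp = RREG32(CONFIG_MEMSIZE);           /* size in MB on SI */
    rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL;
    rdev->mc.real_vram_size = rdev->mc.mc_vram_size;
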
4268 void si_pcie_gart_tlb_flush(struct radeon_device *rdev) in si_pcie_gart_tlb_flush() argument
4277 static int si_pcie_gart_enable(struct radeon_device *rdev) in si_pcie_gart_enable() argument
4281 if (rdev->gart.robj == NULL) { in si_pcie_gart_enable()
4282 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); in si_pcie_gart_enable()
4285 r = radeon_gart_table_vram_pin(rdev); in si_pcie_gart_enable()
4308 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); in si_pcie_gart_enable()
4309 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); in si_pcie_gart_enable()
4310 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in si_pcie_gart_enable()
4312 (u32)(rdev->dummy_page.addr >> 12)); in si_pcie_gart_enable()
4324 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1); in si_pcie_gart_enable()
4332 rdev->vm_manager.saved_table_addr[i]); in si_pcie_gart_enable()
4335 rdev->vm_manager.saved_table_addr[i]); in si_pcie_gart_enable()
4340 (u32)(rdev->dummy_page.addr >> 12)); in si_pcie_gart_enable()
4357 si_pcie_gart_tlb_flush(rdev); in si_pcie_gart_enable()
4359 (unsigned)(rdev->mc.gtt_size >> 20), in si_pcie_gart_enable()
4360 (unsigned long long)rdev->gart.table_addr); in si_pcie_gart_enable()
4361 rdev->gart.ready = true; in si_pcie_gart_enable()
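
The recurring >> 12 shifts in this function exist because the VM context registers take 4 KB page frame numbers rather than byte addresses; the >> 8 shifts seen elsewhere in the file mark registers that take 256-byte-aligned bases instead.
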
4365 static void si_pcie_gart_disable(struct radeon_device *rdev) in si_pcie_gart_disable() argument
4375 rdev->vm_manager.saved_table_addr[i] = RREG32(reg); in si_pcie_gart_disable()
4392 radeon_gart_table_vram_unpin(rdev); in si_pcie_gart_disable()
4395 static void si_pcie_gart_fini(struct radeon_device *rdev) in si_pcie_gart_fini() argument
4397 si_pcie_gart_disable(rdev); in si_pcie_gart_fini()
4398 radeon_gart_table_vram_free(rdev); in si_pcie_gart_fini()
4399 radeon_gart_fini(rdev); in si_pcie_gart_fini()
4444 static int si_vm_packet3_ce_check(struct radeon_device *rdev, in si_vm_packet3_ce_check() argument
4517 static int si_vm_packet3_gfx_check(struct radeon_device *rdev, in si_vm_packet3_gfx_check() argument
4635 static int si_vm_packet3_compute_check(struct radeon_device *rdev, in si_vm_packet3_compute_check() argument
4723 int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib) in si_ib_parse() argument
4736 dev_err(rdev->dev, "Packet0 not allowed!\n"); in si_ib_parse()
4745 ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4749 ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4753 ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt); in si_ib_parse()
4756 dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring); in si_ib_parse()
4764 dev_err(rdev->dev, "Unknown packet type %d !\n", pkt.type); in si_ib_parse()
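
The structure of si_ib_parse() is a standard PM4 walk: type-0 register writes are rejected outright, type-2 is a one-dword padding NOP, and type-3 packets are routed to the CE, GFX, or compute checker depending on the ring. Condensed sketch of the loop (the packet accessor macros are the RADEON_CP_PACKET_* helpers from radeon.h):

    do {
            pkt.idx = idx;
            pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
            pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
            switch (pkt.type) {
            case RADEON_PACKET_TYPE0:
                    dev_err(rdev->dev, "Packet0 not allowed!\n");
                    ret = -EINVAL;
                    break;
            case RADEON_PACKET_TYPE2:
                    idx += 1;               /* padding NOP */
                    break;
            case RADEON_PACKET_TYPE3:
                    pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
                    if (ib->is_const_ib)
                            ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt);
                    else if (ib->ring == RADEON_RING_TYPE_GFX_INDEX)
                            ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt);
                    else    /* CP1/CP2; non-PM4 rings error out here */
                            ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt);
                    idx += pkt.count + 2;
                    break;
            default:
                    dev_err(rdev->dev, "Unknown packet type %d !\n", pkt.type);
                    ret = -EINVAL;
                    break;
            }
            if (ret)
                    break;
    } while (idx < ib->length_dw);
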
4785 int si_vm_init(struct radeon_device *rdev) in si_vm_init() argument
4788 rdev->vm_manager.nvm = 16; in si_vm_init()
4790 rdev->vm_manager.vram_base_offset = 0; in si_vm_init()
4795 void si_vm_fini(struct radeon_device *rdev) in si_vm_fini() argument
4808 static void si_vm_decode_fault(struct radeon_device *rdev, in si_vm_decode_fault() argument
4816 if (rdev->family == CHIP_TAHITI) { in si_vm_decode_fault()
5063 void si_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, in si_vm_flush() argument
5115 static void si_wait_for_rlc_serdes(struct radeon_device *rdev) in si_wait_for_rlc_serdes() argument
5119 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5125 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5132 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev, in si_enable_gui_idle_interrupt() argument
5150 for (i = 0; i < rdev->usec_timeout; i++) { in si_enable_gui_idle_interrupt()
5158 static void si_set_uvd_dcm(struct radeon_device *rdev, in si_set_uvd_dcm() argument
5179 void si_init_uvd_internal_cg(struct radeon_device *rdev) in si_init_uvd_internal_cg() argument
5184 si_set_uvd_dcm(rdev, false); in si_init_uvd_internal_cg()
5192 static u32 si_halt_rlc(struct radeon_device *rdev) in si_halt_rlc() argument
5202 si_wait_for_rlc_serdes(rdev); in si_halt_rlc()
5208 static void si_update_rlc(struct radeon_device *rdev, u32 rlc) in si_update_rlc() argument
5217 static void si_enable_dma_pg(struct radeon_device *rdev, bool enable) in si_enable_dma_pg() argument
5222 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA)) in si_enable_dma_pg()
5230 static void si_init_dma_pg(struct radeon_device *rdev) in si_init_dma_pg() argument
5241 static void si_enable_gfx_cgpg(struct radeon_device *rdev, in si_enable_gfx_cgpg() argument
5246 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) { in si_enable_gfx_cgpg()
5266 static void si_init_gfx_cgpg(struct radeon_device *rdev) in si_init_gfx_cgpg() argument
5270 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_gfx_cgpg()
5276 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_gfx_cgpg()
5286 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh) in si_get_cu_active_bitmap() argument
5291 si_select_se_sh(rdev, se, sh); in si_get_cu_active_bitmap()
5294 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_get_cu_active_bitmap()
5301 for (i = 0; i < rdev->config.si.max_cu_per_sh; i ++) { in si_get_cu_active_bitmap()
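
The bitmap combines the hardware harvest fuses with the user-disable register: the inactive-CU bits sit in the top half of CC_GC_SHADER_ARRAY_CONFIG, are merged with GC_USER_SHADER_ARRAY_CONFIG, then inverted under a max_cu_per_sh-wide mask. Sketch of the body between the lines listed above:

    si_select_se_sh(rdev, se, sh);
    tmp = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
    tmp1 = RREG32(GC_USER_SHADER_ARRAY_CONFIG);
    si_select_se_sh(rdev, 0xffffffff, 0xffffffff);

    tmp &= 0xffff0000;      /* keep the INACTIVE_CUS field */
    tmp |= tmp1;
    tmp >>= 16;

    for (i = 0; i < rdev->config.si.max_cu_per_sh; i++) {
            mask <<= 1;
            mask |= 1;
    }

    return (~tmp) & mask;   /* one bit per active CU */
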
5309 static void si_init_ao_cu_mask(struct radeon_device *rdev) in si_init_ao_cu_mask() argument
5315 for (i = 0; i < rdev->config.si.max_shader_engines; i++) { in si_init_ao_cu_mask()
5316 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) { in si_init_ao_cu_mask()
5320 for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) { in si_init_ao_cu_mask()
5321 if (si_get_cu_active_bitmap(rdev, i, j) & mask) { in si_init_ao_cu_mask()
5342 static void si_enable_cgcg(struct radeon_device *rdev, in si_enable_cgcg() argument
5349 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) { in si_enable_cgcg()
5350 si_enable_gui_idle_interrupt(rdev, true); in si_enable_cgcg()
5354 tmp = si_halt_rlc(rdev); in si_enable_cgcg()
5360 si_wait_for_rlc_serdes(rdev); in si_enable_cgcg()
5362 si_update_rlc(rdev, tmp); in si_enable_cgcg()
5368 si_enable_gui_idle_interrupt(rdev, false); in si_enable_cgcg()
5382 static void si_enable_mgcg(struct radeon_device *rdev, in si_enable_mgcg() argument
5387 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) { in si_enable_mgcg()
5393 if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) { in si_enable_mgcg()
5405 tmp = si_halt_rlc(rdev); in si_enable_mgcg()
5411 si_update_rlc(rdev, tmp); in si_enable_mgcg()
5428 tmp = si_halt_rlc(rdev); in si_enable_mgcg()
5434 si_update_rlc(rdev, tmp); in si_enable_mgcg()
5438 static void si_enable_uvd_mgcg(struct radeon_device *rdev, in si_enable_uvd_mgcg() argument
5443 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) { in si_enable_uvd_mgcg()
5483 static void si_enable_mc_ls(struct radeon_device *rdev, in si_enable_mc_ls() argument
5491 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS)) in si_enable_mc_ls()
5500 static void si_enable_mc_mgcg(struct radeon_device *rdev, in si_enable_mc_mgcg() argument
5508 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG)) in si_enable_mc_mgcg()
5517 static void si_enable_dma_mgcg(struct radeon_device *rdev, in si_enable_dma_mgcg() argument
5523 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) { in si_enable_dma_mgcg()
5554 static void si_enable_bif_mgls(struct radeon_device *rdev, in si_enable_bif_mgls() argument
5561 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS)) in si_enable_bif_mgls()
5572 static void si_enable_hdp_mgcg(struct radeon_device *rdev, in si_enable_hdp_mgcg() argument
5579 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG)) in si_enable_hdp_mgcg()
5588 static void si_enable_hdp_ls(struct radeon_device *rdev, in si_enable_hdp_ls() argument
5595 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS)) in si_enable_hdp_ls()
5604 static void si_update_cg(struct radeon_device *rdev, in si_update_cg() argument
5608 si_enable_gui_idle_interrupt(rdev, false); in si_update_cg()
5611 si_enable_mgcg(rdev, true); in si_update_cg()
5612 si_enable_cgcg(rdev, true); in si_update_cg()
5614 si_enable_cgcg(rdev, false); in si_update_cg()
5615 si_enable_mgcg(rdev, false); in si_update_cg()
5617 si_enable_gui_idle_interrupt(rdev, true); in si_update_cg()
5621 si_enable_mc_mgcg(rdev, enable); in si_update_cg()
5622 si_enable_mc_ls(rdev, enable); in si_update_cg()
5626 si_enable_dma_mgcg(rdev, enable); in si_update_cg()
5630 si_enable_bif_mgls(rdev, enable); in si_update_cg()
5634 if (rdev->has_uvd) { in si_update_cg()
5635 si_enable_uvd_mgcg(rdev, enable); in si_update_cg()
5640 si_enable_hdp_mgcg(rdev, enable); in si_update_cg()
5641 si_enable_hdp_ls(rdev, enable); in si_update_cg()
5645 static void si_init_cg(struct radeon_device *rdev) in si_init_cg() argument
5647 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX | in si_init_cg()
5652 if (rdev->has_uvd) { in si_init_cg()
5653 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, true); in si_init_cg()
5654 si_init_uvd_internal_cg(rdev); in si_init_cg()
5658 static void si_fini_cg(struct radeon_device *rdev) in si_fini_cg() argument
5660 if (rdev->has_uvd) { in si_fini_cg()
5661 si_update_cg(rdev, RADEON_CG_BLOCK_UVD, false); in si_fini_cg()
5663 si_update_cg(rdev, (RADEON_CG_BLOCK_GFX | in si_fini_cg()
5670 u32 si_get_csb_size(struct radeon_device *rdev) in si_get_csb_size() argument
5676 if (rdev->rlc.cs_data == NULL) in si_get_csb_size()
5684 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { in si_get_csb_size()
5702 void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer) in si_get_csb_buffer() argument
5708 if (rdev->rlc.cs_data == NULL) in si_get_csb_buffer()
5720 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) { in si_get_csb_buffer()
5736 switch (rdev->family) { in si_get_csb_buffer()
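
si_get_csb_size() just pre-computes how many dwords si_get_csb_buffer() will emit: a fixed preamble and postamble plus 2 + reg_count dwords (a SET_CONTEXT_REG header plus payload) per SECT_CONTEXT extent in rlc.cs_data. Sketch of the counting:

    /* begin clear state + context control */
    count = 2 + 3;

    for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
            for (ext = sect->section; ext->extent != NULL; ++ext) {
                    if (sect->id == SECT_CONTEXT)
                            count += 2 + ext->reg_count;
                    else
                            return 0;       /* only context sections on SI */
            }
    }

    /* pa_sc_raster_config + end clear state + clear state */
    count += 3 + 2 + 2;
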
5762 static void si_init_pg(struct radeon_device *rdev) in si_init_pg() argument
5764 if (rdev->pg_flags) { in si_init_pg()
5765 if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) { in si_init_pg()
5766 si_init_dma_pg(rdev); in si_init_pg()
5768 si_init_ao_cu_mask(rdev); in si_init_pg()
5769 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) { in si_init_pg()
5770 si_init_gfx_cgpg(rdev); in si_init_pg()
5772 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_pg()
5773 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_pg()
5775 si_enable_dma_pg(rdev, true); in si_init_pg()
5776 si_enable_gfx_cgpg(rdev, true); in si_init_pg()
5778 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in si_init_pg()
5779 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in si_init_pg()
5783 static void si_fini_pg(struct radeon_device *rdev) in si_fini_pg() argument
5785 if (rdev->pg_flags) { in si_fini_pg()
5786 si_enable_dma_pg(rdev, false); in si_fini_pg()
5787 si_enable_gfx_cgpg(rdev, false); in si_fini_pg()
5794 void si_rlc_reset(struct radeon_device *rdev) in si_rlc_reset() argument
5806 static void si_rlc_stop(struct radeon_device *rdev) in si_rlc_stop() argument
5810 si_enable_gui_idle_interrupt(rdev, false); in si_rlc_stop()
5812 si_wait_for_rlc_serdes(rdev); in si_rlc_stop()
5815 static void si_rlc_start(struct radeon_device *rdev) in si_rlc_start() argument
5819 si_enable_gui_idle_interrupt(rdev, true); in si_rlc_start()
5824 static bool si_lbpw_supported(struct radeon_device *rdev) in si_lbpw_supported() argument
5835 static void si_enable_lbpw(struct radeon_device *rdev, bool enable) in si_enable_lbpw() argument
5847 si_select_se_sh(rdev, 0xffffffff, 0xffffffff); in si_enable_lbpw()
5852 static int si_rlc_resume(struct radeon_device *rdev) in si_rlc_resume() argument
5856 if (!rdev->rlc_fw) in si_rlc_resume()
5859 si_rlc_stop(rdev); in si_rlc_resume()
5861 si_rlc_reset(rdev); in si_rlc_resume()
5863 si_init_pg(rdev); in si_rlc_resume()
5865 si_init_cg(rdev); in si_rlc_resume()
5877 if (rdev->new_fw) { in si_rlc_resume()
5879 (const struct rlc_firmware_header_v1_0 *)rdev->rlc_fw->data; in si_rlc_resume()
5882 (rdev->rlc_fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in si_rlc_resume()
5892 (const __be32 *)rdev->rlc_fw->data; in si_rlc_resume()
5900 si_enable_lbpw(rdev, si_lbpw_supported(rdev)); in si_rlc_resume()
5902 si_rlc_start(rdev); in si_rlc_resume()
5907 static void si_enable_interrupts(struct radeon_device *rdev) in si_enable_interrupts() argument
5916 rdev->ih.enabled = true; in si_enable_interrupts()
5919 static void si_disable_interrupts(struct radeon_device *rdev) in si_disable_interrupts() argument
5931 rdev->ih.enabled = false; in si_disable_interrupts()
5932 rdev->ih.rptr = 0; in si_disable_interrupts()
5935 static void si_disable_interrupt_state(struct radeon_device *rdev) in si_disable_interrupt_state() argument
5950 if (rdev->num_crtc >= 2) { in si_disable_interrupt_state()
5954 if (rdev->num_crtc >= 4) { in si_disable_interrupt_state()
5958 if (rdev->num_crtc >= 6) { in si_disable_interrupt_state()
5963 if (rdev->num_crtc >= 2) { in si_disable_interrupt_state()
5967 if (rdev->num_crtc >= 4) { in si_disable_interrupt_state()
5971 if (rdev->num_crtc >= 6) { in si_disable_interrupt_state()
5976 if (!ASIC_IS_NODCE(rdev)) { in si_disable_interrupt_state()
5994 static int si_irq_init(struct radeon_device *rdev) in si_irq_init() argument
6001 ret = r600_ih_ring_alloc(rdev); in si_irq_init()
6006 si_disable_interrupts(rdev); in si_irq_init()
6009 ret = si_rlc_resume(rdev); in si_irq_init()
6011 r600_ih_ring_fini(rdev); in si_irq_init()
6017 WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8); in si_irq_init()
6027 WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8); in si_irq_init()
6028 rb_bufsz = order_base_2(rdev->ih.ring_size / 4); in si_irq_init()
6034 if (rdev->wb.enabled) in si_irq_init()
6038 WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC); in si_irq_init()
6039 WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF); in si_irq_init()
6050 if (rdev->msi_enabled) in si_irq_init()
6055 si_disable_interrupt_state(rdev); in si_irq_init()
6057 pci_set_master(rdev->pdev); in si_irq_init()
6060 si_enable_interrupts(rdev); in si_irq_init()
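
The math on line 6028 computes log2 of the IH ring size in dwords; it lands in IH_RB_CNTL together with the overflow-tracking enables, and with writeback on, the hardware mirrors wptr into the WB slot that si_get_ih_wptr() reads. Roughly:

    rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
    ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
                  IH_WPTR_OVERFLOW_CLEAR |
                  (rb_bufsz << 1));

    if (rdev->wb.enabled)
            ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
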
6065 int si_irq_set(struct radeon_device *rdev) in si_irq_set() argument
6075 if (!rdev->irq.installed) { in si_irq_set()
6080 if (!rdev->ih.enabled) { in si_irq_set()
6081 si_disable_interrupts(rdev); in si_irq_set()
6083 si_disable_interrupt_state(rdev); in si_irq_set()
6090 if (!ASIC_IS_NODCE(rdev)) { in si_irq_set()
6106 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { in si_irq_set()
6110 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) { in si_irq_set()
6114 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) { in si_irq_set()
6118 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { in si_irq_set()
6123 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) { in si_irq_set()
6127 if (rdev->irq.crtc_vblank_int[0] || in si_irq_set()
6128 atomic_read(&rdev->irq.pflip[0])) { in si_irq_set()
6132 if (rdev->irq.crtc_vblank_int[1] || in si_irq_set()
6133 atomic_read(&rdev->irq.pflip[1])) { in si_irq_set()
6137 if (rdev->irq.crtc_vblank_int[2] || in si_irq_set()
6138 atomic_read(&rdev->irq.pflip[2])) { in si_irq_set()
6142 if (rdev->irq.crtc_vblank_int[3] || in si_irq_set()
6143 atomic_read(&rdev->irq.pflip[3])) { in si_irq_set()
6147 if (rdev->irq.crtc_vblank_int[4] || in si_irq_set()
6148 atomic_read(&rdev->irq.pflip[4])) { in si_irq_set()
6152 if (rdev->irq.crtc_vblank_int[5] || in si_irq_set()
6153 atomic_read(&rdev->irq.pflip[5])) { in si_irq_set()
6157 if (rdev->irq.hpd[0]) { in si_irq_set()
6161 if (rdev->irq.hpd[1]) { in si_irq_set()
6165 if (rdev->irq.hpd[2]) { in si_irq_set()
6169 if (rdev->irq.hpd[3]) { in si_irq_set()
6173 if (rdev->irq.hpd[4]) { in si_irq_set()
6177 if (rdev->irq.hpd[5]) { in si_irq_set()
6191 if (rdev->irq.dpm_thermal) { in si_irq_set()
6196 if (rdev->num_crtc >= 2) { in si_irq_set()
6200 if (rdev->num_crtc >= 4) { in si_irq_set()
6204 if (rdev->num_crtc >= 6) { in si_irq_set()
6209 if (rdev->num_crtc >= 2) { in si_irq_set()
6215 if (rdev->num_crtc >= 4) { in si_irq_set()
6221 if (rdev->num_crtc >= 6) { in si_irq_set()
6228 if (!ASIC_IS_NODCE(rdev)) { in si_irq_set()
6245 static inline void si_irq_ack(struct radeon_device *rdev) in si_irq_ack() argument
6249 if (ASIC_IS_NODCE(rdev)) in si_irq_ack()
6252 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS); in si_irq_ack()
6253 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE); in si_irq_ack()
6254 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2); in si_irq_ack()
6255 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3); in si_irq_ack()
6256 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4); in si_irq_ack()
6257 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5); in si_irq_ack()
6258 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET); in si_irq_ack()
6259 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET); in si_irq_ack()
6260 if (rdev->num_crtc >= 4) { in si_irq_ack()
6261 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET); in si_irq_ack()
6262 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET); in si_irq_ack()
6264 if (rdev->num_crtc >= 6) { in si_irq_ack()
6265 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET); in si_irq_ack()
6266 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET); in si_irq_ack()
6269 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6271 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6273 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) in si_irq_ack()
6275 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) in si_irq_ack()
6277 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) in si_irq_ack()
6279 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) in si_irq_ack()
6282 if (rdev->num_crtc >= 4) { in si_irq_ack()
6283 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6285 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6287 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) in si_irq_ack()
6289 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) in si_irq_ack()
6291 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) in si_irq_ack()
6293 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) in si_irq_ack()
6297 if (rdev->num_crtc >= 6) { in si_irq_ack()
6298 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6300 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED) in si_irq_ack()
6302 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) in si_irq_ack()
6304 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) in si_irq_ack()
6306 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) in si_irq_ack()
6308 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) in si_irq_ack()
6312 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) { in si_irq_ack()
6317 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) { in si_irq_ack()
6322 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) { in si_irq_ack()
6327 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) { in si_irq_ack()
6332 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) { in si_irq_ack()
6337 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) { in si_irq_ack()
6343 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) { in si_irq_ack()
6348 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) { in si_irq_ack()
6353 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) { in si_irq_ack()
6358 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) { in si_irq_ack()
6363 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) { in si_irq_ack()
6368 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) { in si_irq_ack()
6375 static void si_irq_disable(struct radeon_device *rdev) in si_irq_disable() argument
6377 si_disable_interrupts(rdev); in si_irq_disable()
6380 si_irq_ack(rdev); in si_irq_disable()
6381 si_disable_interrupt_state(rdev); in si_irq_disable()
6384 static void si_irq_suspend(struct radeon_device *rdev) in si_irq_suspend() argument
6386 si_irq_disable(rdev); in si_irq_suspend()
6387 si_rlc_stop(rdev); in si_irq_suspend()
6390 static void si_irq_fini(struct radeon_device *rdev) in si_irq_fini() argument
6392 si_irq_suspend(rdev); in si_irq_fini()
6393 r600_ih_ring_fini(rdev); in si_irq_fini()
6396 static inline u32 si_get_ih_wptr(struct radeon_device *rdev) in si_get_ih_wptr() argument
6400 if (rdev->wb.enabled) in si_get_ih_wptr()
6401 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); in si_get_ih_wptr()
6411 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", in si_get_ih_wptr()
6412 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); in si_get_ih_wptr()
6413 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; in si_get_ih_wptr()
6418 return (wptr & rdev->ih.ptr_mask); in si_get_ih_wptr()
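
The overflow path that prints the warning on line 6411 drops the overflow flag, resynchronizes rptr to the oldest vector still intact (wptr + 16, one 16-byte vector past the write pointer), and clears the hardware flag so it can latch again. Sketch:

    if (wptr & RB_OVERFLOW) {
            wptr &= ~RB_OVERFLOW;
            /* resume parsing from the last vector not overwritten */
            rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
            tmp = RREG32(IH_RB_CNTL);
            tmp |= IH_WPTR_OVERFLOW_CLEAR;
            WREG32(IH_RB_CNTL, tmp);
    }
    return (wptr & rdev->ih.ptr_mask);
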
6431 int si_irq_process(struct radeon_device *rdev) in si_irq_process() argument
6442 if (!rdev->ih.enabled || rdev->shutdown) in si_irq_process()
6445 wptr = si_get_ih_wptr(rdev); in si_irq_process()
6449 if (atomic_xchg(&rdev->ih.lock, 1)) in si_irq_process()
6452 rptr = rdev->ih.rptr; in si_irq_process()
6459 si_irq_ack(rdev); in si_irq_process()
6464 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; in si_irq_process()
6465 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; in si_irq_process()
6466 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff; in si_irq_process()
6472 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)) in si_irq_process()
6475 if (rdev->irq.crtc_vblank_int[0]) { in si_irq_process()
6476 drm_handle_vblank(rdev->ddev, 0); in si_irq_process()
6477 rdev->pm.vblank_sync = true; in si_irq_process()
6478 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6480 if (atomic_read(&rdev->irq.pflip[0])) in si_irq_process()
6481 radeon_crtc_handle_vblank(rdev, 0); in si_irq_process()
6482 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT; in si_irq_process()
6487 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)) in si_irq_process()
6490 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT; in si_irq_process()
6502 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)) in si_irq_process()
6505 if (rdev->irq.crtc_vblank_int[1]) { in si_irq_process()
6506 drm_handle_vblank(rdev->ddev, 1); in si_irq_process()
6507 rdev->pm.vblank_sync = true; in si_irq_process()
6508 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6510 if (atomic_read(&rdev->irq.pflip[1])) in si_irq_process()
6511 radeon_crtc_handle_vblank(rdev, 1); in si_irq_process()
6512 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT; in si_irq_process()
6517 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)) in si_irq_process()
6520 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT; in si_irq_process()
6532 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)) in si_irq_process()
6535 if (rdev->irq.crtc_vblank_int[2]) { in si_irq_process()
6536 drm_handle_vblank(rdev->ddev, 2); in si_irq_process()
6537 rdev->pm.vblank_sync = true; in si_irq_process()
6538 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6540 if (atomic_read(&rdev->irq.pflip[2])) in si_irq_process()
6541 radeon_crtc_handle_vblank(rdev, 2); in si_irq_process()
6542 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT; in si_irq_process()
6547 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)) in si_irq_process()
6550 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT; in si_irq_process()
6562 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)) in si_irq_process()
6565 if (rdev->irq.crtc_vblank_int[3]) { in si_irq_process()
6566 drm_handle_vblank(rdev->ddev, 3); in si_irq_process()
6567 rdev->pm.vblank_sync = true; in si_irq_process()
6568 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6570 if (atomic_read(&rdev->irq.pflip[3])) in si_irq_process()
6571 radeon_crtc_handle_vblank(rdev, 3); in si_irq_process()
6572 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT; in si_irq_process()
6577 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)) in si_irq_process()
6580 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT; in si_irq_process()
6592 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)) in si_irq_process()
6595 if (rdev->irq.crtc_vblank_int[4]) { in si_irq_process()
6596 drm_handle_vblank(rdev->ddev, 4); in si_irq_process()
6597 rdev->pm.vblank_sync = true; in si_irq_process()
6598 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6600 if (atomic_read(&rdev->irq.pflip[4])) in si_irq_process()
6601 radeon_crtc_handle_vblank(rdev, 4); in si_irq_process()
6602 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT; in si_irq_process()
6607 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)) in si_irq_process()
6610 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT; in si_irq_process()
6622 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)) in si_irq_process()
6625 if (rdev->irq.crtc_vblank_int[5]) { in si_irq_process()
6626 drm_handle_vblank(rdev->ddev, 5); in si_irq_process()
6627 rdev->pm.vblank_sync = true; in si_irq_process()
6628 wake_up(&rdev->irq.vblank_queue); in si_irq_process()
6630 if (atomic_read(&rdev->irq.pflip[5])) in si_irq_process()
6631 radeon_crtc_handle_vblank(rdev, 5); in si_irq_process()
6632 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT; in si_irq_process()
6637 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)) in si_irq_process()
6640 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT; in si_irq_process()
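Note: the six vblank/vline blocks above (source lines 6472-6640) are one pattern stamped out per CRTC, differing only in which cached status word (disp_int .. disp_int_cont5) and which LB_Dn_* bit is tested and then cleared. A minimal table-driven sketch of the same dispatch, with stand-in types and a stub in place of drm_handle_vblank()/radeon_crtc_handle_vblank() (none of these names are the driver's):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NUM_CRTCS 6

    struct disp_irq {
        uint32_t status[NUM_CRTCS];     /* cached disp_int .. disp_int_cont5 */
        uint32_t vblank_bit[NUM_CRTCS]; /* LB_D1..LB_D6 vblank bits */
        bool vblank_armed[NUM_CRTCS];   /* like rdev->irq.crtc_vblank_int[n] */
    };

    static void handle_vblank(int crtc) /* stub for drm_handle_vblank() */
    {
        printf("vblank, crtc %d\n", crtc);
    }

    static void dispatch_crtc_vblank(struct disp_irq *d, int crtc)
    {
        if (!(d->status[crtc] & d->vblank_bit[crtc]))
            return;                               /* interrupt not latched */
        if (d->vblank_armed[crtc])
            handle_vblank(crtc);                  /* wake vblank waiters */
        d->status[crtc] &= ~d->vblank_bit[crtc];  /* clear the cached bit */
    }

The driver keeps the blocks unrolled per CRTC because the status words and bit positions do not form a uniform register stride; the table-driven form is only a reading aid.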
6657 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1); in si_irq_process()
6662 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT)) in si_irq_process()
6665 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT; in si_irq_process()
6671 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT)) in si_irq_process()
6674 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT; in si_irq_process()
6680 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT)) in si_irq_process()
6683 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT; in si_irq_process()
6689 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT)) in si_irq_process()
6692 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT; in si_irq_process()
6698 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT)) in si_irq_process()
6701 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT; in si_irq_process()
6707 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT)) in si_irq_process()
6710 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT; in si_irq_process()
6716 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT)) in si_irq_process()
6719 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT; in si_irq_process()
6725 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT)) in si_irq_process()
6728 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT; in si_irq_process()
6734 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT)) in si_irq_process()
6737 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT; in si_irq_process()
6743 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT)) in si_irq_process()
6746 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT; in si_irq_process()
6752 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT)) in si_irq_process()
6755 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT; in si_irq_process()
6761 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT)) in si_irq_process()
6764 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT; in si_irq_process()
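The HPD and HPD_RX handlers above do the same test-and-clear on the cached status bits but defer all real connector work: the IRQ path only sets a flag, and the heavy lifting runs later in process context via the work items scheduled at source lines 6852/6854 once the ring is drained. A sketch of that deferral using the kernel workqueue API the driver relies on (struct and function names here are stand-ins; the driver's own work item is rdev->hotplug_work, scheduled with zero delay):

    #include <linux/types.h>
    #include <linux/workqueue.h>

    struct hpd_state {
        struct delayed_work hotplug_work;  /* cf. schedule at line 6854 */
        bool pending;
    };

    static void hotplug_worker(struct work_struct *work)
    {
        struct hpd_state *s =
            container_of(to_delayed_work(work), struct hpd_state,
                         hotplug_work);
        s->pending = false;
        /* re-probe connectors, emit the uevent - safe to sleep here */
    }

    /* IRQ path: only record that a pin fired, never touch connectors:
     *     s->pending = true;
     * after the IH ring is drained:
     *     if (s->pending)
     *         schedule_delayed_work(&s->hotplug_work, 0);
     * setup (once): INIT_DELAYED_WORK(&s->hotplug_work, hotplug_worker); */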
6780 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX); in si_irq_process()
6790 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data); in si_irq_process()
6791 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in si_irq_process()
6793 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in si_irq_process()
6795 si_vm_decode_fault(rdev, status, addr); in si_irq_process()
6798 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_irq_process()
6801 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_irq_process()
6804 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_irq_process()
6810 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_irq_process()
6813 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_irq_process()
6816 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_irq_process()
6822 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX); in si_irq_process()
6826 rdev->pm.dpm.thermal.high_to_low = false; in si_irq_process()
6831 rdev->pm.dpm.thermal.high_to_low = true; in si_irq_process()
6839 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); in si_irq_process()
6848 rptr &= rdev->ih.ptr_mask; in si_irq_process()
6852 schedule_work(&rdev->dp_work); in si_irq_process()
6854 schedule_delayed_work(&rdev->hotplug_work, 0); in si_irq_process()
6855 if (queue_thermal && rdev->pm.dpm_enabled) in si_irq_process()
6856 schedule_work(&rdev->pm.dpm.thermal.work); in si_irq_process()
6857 rdev->ih.rptr = rptr; in si_irq_process()
6858 atomic_set(&rdev->ih.lock, 0); in si_irq_process()
6861 wptr = si_get_ih_wptr(rdev); in si_irq_process()
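The tail of si_irq_process() (source lines 6848-6861) is the classic ring-consumer epilogue: mask rptr for wraparound, write it back to hardware, drop the lock, then re-read the hardware write pointer and restart if new vectors raced in. A freestanding model of that loop, with a stub standing in for si_get_ih_wptr() and the IH_RB_RPTR register write reduced to a comment:

    #include <stdint.h>
    #include <stdio.h>

    #define RING_DW 1024                       /* ring size in dwords, pow2 */

    struct ih {
        uint32_t ring[RING_DW];
        uint32_t rptr, wptr;
        uint32_t ptr_mask;                     /* RING_DW - 1 */
    };

    static uint32_t hw_wptr(struct ih *ih)    /* stub for si_get_ih_wptr() */
    {
        return ih->wptr;
    }

    static void ih_drain(struct ih *ih)
    {
        uint32_t wptr = hw_wptr(ih);
    restart:
        while (ih->rptr != wptr) {
            uint32_t src_id   = ih->ring[ih->rptr]     & 0xff;
            uint32_t src_data = ih->ring[ih->rptr + 1] & 0x0fffffff;
            printf("src %u data 0x%x\n", src_id, src_data);
            ih->rptr = (ih->rptr + 4) & ih->ptr_mask;  /* 16-byte vectors */
        }
        /* here the driver writes rptr to IH_RB_RPTR and releases ih.lock,
         * then re-checks for vectors that arrived while draining */
        wptr = hw_wptr(ih);
        if (wptr != ih->rptr)
            goto restart;
    }

The final re-check is what closes the race between clearing the "ring busy" state and a late interrupt vector; without it an entry could sit unprocessed until the next IRQ.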
6871 static int si_startup(struct radeon_device *rdev) in si_startup() argument
6877 si_pcie_gen3_enable(rdev); in si_startup()
6879 si_program_aspm(rdev); in si_startup()
6882 r = r600_vram_scratch_init(rdev); in si_startup()
6886 si_mc_program(rdev); in si_startup()
6888 if (!rdev->pm.dpm_enabled) { in si_startup()
6889 r = si_mc_load_microcode(rdev); in si_startup()
6896 r = si_pcie_gart_enable(rdev); in si_startup()
6899 si_gpu_init(rdev); in si_startup()
6902 if (rdev->family == CHIP_VERDE) { in si_startup()
6903 rdev->rlc.reg_list = verde_rlc_save_restore_register_list; in si_startup()
6904 rdev->rlc.reg_list_size = in si_startup()
6907 rdev->rlc.cs_data = si_cs_data; in si_startup()
6908 r = sumo_rlc_init(rdev); in si_startup()
6915 r = radeon_wb_init(rdev); in si_startup()
6919 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); in si_startup()
6921 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6925 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in si_startup()
6927 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6931 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in si_startup()
6933 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in si_startup()
6937 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); in si_startup()
6939 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in si_startup()
6943 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); in si_startup()
6945 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in si_startup()
6949 if (rdev->has_uvd) { in si_startup()
6950 r = uvd_v2_2_resume(rdev); in si_startup()
6952 r = radeon_fence_driver_start_ring(rdev, in si_startup()
6955 dev_err(rdev->dev, "UVD fences init error (%d).\n", r); in si_startup()
6958 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in si_startup()
6961 r = radeon_vce_resume(rdev); in si_startup()
6963 r = vce_v1_0_resume(rdev); in si_startup()
6965 r = radeon_fence_driver_start_ring(rdev, in si_startup()
6968 r = radeon_fence_driver_start_ring(rdev, in si_startup()
6972 dev_err(rdev->dev, "VCE init error (%d).\n", r); in si_startup()
6973 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in si_startup()
6974 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in si_startup()
6978 if (!rdev->irq.installed) { in si_startup()
6979 r = radeon_irq_kms_init(rdev); in si_startup()
6984 r = si_irq_init(rdev); in si_startup()
6987 radeon_irq_kms_fini(rdev); in si_startup()
6990 si_irq_set(rdev); in si_startup()
6992 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_startup()
6993 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in si_startup()
6998 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_startup()
6999 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET, in si_startup()
7004 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_startup()
7005 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET, in si_startup()
7010 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in si_startup()
7011 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in si_startup()
7016 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in si_startup()
7017 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, in si_startup()
7022 r = si_cp_load_microcode(rdev); in si_startup()
7025 r = si_cp_resume(rdev); in si_startup()
7029 r = cayman_dma_resume(rdev); in si_startup()
7033 if (rdev->has_uvd) { in si_startup()
7034 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in si_startup()
7036 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, in si_startup()
7039 r = uvd_v1_0_init(rdev); in si_startup()
7047 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; in si_startup()
7049 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, in si_startup()
7052 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; in si_startup()
7054 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, in si_startup()
7058 r = vce_v1_0_init(rdev); in si_startup()
7062 r = radeon_ib_pool_init(rdev); in si_startup()
7064 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); in si_startup()
7068 r = radeon_vm_manager_init(rdev); in si_startup()
7070 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); in si_startup()
7074 r = radeon_audio_init(rdev); in si_startup()
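si_startup() brings blocks up in strict dependency order: MC microcode before enabling GART, write-back and fence rings before the CP/DMA rings that signal them, the interrupt controller before the engines that raise interrupts, and the IB pool and VM manager only once rings work. Every step early-returns on failure and leaves unwinding to the caller (si_init() or si_resume()). A sketch of that shape with hypothetical step names, not the driver's mechanism:

    #include <stdio.h>

    typedef int (*step_fn)(void *dev);

    struct step {
        const char *name;
        step_fn fn;
    };

    /* run steps in order; abort on the first failure, as si_startup()
     * does. the caller decides whether to tear everything down (si_init)
     * or merely clear accel_working (si_resume). */
    static int bring_up(void *dev, const struct step *steps, int n)
    {
        for (int i = 0; i < n; i++) {
            int r = steps[i].fn(dev);
            if (r) {
                fprintf(stderr, "startup: %s failed (%d)\n",
                        steps[i].name, r);
                return r;
            }
        }
        return 0;
    }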
7081 int si_resume(struct radeon_device *rdev) in si_resume() argument
7090 atom_asic_init(rdev->mode_info.atom_context); in si_resume()
7093 si_init_golden_registers(rdev); in si_resume()
7095 if (rdev->pm.pm_method == PM_METHOD_DPM) in si_resume()
7096 radeon_pm_resume(rdev); in si_resume()
7098 rdev->accel_working = true; in si_resume()
7099 r = si_startup(rdev); in si_resume()
7102 rdev->accel_working = false; in si_resume()
7110 int si_suspend(struct radeon_device *rdev) in si_suspend() argument
7112 radeon_pm_suspend(rdev); in si_suspend()
7113 radeon_audio_fini(rdev); in si_suspend()
7114 radeon_vm_manager_fini(rdev); in si_suspend()
7115 si_cp_enable(rdev, false); in si_suspend()
7116 cayman_dma_stop(rdev); in si_suspend()
7117 if (rdev->has_uvd) { in si_suspend()
7118 uvd_v1_0_fini(rdev); in si_suspend()
7119 radeon_uvd_suspend(rdev); in si_suspend()
7120 radeon_vce_suspend(rdev); in si_suspend()
7122 si_fini_pg(rdev); in si_suspend()
7123 si_fini_cg(rdev); in si_suspend()
7124 si_irq_suspend(rdev); in si_suspend()
7125 radeon_wb_disable(rdev); in si_suspend()
7126 si_pcie_gart_disable(rdev); in si_suspend()
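si_suspend() undoes si_startup() roughly back-to-front: consumers first (audio, VM manager, CP, DMA, UVD/VCE), then power and clock gating, then the interrupt path, write-back, and finally GART. A LIFO register-and-unwind sketch of that pairing (a stand-in op table, purely illustrative; the driver hard-codes the order instead):

    #define MAX_OPS 16

    struct teardown {
        void (*fini[MAX_OPS])(void *dev);
        int n;
    };

    static void on_success(struct teardown *t, void (*fini)(void *dev))
    {
        if (t->n < MAX_OPS)
            t->fini[t->n++] = fini;   /* remember how to undo this step */
    }

    static void unwind(struct teardown *t, void *dev)
    {
        while (t->n > 0)
            t->fini[--t->n](dev);     /* newest first, like si_suspend() */
    }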
7136 int si_init(struct radeon_device *rdev) in si_init() argument
7138 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_init()
7142 if (!radeon_get_bios(rdev)) { in si_init()
7143 if (ASIC_IS_AVIVO(rdev)) in si_init()
7147 if (!rdev->is_atom_bios) { in si_init()
7148 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n"); in si_init()
7151 r = radeon_atombios_init(rdev); in si_init()
7156 if (!radeon_card_posted(rdev)) { in si_init()
7157 if (!rdev->bios) { in si_init()
7158 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); in si_init()
7162 atom_asic_init(rdev->mode_info.atom_context); in si_init()
7165 si_init_golden_registers(rdev); in si_init()
7167 si_scratch_init(rdev); in si_init()
7169 radeon_surface_init(rdev); in si_init()
7171 radeon_get_clock_info(rdev->ddev); in si_init()
7174 r = radeon_fence_driver_init(rdev); in si_init()
7179 r = si_mc_init(rdev); in si_init()
7183 r = radeon_bo_init(rdev); in si_init()
7187 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw || in si_init()
7188 !rdev->rlc_fw || !rdev->mc_fw) { in si_init()
7189 r = si_init_microcode(rdev); in si_init()
7197 radeon_pm_init(rdev); in si_init()
7199 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in si_init()
7201 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7203 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]; in si_init()
7205 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7207 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]; in si_init()
7209 r600_ring_init(rdev, ring, 1024 * 1024); in si_init()
7211 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in si_init()
7213 r600_ring_init(rdev, ring, 64 * 1024); in si_init()
7215 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in si_init()
7217 r600_ring_init(rdev, ring, 64 * 1024); in si_init()
7219 if (rdev->has_uvd) { in si_init()
7220 r = radeon_uvd_init(rdev); in si_init()
7222 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in si_init()
7224 r600_ring_init(rdev, ring, 4096); in si_init()
7228 r = radeon_vce_init(rdev); in si_init()
7230 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; in si_init()
7232 r600_ring_init(rdev, ring, 4096); in si_init()
7234 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; in si_init()
7236 r600_ring_init(rdev, ring, 4096); in si_init()
7239 rdev->ih.ring_obj = NULL; in si_init()
7240 r600_ih_ring_init(rdev, 64 * 1024); in si_init()
7242 r = r600_pcie_gart_init(rdev); in si_init()
7246 rdev->accel_working = true; in si_init()
7247 r = si_startup(rdev); in si_init()
7249 dev_err(rdev->dev, "disabling GPU acceleration\n"); in si_init()
7250 si_cp_fini(rdev); in si_init()
7251 cayman_dma_fini(rdev); in si_init()
7252 si_irq_fini(rdev); in si_init()
7253 sumo_rlc_fini(rdev); in si_init()
7254 radeon_wb_fini(rdev); in si_init()
7255 radeon_ib_pool_fini(rdev); in si_init()
7256 radeon_vm_manager_fini(rdev); in si_init()
7257 radeon_irq_kms_fini(rdev); in si_init()
7258 si_pcie_gart_fini(rdev); in si_init()
7259 rdev->accel_working = false; in si_init()
7266 if (!rdev->mc_fw) { in si_init()
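The failure branch at source lines 7249-7259 shows the driver's unwind-on-error convention: if si_startup() fails, everything initialized so far is torn down in reverse and accel_working is cleared, while missing MC firmware (line 7266) fails the probe outright. In-kernel code commonly expresses such unwinding with a goto-label ladder; a compact, self-contained illustration with hypothetical step names:

    int init_a(void *dev); void fini_a(void *dev);
    int init_b(void *dev); void fini_b(void *dev);
    int init_c(void *dev);

    static int example_init(void *dev)
    {
        int r;

        r = init_a(dev);
        if (r)
            return r;
        r = init_b(dev);
        if (r)
            goto err_a;
        r = init_c(dev);
        if (r)
            goto err_b;
        return 0;

    err_b:
        fini_b(dev);
    err_a:
        fini_a(dev);
        return r;
    }

The labels fall through in reverse init order, so each failure point jumps to exactly the cleanup it needs, mirroring what si_init() does inline.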
7274 void si_fini(struct radeon_device *rdev) in si_fini() argument
7276 radeon_pm_fini(rdev); in si_fini()
7277 si_cp_fini(rdev); in si_fini()
7278 cayman_dma_fini(rdev); in si_fini()
7279 si_fini_pg(rdev); in si_fini()
7280 si_fini_cg(rdev); in si_fini()
7281 si_irq_fini(rdev); in si_fini()
7282 sumo_rlc_fini(rdev); in si_fini()
7283 radeon_wb_fini(rdev); in si_fini()
7284 radeon_vm_manager_fini(rdev); in si_fini()
7285 radeon_ib_pool_fini(rdev); in si_fini()
7286 radeon_irq_kms_fini(rdev); in si_fini()
7287 if (rdev->has_uvd) { in si_fini()
7288 uvd_v1_0_fini(rdev); in si_fini()
7289 radeon_uvd_fini(rdev); in si_fini()
7290 radeon_vce_fini(rdev); in si_fini()
7292 si_pcie_gart_fini(rdev); in si_fini()
7293 r600_vram_scratch_fini(rdev); in si_fini()
7294 radeon_gem_fini(rdev); in si_fini()
7295 radeon_fence_driver_fini(rdev); in si_fini()
7296 radeon_bo_fini(rdev); in si_fini()
7297 radeon_atombios_fini(rdev); in si_fini()
7298 kfree(rdev->bios); in si_fini()
7299 rdev->bios = NULL; in si_fini()
7310 uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev) in si_get_gpu_clock_counter() argument
7314 mutex_lock(&rdev->gpu_clock_mutex); in si_get_gpu_clock_counter()
7318 mutex_unlock(&rdev->gpu_clock_mutex); in si_get_gpu_clock_counter()
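The mutex at source lines 7314/7318 serializes a latch-then-read of a 64-bit counter exposed as two 32-bit registers: the driver latches the free-running clock (via RLC_CAPTURE_GPU_CLOCK_COUNT) and then reads the LSB/MSB halves. A freestanding model of the combine step, with the register accessors stubbed out and stand-in register names:

    #include <stdint.h>

    /* stubs for WREG32()/RREG32()-style accessors */
    void     reg_write(uint32_t reg, uint32_t val);
    uint32_t reg_read(uint32_t reg);

    enum { CAPTURE_REG, COUNT_LSB, COUNT_MSB };  /* stand-in offsets */

    /* latch the counter, then read both halves; the latch makes the two
     * 32-bit reads coherent with each other, and the caller's mutex
     * keeps concurrent latches from interleaving */
    static uint64_t read_gpu_clock(void)
    {
        uint64_t lo, hi;

        reg_write(CAPTURE_REG, 1);
        lo = reg_read(COUNT_LSB);
        hi = reg_read(COUNT_MSB);
        return lo | (hi << 32);
    }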
7322 int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) in si_set_uvd_clocks() argument
7340 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000, in si_set_uvd_clocks()
7360 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in si_set_uvd_clocks()
7397 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in si_set_uvd_clocks()
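radeon_uvd_calc_upll_dividers() (source line 7340) picks one feedback divider driving a shared VCO plus separate post-dividers for VCLK and DCLK; the bounds passed here keep the VCO between 125000 and 250000 (units of 10 kHz, i.e. 1.25-2.5 GHz). A deliberately simplified brute-force sketch of that divider search - the real helper also folds in feedback-divider granularity and its exact scoring differs:

    static int pick_dividers(unsigned vclk, unsigned dclk,
                             unsigned vco_min, unsigned vco_max,
                             unsigned *vdiv, unsigned *ddiv,
                             unsigned *vco_out)
    {
        unsigned best_err = ~0u;

        if (!vclk || !dclk || vco_min > vco_max)
            return -1;

        for (unsigned vco = vco_min; vco <= vco_max; vco += 100) {
            unsigned vd = (vco + vclk / 2) / vclk;  /* rounded divider */
            unsigned dd = (vco + dclk / 2) / dclk;
            unsigned err;
            if (!vd || !dd)
                continue;
            err  = vco / vd > vclk ? vco / vd - vclk : vclk - vco / vd;
            err += vco / dd > dclk ? vco / dd - dclk : dclk - vco / dd;
            if (err < best_err) {
                best_err = err;
                *vdiv = vd; *ddiv = dd; *vco_out = vco;
            }
        }
        return best_err == ~0u ? -1 : 0;
    }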
7411 static void si_pcie_gen3_enable(struct radeon_device *rdev) in si_pcie_gen3_enable() argument
7413 struct pci_dev *root = rdev->pdev->bus->self; in si_pcie_gen3_enable()
7419 if (pci_is_root_bus(rdev->pdev->bus)) in si_pcie_gen3_enable()
7425 if (rdev->flags & RADEON_IS_IGP) in si_pcie_gen3_enable()
7428 if (!(rdev->flags & RADEON_IS_PCIE)) in si_pcie_gen3_enable()
7431 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask); in si_pcie_gen3_enable()
7459 gpu_pos = pci_pcie_cap(rdev->pdev); in si_pcie_gen3_enable()
7471 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg); in si_pcie_gen3_enable()
7477 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16); in si_pcie_gen3_enable()
7495 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16); in si_pcie_gen3_enable()
7500 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg); in si_pcie_gen3_enable()
7503 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2); in si_pcie_gen3_enable()
7521 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16); in si_pcie_gen3_enable()
7524 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16); in si_pcie_gen3_enable()
7532 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16); in si_pcie_gen3_enable()
7535 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16); in si_pcie_gen3_enable()
7549 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16); in si_pcie_gen3_enable()
7557 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16); in si_pcie_gen3_enable()
7563 for (i = 0; i < rdev->usec_timeout; i++) { in si_pcie_gen3_enable()
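The config-space dance at source lines 7459-7563 boils down to: locate the PCIe capability, program the target link speed in LNKCTL2 on both the GPU and the root port, kick retraining, then poll until link training completes. A condensed generic sketch using the kernel's standard config accessors (error handling elided; note the driver drives much of the real sequence through GPU-side PCIE_LC_* port registers rather than only standard config space):

    #include <linux/pci.h>

    /* set target link speed to 8.0 GT/s and retrain - a generic sketch,
     * not the driver's exact path */
    static void retrain_to_gen3(struct pci_dev *pdev)
    {
        int pos = pci_pcie_cap(pdev);
        u16 tmp16;

        if (!pos)
            return;

        pci_read_config_word(pdev, pos + PCI_EXP_LNKCTL2, &tmp16);
        tmp16 &= ~0xf;
        tmp16 |= 3;                    /* target link speed: 8.0 GT/s */
        pci_write_config_word(pdev, pos + PCI_EXP_LNKCTL2, tmp16);

        pci_read_config_word(pdev, pos + PCI_EXP_LNKCTL, &tmp16);
        tmp16 |= PCI_EXP_LNKCTL_RL;    /* kick link retraining */
        pci_write_config_word(pdev, pos + PCI_EXP_LNKCTL, tmp16);

        /* callers then poll link status, bounded by a timeout, much like
         * the usec_timeout loop at line 7563 above */
    }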
7571 static void si_program_aspm(struct radeon_device *rdev) in si_program_aspm() argument
7580 if (!(rdev->flags & RADEON_IS_PCIE)) in si_program_aspm()
7638 if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) { in si_program_aspm()
7687 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN)) in si_program_aspm()
7694 if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN)) in si_program_aspm()
7700 !pci_is_root_bus(rdev->pdev->bus)) { in si_program_aspm()
7701 struct pci_dev *root = rdev->pdev->bus->self; in si_program_aspm()
7776 int si_vce_send_vcepll_ctlreq(struct radeon_device *rdev) in si_vce_send_vcepll_ctlreq() argument
7807 int si_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk) in si_set_vce_clocks() argument
7828 r = radeon_uvd_calc_upll_dividers(rdev, evclk, ecclk, 125000, 250000, in si_set_vce_clocks()
7851 r = si_vce_send_vcepll_ctlreq(rdev); in si_set_vce_clocks()
7883 r = si_vce_send_vcepll_ctlreq(rdev); in si_set_vce_clocks()
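Both si_vce_send_vcepll_ctlreq() and the UPLL path (source lines 7360/7397 and 7851/7883) follow the same handshake: raise the PLL control-request bit, poll for the PLL's acknowledge within a bounded number of iterations, then drop the request. A freestanding model of that poll-for-ack pattern, with register access and the delay stubbed out and stand-in register/bit names (the driver's are CG_VCEPLL_FUNC_CNTL and the UPLL_CTLREQ/UPLL_CTLACK masks):

    #include <stdint.h>

    void     reg_write(uint32_t reg, uint32_t val);  /* accessor stubs */
    uint32_t reg_read(uint32_t reg);
    void     delay_us(unsigned us);

    #define CTL_REG    0x100   /* stand-in for CG_VCEPLL_FUNC_CNTL */
    #define CTLREQ_BIT (1u << 0)
    #define CTLACK_BIT (1u << 1)

    static int send_pll_ctlreq(void)
    {
        int acked;

        reg_write(CTL_REG, reg_read(CTL_REG) | CTLREQ_BIT);  /* request */

        for (int i = 0; i < 100; i++) {                 /* bounded poll */
            if (reg_read(CTL_REG) & CTLACK_BIT)
                break;
            delay_us(10);
        }
        acked = !!(reg_read(CTL_REG) & CTLACK_BIT);

        reg_write(CTL_REG, reg_read(CTL_REG) & ~CTLREQ_BIT); /* drop req */
        return acked ? 0 : -1;                          /* -ETIMEDOUT */
    }

Dropping the request bit even on timeout matters: leaving it set would wedge the next caller's handshake.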