Lines matching refs: adev

47 static void sdma_v3_0_set_ring_funcs(struct amdgpu_device *adev);
48 static void sdma_v3_0_set_buffer_funcs(struct amdgpu_device *adev);
49 static void sdma_v3_0_set_vm_pte_funcs(struct amdgpu_device *adev);
50 static void sdma_v3_0_set_irq_funcs(struct amdgpu_device *adev);
156 static void sdma_v3_0_init_golden_registers(struct amdgpu_device *adev) in sdma_v3_0_init_golden_registers() argument
158 switch (adev->asic_type) { in sdma_v3_0_init_golden_registers()
160 amdgpu_program_register_sequence(adev, in sdma_v3_0_init_golden_registers()
163 amdgpu_program_register_sequence(adev, in sdma_v3_0_init_golden_registers()
168 amdgpu_program_register_sequence(adev, in sdma_v3_0_init_golden_registers()
171 amdgpu_program_register_sequence(adev, in sdma_v3_0_init_golden_registers()
176 amdgpu_program_register_sequence(adev, in sdma_v3_0_init_golden_registers()
179 amdgpu_program_register_sequence(adev, in sdma_v3_0_init_golden_registers()
184 amdgpu_program_register_sequence(adev, in sdma_v3_0_init_golden_registers()
187 amdgpu_program_register_sequence(adev, in sdma_v3_0_init_golden_registers()
205 static int sdma_v3_0_init_microcode(struct amdgpu_device *adev) in sdma_v3_0_init_microcode() argument
216 switch (adev->asic_type) { in sdma_v3_0_init_microcode()
232 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
237 err = request_firmware(&adev->sdma.instance[i].fw, fw_name, adev->dev); in sdma_v3_0_init_microcode()
240 err = amdgpu_ucode_validate(adev->sdma.instance[i].fw); in sdma_v3_0_init_microcode()
243 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_init_microcode()
244 adev->sdma.instance[i].fw_version = le32_to_cpu(hdr->header.ucode_version); in sdma_v3_0_init_microcode()
245 adev->sdma.instance[i].feature_version = le32_to_cpu(hdr->ucode_feature_version); in sdma_v3_0_init_microcode()
246 if (adev->sdma.instance[i].feature_version >= 20) in sdma_v3_0_init_microcode()
247 adev->sdma.instance[i].burst_nop = true; in sdma_v3_0_init_microcode()
249 if (adev->firmware.smu_load) { in sdma_v3_0_init_microcode()
250 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_SDMA0 + i]; in sdma_v3_0_init_microcode()
252 info->fw = adev->sdma.instance[i].fw; in sdma_v3_0_init_microcode()
254 adev->firmware.fw_size += in sdma_v3_0_init_microcode()
263 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
264 release_firmware(adev->sdma.instance[i].fw); in sdma_v3_0_init_microcode()
265 adev->sdma.instance[i].fw = NULL; in sdma_v3_0_init_microcode()
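The init_microcode references above trace a common kernel idiom: request and validate one firmware blob per SDMA instance, and on any failure release everything acquired so far. Below is a trimmed sketch of that shape; the _sketch name, the blob-naming scheme, and the simplified error handling are illustrative assumptions, not the verbatim driver code.

	static int sdma_init_microcode_sketch(struct amdgpu_device *adev,
					      const char *chip_name)
	{
		char fw_name[30];
		int err, i;

		for (i = 0; i < adev->sdma.num_instances; i++) {
			snprintf(fw_name, sizeof(fw_name), "amdgpu/%s_sdma%s.bin",
				 chip_name, i ? "1" : "");
			err = request_firmware(&adev->sdma.instance[i].fw, fw_name,
					       adev->dev);
			if (err)
				goto out;
			err = amdgpu_ucode_validate(adev->sdma.instance[i].fw);
			if (err)
				goto out;
		}
		return 0;

	out:
		/* unwind every blob acquired so far; NULL slots are a no-op */
		for (i = 0; i < adev->sdma.num_instances; i++) {
			release_firmware(adev->sdma.instance[i].fw);
			adev->sdma.instance[i].fw = NULL;
		}
		return err;
	}
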
283 rptr = ring->adev->wb.wb[ring->rptr_offs] >> 2; in sdma_v3_0_ring_get_rptr()
297 struct amdgpu_device *adev = ring->adev; in sdma_v3_0_ring_get_wptr() local
302 wptr = ring->adev->wb.wb[ring->wptr_offs] >> 2; in sdma_v3_0_ring_get_wptr()
304 int me = (ring == &ring->adev->sdma.instance[0].ring) ? 0 : 1; in sdma_v3_0_ring_get_wptr()
321 struct amdgpu_device *adev = ring->adev; in sdma_v3_0_ring_set_wptr() local
325 adev->wb.wb[ring->wptr_offs] = ring->wptr << 2; in sdma_v3_0_ring_set_wptr()
328 int me = (ring == &ring->adev->sdma.instance[0].ring) ? 0 : 1; in sdma_v3_0_ring_set_wptr()
397 if (ring == &ring->adev->sdma.instance[0].ring) in sdma_v3_0_ring_emit_hdp_flush()
480 static void sdma_v3_0_gfx_stop(struct amdgpu_device *adev) in sdma_v3_0_gfx_stop() argument
482 struct amdgpu_ring *sdma0 = &adev->sdma.instance[0].ring; in sdma_v3_0_gfx_stop()
483 struct amdgpu_ring *sdma1 = &adev->sdma.instance[1].ring; in sdma_v3_0_gfx_stop()
487 if ((adev->mman.buffer_funcs_ring == sdma0) || in sdma_v3_0_gfx_stop()
488 (adev->mman.buffer_funcs_ring == sdma1)) in sdma_v3_0_gfx_stop()
489 amdgpu_ttm_set_active_vram_size(adev, adev->mc.visible_vram_size); in sdma_v3_0_gfx_stop()
491 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_stop()
510 static void sdma_v3_0_rlc_stop(struct amdgpu_device *adev) in sdma_v3_0_rlc_stop() argument
523 static void sdma_v3_0_ctx_switch_enable(struct amdgpu_device *adev, bool enable) in sdma_v3_0_ctx_switch_enable() argument
528 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_ctx_switch_enable()
548 static void sdma_v3_0_enable(struct amdgpu_device *adev, bool enable) in sdma_v3_0_enable() argument
554 sdma_v3_0_gfx_stop(adev); in sdma_v3_0_enable()
555 sdma_v3_0_rlc_stop(adev); in sdma_v3_0_enable()
558 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_enable()
576 static int sdma_v3_0_gfx_resume(struct amdgpu_device *adev) in sdma_v3_0_gfx_resume() argument
585 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
586 ring = &adev->sdma.instance[i].ring; in sdma_v3_0_gfx_resume()
589 mutex_lock(&adev->srbm_mutex); in sdma_v3_0_gfx_resume()
591 vi_srbm_select(adev, 0, 0, 0, j); in sdma_v3_0_gfx_resume()
596 vi_srbm_select(adev, 0, 0, 0, 0); in sdma_v3_0_gfx_resume()
597 mutex_unlock(&adev->srbm_mutex); in sdma_v3_0_gfx_resume()
618 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
620 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
661 if (adev->mman.buffer_funcs_ring == ring) in sdma_v3_0_gfx_resume()
662 amdgpu_ttm_set_active_vram_size(adev, adev->mc.real_vram_size); in sdma_v3_0_gfx_resume()
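Lines 487-489 and 661-662 show the active-VRAM-size handshake around engine stop and resume: while the SDMA ring that backs TTM buffer moves is down, only the CPU-visible VRAM window is advertised, and the full size is restored once the ring is live again. A minimal sketch of the stop side, with the register programming elided and the _sketch name illustrative:

	static void sdma_gfx_stop_sketch(struct amdgpu_device *adev)
	{
		struct amdgpu_ring *sdma0 = &adev->sdma.instance[0].ring;
		struct amdgpu_ring *sdma1 = &adev->sdma.instance[1].ring;

		/* GPU copies are about to go away: advertise CPU-visible VRAM only */
		if (adev->mman.buffer_funcs_ring == sdma0 ||
		    adev->mman.buffer_funcs_ring == sdma1)
			amdgpu_ttm_set_active_vram_size(adev,
							adev->mc.visible_vram_size);

		/* ... per-instance RB_ENABLE/IB_ENABLE clearing elided ... */
	}
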
676 static int sdma_v3_0_rlc_resume(struct amdgpu_device *adev) in sdma_v3_0_rlc_resume() argument
690 static int sdma_v3_0_load_microcode(struct amdgpu_device *adev) in sdma_v3_0_load_microcode() argument
698 sdma_v3_0_enable(adev, false); in sdma_v3_0_load_microcode()
700 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_load_microcode()
701 if (!adev->sdma.instance[i].fw) in sdma_v3_0_load_microcode()
703 hdr = (const struct sdma_firmware_header_v1_0 *)adev->sdma.instance[i].fw->data; in sdma_v3_0_load_microcode()
707 (adev->sdma.instance[i].fw->data + in sdma_v3_0_load_microcode()
712 WREG32(mmSDMA0_UCODE_ADDR + sdma_offsets[i], adev->sdma.instance[i].fw_version); in sdma_v3_0_load_microcode()
726 static int sdma_v3_0_start(struct amdgpu_device *adev) in sdma_v3_0_start() argument
730 if (!adev->firmware.smu_load) { in sdma_v3_0_start()
731 r = sdma_v3_0_load_microcode(adev); in sdma_v3_0_start()
735 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_start()
736 r = adev->smu.smumgr_funcs->check_fw_load_finish(adev, in sdma_v3_0_start()
746 sdma_v3_0_enable(adev, true); in sdma_v3_0_start()
748 sdma_v3_0_ctx_switch_enable(adev, true); in sdma_v3_0_start()
751 r = sdma_v3_0_gfx_resume(adev); in sdma_v3_0_start()
754 r = sdma_v3_0_rlc_resume(adev); in sdma_v3_0_start()
772 struct amdgpu_device *adev = ring->adev; in sdma_v3_0_ring_test_ring() local
779 r = amdgpu_wb_get(adev, &index); in sdma_v3_0_ring_test_ring()
781 dev_err(adev->dev, "(%d) failed to allocate wb slot\n", r); in sdma_v3_0_ring_test_ring()
785 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ring()
787 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v3_0_ring_test_ring()
792 amdgpu_wb_free(adev, index); in sdma_v3_0_ring_test_ring()
804 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_ring_test_ring()
805 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v3_0_ring_test_ring()
811 if (i < adev->usec_timeout) { in sdma_v3_0_ring_test_ring()
818 amdgpu_wb_free(adev, index); in sdma_v3_0_ring_test_ring()
833 struct amdgpu_device *adev = ring->adev; in sdma_v3_0_ring_test_ib() local
842 r = amdgpu_wb_get(adev, &index); in sdma_v3_0_ring_test_ib()
844 dev_err(adev->dev, "(%d) failed to allocate wb slot\n", r); in sdma_v3_0_ring_test_ib()
848 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ib()
850 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v3_0_ring_test_ib()
869 r = amdgpu_sched_ib_submit_kernel_helper(adev, ring, &ib, 1, NULL, in sdma_v3_0_ring_test_ib()
880 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_ring_test_ib()
881 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v3_0_ring_test_ib()
886 if (i < adev->usec_timeout) { in sdma_v3_0_ring_test_ib()
896 amdgpu_ib_free(adev, &ib); in sdma_v3_0_ring_test_ib()
898 amdgpu_wb_free(adev, index); in sdma_v3_0_ring_test_ib()
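Both ring tests above share one pattern: reserve a write-back slot, seed it with a dummy value, have the engine overwrite it, then poll until the new value lands or adev->usec_timeout expires. A condensed kernel-context sketch follows; the SDMA write packet is elided, the _sketch name is illustrative, and the magic values are the ones conventionally used by these tests.

	static int sdma_ring_test_sketch(struct amdgpu_ring *ring)
	{
		struct amdgpu_device *adev = ring->adev;
		unsigned index;
		u32 tmp;
		int i, r;

		r = amdgpu_wb_get(adev, &index);	/* reserve a write-back slot */
		if (r) {
			dev_err(adev->dev, "(%d) failed to allocate wb slot\n", r);
			return r;
		}

		adev->wb.wb[index] = cpu_to_le32(0xCAFEDEAD);	/* seed value */

		/* ... emit an SDMA write packet that stores 0xDEADBEEF here ... */

		for (i = 0; i < adev->usec_timeout; i++) {
			tmp = le32_to_cpu(adev->wb.wb[index]);
			if (tmp == 0xDEADBEEF)	/* engine overwrote the seed */
				break;
			udelay(1);
		}
		r = (i < adev->usec_timeout) ? 0 : -EINVAL;

		amdgpu_wb_free(adev, index);
		return r;
	}
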
969 value = amdgpu_vm_map_gart(ib->ring->adev, addr); in sdma_v3_0_vm_write_pte()
1096 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_early_init() local
1098 switch (adev->asic_type) { in sdma_v3_0_early_init()
1100 adev->sdma.num_instances = 1; in sdma_v3_0_early_init()
1103 adev->sdma.num_instances = SDMA_MAX_INSTANCE; in sdma_v3_0_early_init()
1107 sdma_v3_0_set_ring_funcs(adev); in sdma_v3_0_early_init()
1108 sdma_v3_0_set_buffer_funcs(adev); in sdma_v3_0_early_init()
1109 sdma_v3_0_set_vm_pte_funcs(adev); in sdma_v3_0_early_init()
1110 sdma_v3_0_set_irq_funcs(adev); in sdma_v3_0_early_init()
1119 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_sw_init() local
1122 r = amdgpu_irq_add_id(adev, 224, &adev->sdma.trap_irq); in sdma_v3_0_sw_init()
1127 r = amdgpu_irq_add_id(adev, 241, &adev->sdma.illegal_inst_irq); in sdma_v3_0_sw_init()
1132 r = amdgpu_irq_add_id(adev, 247, &adev->sdma.illegal_inst_irq); in sdma_v3_0_sw_init()
1136 r = sdma_v3_0_init_microcode(adev); in sdma_v3_0_sw_init()
1142 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_sw_init()
1143 ring = &adev->sdma.instance[i].ring; in sdma_v3_0_sw_init()
1150 r = amdgpu_ring_init(adev, ring, 256 * 1024, in sdma_v3_0_sw_init()
1152 &adev->sdma.trap_irq, in sdma_v3_0_sw_init()
1165 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_sw_fini() local
1168 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_sw_fini()
1169 amdgpu_ring_fini(&adev->sdma.instance[i].ring); in sdma_v3_0_sw_fini()
1177 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_hw_init() local
1179 sdma_v3_0_init_golden_registers(adev); in sdma_v3_0_hw_init()
1181 r = sdma_v3_0_start(adev); in sdma_v3_0_hw_init()
1190 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_hw_fini() local
1192 sdma_v3_0_ctx_switch_enable(adev, false); in sdma_v3_0_hw_fini()
1193 sdma_v3_0_enable(adev, false); in sdma_v3_0_hw_fini()
1200 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_suspend() local
1202 return sdma_v3_0_hw_fini(adev); in sdma_v3_0_suspend()
1207 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_resume() local
1209 return sdma_v3_0_hw_init(adev); in sdma_v3_0_resume()
1214 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_is_idle() local
1228 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_wait_for_idle() local
1230 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_wait_for_idle()
1244 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_print_status() local
1246 dev_info(adev->dev, "VI SDMA registers\n"); in sdma_v3_0_print_status()
1247 dev_info(adev->dev, " SRBM_STATUS2=0x%08X\n", in sdma_v3_0_print_status()
1249 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_print_status()
1250 dev_info(adev->dev, " SDMA%d_STATUS_REG=0x%08X\n", in sdma_v3_0_print_status()
1252 dev_info(adev->dev, " SDMA%d_F32_CNTL=0x%08X\n", in sdma_v3_0_print_status()
1254 dev_info(adev->dev, " SDMA%d_CNTL=0x%08X\n", in sdma_v3_0_print_status()
1256 dev_info(adev->dev, " SDMA%d_SEM_WAIT_FAIL_TIMER_CNTL=0x%08X\n", in sdma_v3_0_print_status()
1258 dev_info(adev->dev, " SDMA%d_GFX_IB_CNTL=0x%08X\n", in sdma_v3_0_print_status()
1260 dev_info(adev->dev, " SDMA%d_GFX_RB_CNTL=0x%08X\n", in sdma_v3_0_print_status()
1262 dev_info(adev->dev, " SDMA%d_GFX_RB_RPTR=0x%08X\n", in sdma_v3_0_print_status()
1264 dev_info(adev->dev, " SDMA%d_GFX_RB_WPTR=0x%08X\n", in sdma_v3_0_print_status()
1266 dev_info(adev->dev, " SDMA%d_GFX_RB_RPTR_ADDR_HI=0x%08X\n", in sdma_v3_0_print_status()
1268 dev_info(adev->dev, " SDMA%d_GFX_RB_RPTR_ADDR_LO=0x%08X\n", in sdma_v3_0_print_status()
1270 dev_info(adev->dev, " SDMA%d_GFX_RB_BASE=0x%08X\n", in sdma_v3_0_print_status()
1272 dev_info(adev->dev, " SDMA%d_GFX_RB_BASE_HI=0x%08X\n", in sdma_v3_0_print_status()
1274 dev_info(adev->dev, " SDMA%d_GFX_DOORBELL=0x%08X\n", in sdma_v3_0_print_status()
1276 mutex_lock(&adev->srbm_mutex); in sdma_v3_0_print_status()
1278 vi_srbm_select(adev, 0, 0, 0, j); in sdma_v3_0_print_status()
1279 dev_info(adev->dev, " VM %d:\n", j); in sdma_v3_0_print_status()
1280 dev_info(adev->dev, " SDMA%d_GFX_VIRTUAL_ADDR=0x%08X\n", in sdma_v3_0_print_status()
1282 dev_info(adev->dev, " SDMA%d_GFX_APE1_CNTL=0x%08X\n", in sdma_v3_0_print_status()
1285 vi_srbm_select(adev, 0, 0, 0, 0); in sdma_v3_0_print_status()
1286 mutex_unlock(&adev->srbm_mutex); in sdma_v3_0_print_status()
1293 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in sdma_v3_0_soft_reset() local
1312 sdma_v3_0_print_status((void *)adev); in sdma_v3_0_soft_reset()
1316 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in sdma_v3_0_soft_reset()
1329 sdma_v3_0_print_status((void *)adev); in sdma_v3_0_soft_reset()
1335 static int sdma_v3_0_set_trap_irq_state(struct amdgpu_device *adev, in sdma_v3_0_set_trap_irq_state() argument
1381 static int sdma_v3_0_process_trap_irq(struct amdgpu_device *adev, in sdma_v3_0_process_trap_irq() argument
1394 amdgpu_fence_process(&adev->sdma.instance[0].ring); in sdma_v3_0_process_trap_irq()
1407 amdgpu_fence_process(&adev->sdma.instance[1].ring); in sdma_v3_0_process_trap_irq()
1421 static int sdma_v3_0_process_illegal_inst_irq(struct amdgpu_device *adev, in sdma_v3_0_process_illegal_inst_irq() argument
1426 schedule_work(&adev->reset_work); in sdma_v3_0_process_illegal_inst_irq()
1474 static void sdma_v3_0_set_ring_funcs(struct amdgpu_device *adev) in sdma_v3_0_set_ring_funcs() argument
1478 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_set_ring_funcs()
1479 adev->sdma.instance[i].ring.funcs = &sdma_v3_0_ring_funcs; in sdma_v3_0_set_ring_funcs()
1491 static void sdma_v3_0_set_irq_funcs(struct amdgpu_device *adev) in sdma_v3_0_set_irq_funcs() argument
1493 adev->sdma.trap_irq.num_types = AMDGPU_SDMA_IRQ_LAST; in sdma_v3_0_set_irq_funcs()
1494 adev->sdma.trap_irq.funcs = &sdma_v3_0_trap_irq_funcs; in sdma_v3_0_set_irq_funcs()
1495 adev->sdma.illegal_inst_irq.funcs = &sdma_v3_0_illegal_inst_irq_funcs; in sdma_v3_0_set_irq_funcs()
1557 static void sdma_v3_0_set_buffer_funcs(struct amdgpu_device *adev) in sdma_v3_0_set_buffer_funcs() argument
1559 if (adev->mman.buffer_funcs == NULL) { in sdma_v3_0_set_buffer_funcs()
1560 adev->mman.buffer_funcs = &sdma_v3_0_buffer_funcs; in sdma_v3_0_set_buffer_funcs()
1561 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v3_0_set_buffer_funcs()
1572 static void sdma_v3_0_set_vm_pte_funcs(struct amdgpu_device *adev) in sdma_v3_0_set_vm_pte_funcs() argument
1574 if (adev->vm_manager.vm_pte_funcs == NULL) { in sdma_v3_0_set_vm_pte_funcs()
1575 adev->vm_manager.vm_pte_funcs = &sdma_v3_0_vm_pte_funcs; in sdma_v3_0_set_vm_pte_funcs()
1576 adev->vm_manager.vm_pte_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v3_0_set_vm_pte_funcs()
1577 adev->vm_manager.vm_pte_funcs_ring->is_pte_ring = true; in sdma_v3_0_set_vm_pte_funcs()
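The listing closes with the one-time func-pointer installs: each setter claims a shared service only if no other IP block registered first, and early init is where the opaque handle is cast back to the device before any setter runs. A compressed sketch of that wiring; the _sketch names are illustrative, everything else mirrors the lines above.

	static void sdma_set_vm_pte_funcs_sketch(struct amdgpu_device *adev)
	{
		/* first IP block to register wins; SDMA0's ring serves PTE updates */
		if (adev->vm_manager.vm_pte_funcs == NULL) {
			adev->vm_manager.vm_pte_funcs = &sdma_v3_0_vm_pte_funcs;
			adev->vm_manager.vm_pte_funcs_ring =
				&adev->sdma.instance[0].ring;
			adev->vm_manager.vm_pte_funcs_ring->is_pte_ring = true;
		}
	}

	static int sdma_early_init_sketch(void *handle)
	{
		struct amdgpu_device *adev = (struct amdgpu_device *)handle;

		adev->sdma.num_instances = SDMA_MAX_INSTANCE;
		sdma_set_vm_pte_funcs_sketch(adev);
		return 0;
	}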