
Searched refs:usec_timeout (Results 1 – 78 of 78) sorted by relevance
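
Almost every GPU-driver hit below is the same busy-wait idiom: poll a status register once per microsecond for at most usec_timeout iterations, then treat a loop counter that reached the limit as a timeout. A minimal, self-contained sketch of that pattern follows (demo_ctx, demo_hw_ready(), and the 1 us delay are illustrative stand-ins, not code from any of the files listed):

#include <linux/delay.h>   /* udelay() */
#include <linux/errno.h>   /* ETIMEDOUT */
#include <linux/types.h>

/* Hypothetical context; the real drivers keep usec_timeout in
 * struct amdgpu_device / struct radeon_device / *_private. */
struct demo_ctx {
	int usec_timeout;      /* maximum number of 1 us polling iterations */
};

/* Stand-in for the RREG32()-style status read used by the drivers. */
static bool demo_hw_ready(struct demo_ctx *ctx);

static int demo_wait_for_ready(struct demo_ctx *ctx)
{
	int i;

	for (i = 0; i < ctx->usec_timeout; i++) {
		if (demo_hw_ready(ctx))        /* condition met, stop polling */
			break;
		udelay(1);                     /* back off for one microsecond */
	}
	if (i == ctx->usec_timeout)            /* never broke out: timed out */
		return -ETIMEDOUT;
	return 0;
}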

/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
fiji_smc.c
141 for (i = 0; i < adev->usec_timeout; i++) { in wait_smu_response()
148 if (i == adev->usec_timeout) in wait_smu_response()
247 for (i = 0; i < adev->usec_timeout; i++) {
254 if (i == adev->usec_timeout)
547 for (i = 0; i < adev->usec_timeout; i++) { in fiji_smu_check_fw_load_finish()
553 if (i == adev->usec_timeout) { in fiji_smu_check_fw_load_finish()
597 for (i = 0; i < adev->usec_timeout; i++) { in fiji_smu_start_in_protection_mode()
604 if (i == adev->usec_timeout) { in fiji_smu_start_in_protection_mode()
615 for (i = 0; i < adev->usec_timeout; i++) { in fiji_smu_start_in_protection_mode()
622 if (i == adev->usec_timeout) { in fiji_smu_start_in_protection_mode()
[all …]
tonga_smc.c
141 for (i = 0; i < adev->usec_timeout; i++) { in wait_smu_response()
148 if (i == adev->usec_timeout) in wait_smu_response()
247 for (i = 0; i < adev->usec_timeout; i++) {
254 if (i == adev->usec_timeout)
549 for (i = 0; i < adev->usec_timeout; i++) { in tonga_smu_check_fw_load_finish()
555 if (i == adev->usec_timeout) { in tonga_smu_check_fw_load_finish()
599 for (i = 0; i < adev->usec_timeout; i++) { in tonga_smu_start_in_protection_mode()
606 if (i == adev->usec_timeout) { in tonga_smu_start_in_protection_mode()
617 for (i = 0; i < adev->usec_timeout; i++) { in tonga_smu_start_in_protection_mode()
624 if (i == adev->usec_timeout) { in tonga_smu_start_in_protection_mode()
[all …]
iceland_smc.c
177 for (i = 0; i < adev->usec_timeout; i++) { in wait_smu_response()
184 if (i == adev->usec_timeout) in wait_smu_response()
253 for (i = 0; i < adev->usec_timeout; i++) {
260 if (i == adev->usec_timeout)
301 for (i = 0; i < adev->usec_timeout; i++) { in iceland_smu_upload_firmware_image()
386 for (i = 0; i < adev->usec_timeout; i++) { in iceland_smu_start_smc()
564 for (i = 0; i < adev->usec_timeout; i++) { in iceland_smu_check_fw_load_finish()
570 if (i == adev->usec_timeout) { in iceland_smu_check_fw_load_finish()
cz_smc.c
56 for (i = 0; i < adev->usec_timeout; i++) { in cz_send_msg_to_smc_async()
65 if (i == adev->usec_timeout) in cz_send_msg_to_smc_async()
82 for (i = 0; i < adev->usec_timeout; i++) { in cz_send_msg_to_smc()
91 if (i == adev->usec_timeout) in cz_send_msg_to_smc()
205 for (i = 0; i < adev->usec_timeout; i++) { in cz_smu_check_fw_load_finish()
211 if (i >= adev->usec_timeout) { in cz_smu_check_fw_load_finish()
ci_smc.c
179 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_ci_send_msg_to_smc()
198 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_ci_wait_for_smc_inactive()
uvd_v6_0.c
536 for (i = 0; i < adev->usec_timeout; i++) { in uvd_v6_0_ring_test_ring()
543 if (i < adev->usec_timeout) { in uvd_v6_0_ring_test_ring()
620 for (i = 0; i < adev->usec_timeout; i++) { in uvd_v6_0_wait_for_idle()
uvd_v5_0.c
536 for (i = 0; i < adev->usec_timeout; i++) { in uvd_v5_0_ring_test_ring()
543 if (i < adev->usec_timeout) { in uvd_v5_0_ring_test_ring()
628 for (i = 0; i < adev->usec_timeout; i++) { in uvd_v5_0_wait_for_idle()
kv_smc.c
40 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_kv_notify_message_to_smu()
cik_sdma.c
600 for (i = 0; i < adev->usec_timeout; i++) { in cik_sdma_ring_test_ring()
607 if (i < adev->usec_timeout) { in cik_sdma_ring_test_ring()
671 for (i = 0; i < adev->usec_timeout; i++) { in cik_sdma_ring_test_ib()
677 if (i < adev->usec_timeout) { in cik_sdma_ring_test_ib()
1054 for (i = 0; i < adev->usec_timeout; i++) { in cik_sdma_wait_for_idle()
sdma_v2_4.c
654 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v2_4_ring_test_ring()
661 if (i < adev->usec_timeout) { in sdma_v2_4_ring_test_ring()
730 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v2_4_ring_test_ib()
736 if (i < adev->usec_timeout) { in sdma_v2_4_ring_test_ib()
1070 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v2_4_wait_for_idle()
uvd_v4_2.c
492 for (i = 0; i < adev->usec_timeout; i++) { in uvd_v4_2_ring_test_ring()
499 if (i < adev->usec_timeout) { in uvd_v4_2_ring_test_ring()
685 for (i = 0; i < adev->usec_timeout; i++) { in uvd_v4_2_wait_for_idle()
sdma_v3_0.c
804 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_ring_test_ring()
811 if (i < adev->usec_timeout) { in sdma_v3_0_ring_test_ring()
880 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_ring_test_ib()
886 if (i < adev->usec_timeout) { in sdma_v3_0_ring_test_ib()
1230 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_wait_for_idle()
gmc_v8_0.c
135 for (i = 0; i < adev->usec_timeout; i++) { in gmc_v8_0_mc_wait_for_idle()
293 for (i = 0; i < adev->usec_timeout; i++) { in gmc_v8_0_mc_load_microcode()
299 for (i = 0; i < adev->usec_timeout; i++) { in gmc_v8_0_mc_load_microcode()
1063 for (i = 0; i < adev->usec_timeout; i++) { in gmc_v8_0_wait_for_idle()
gmc_v7_0.c
90 for (i = 0; i < adev->usec_timeout; i++) { in gmc_v7_0_mc_wait_for_idle()
252 for (i = 0; i < adev->usec_timeout; i++) { in gmc_v7_0_mc_load_microcode()
258 for (i = 0; i < adev->usec_timeout; i++) { in gmc_v7_0_mc_load_microcode()
1102 for (i = 0; i < adev->usec_timeout; i++) { in gmc_v7_0_wait_for_idle()
amdgpu_vce.c
827 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_vce_ring_test_ring()
833 if (i < adev->usec_timeout) { in amdgpu_vce_ring_test_ring()
cik_ih.c
359 for (i = 0; i < adev->usec_timeout; i++) { in cik_ih_wait_for_idle()
gfx_v7_0.c
2393 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v7_0_ring_test_ring()
2399 if (i < adev->usec_timeout) { in gfx_v7_0_ring_test_ring()
2685 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v7_0_ring_test_ib()
2691 if (i < adev->usec_timeout) { in gfx_v7_0_ring_test_ib()
3439 for (j = 0; j < adev->usec_timeout; j++) { in gfx_v7_0_cp_compute_resume()
3924 for (k = 0; k < adev->usec_timeout; k++) { in gfx_v7_0_wait_for_rlc_serdes()
3938 for (k = 0; k < adev->usec_timeout; k++) { in gfx_v7_0_wait_for_rlc_serdes()
3966 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v7_0_halt_rlc()
3987 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v7_0_enter_rlc_safe_mode()
3993 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v7_0_enter_rlc_safe_mode()
[all …]
cz_ih.c
337 for (i = 0; i < adev->usec_timeout; i++) { in cz_ih_wait_for_idle()
iceland_ih.c
337 for (i = 0; i < adev->usec_timeout; i++) { in iceland_ih_wait_for_idle()
tonga_ih.c
360 for (i = 0; i < adev->usec_timeout; i++) { in tonga_ih_wait_for_idle()
gfx_v8_0.c
646 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v8_0_ring_test_ring()
652 if (i < adev->usec_timeout) { in gfx_v8_0_ring_test_ring()
702 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v8_0_ring_test_ib()
708 if (i < adev->usec_timeout) { in gfx_v8_0_ring_test_ib()
2974 for (k = 0; k < adev->usec_timeout; k++) { in gfx_v8_0_wait_for_rlc_serdes()
2988 for (k = 0; k < adev->usec_timeout; k++) { in gfx_v8_0_wait_for_rlc_serdes()
3843 for (j = 0; j < adev->usec_timeout; j++) { in gfx_v8_0_cp_compute_resume()
4096 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v8_0_wait_for_idle()
vce_v2_0.c
463 for (i = 0; i < adev->usec_timeout; i++) { in vce_v2_0_wait_for_idle()
vce_v3_0.c
472 for (i = 0; i < adev->usec_timeout; i++) { in vce_v3_0_wait_for_idle()
ci_dpm.c
1902 for (i = 0; i < adev->usec_timeout; i++) { in ci_dpm_start_smc()
2042 for (i = 0; i < adev->usec_timeout; i++) {
2199 for (i = 0; i < adev->usec_timeout; i++) { in ci_upload_firmware()
4335 for (i = 0; i < adev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4355 for (i = 0; i < adev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4375 for (i = 0; i < adev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4395 for (i = 0; i < adev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4413 for (i = 0; i < adev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4429 for (i = 0; i < adev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4445 for (i = 0; i < adev->usec_timeout; i++) { in ci_dpm_force_performance_level()
cik.c
1425 for (i = 0; i < adev->usec_timeout; i++) { in cik_gpu_pci_config_reset()
1718 for (i = 0; i < adev->usec_timeout; i++) { in cik_pcie_gen3_enable()
dce_v8_0.c
541 for (j = 0; j < adev->usec_timeout; j++) { in dce_v8_0_stop_mc_access()
605 for (j = 0; j < adev->usec_timeout; j++) { in dce_v8_0_resume_mc_access()
618 for (j = 0; j < adev->usec_timeout; j++) { in dce_v8_0_resume_mc_access()
784 for (i = 0; i < adev->usec_timeout; i++) { in dce_v8_0_line_buffer_adjust()
dce_v11_0.c
581 for (j = 0; j < adev->usec_timeout; j++) { in dce_v11_0_stop_mc_access()
645 for (j = 0; j < adev->usec_timeout; j++) { in dce_v11_0_resume_mc_access()
658 for (j = 0; j < adev->usec_timeout; j++) { in dce_v11_0_resume_mc_access()
829 for (i = 0; i < adev->usec_timeout; i++) { in dce_v11_0_line_buffer_adjust()
dce_v10_0.c
593 for (j = 0; j < adev->usec_timeout; j++) { in dce_v10_0_stop_mc_access()
657 for (j = 0; j < adev->usec_timeout; j++) { in dce_v10_0_resume_mc_access()
670 for (j = 0; j < adev->usec_timeout; j++) { in dce_v10_0_resume_mc_access()
841 for (i = 0; i < adev->usec_timeout; i++) { in dce_v10_0_line_buffer_adjust()
vi.c
916 for (i = 0; i < adev->usec_timeout; i++) { in vi_gpu_pci_config_reset()
amdgpu_device.c
1388 adev->usec_timeout = AMDGPU_MAX_USEC_TIMEOUT; in amdgpu_device_init()
amdgpu.h
1965 int usec_timeout; member
/linux-4.4.14/drivers/gpu/drm/radeon/
sumo_smc.c
41 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_send_msg_to_smu()
50 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_send_msg_to_smu()
56 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_send_msg_to_smu()
62 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_send_msg_to_smu()
trinity_smc.c
36 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_notify_message_to_smu()
117 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_acquire_mutex()
r600_dma.c
260 for (i = 0; i < rdev->usec_timeout; i++) { in r600_dma_ring_test()
267 if (i < rdev->usec_timeout) { in r600_dma_ring_test()
376 for (i = 0; i < rdev->usec_timeout; i++) { in r600_dma_ib_test()
382 if (i < rdev->usec_timeout) { in r600_dma_ib_test()
ci_smc.c
176 for (i = 0; i < rdev->usec_timeout; i++) { in ci_send_msg_to_smc()
196 for (i = 0; i < rdev->usec_timeout; i++) {
si_smc.c
182 for (i = 0; i < rdev->usec_timeout; i++) { in si_send_msg_to_smc()
201 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_smc_inactive()
radeon_cp.c
336 for (i = 0; i < dev_priv->usec_timeout; i++) { in radeon_do_pixcache_flush()
361 for (i = 0; i < dev_priv->usec_timeout; i++) { in radeon_do_wait_for_fifo()
389 for (i = 0; i < dev_priv->usec_timeout; i++) { in radeon_do_wait_for_idle()
879 for (tmp = 0; tmp < dev_priv->usec_timeout; tmp++) { in radeon_test_writeback()
888 if (tmp < dev_priv->usec_timeout) { in radeon_test_writeback()
1202 dev_priv->usec_timeout = init->usec_timeout; in radeon_do_init_cp()
1203 if (dev_priv->usec_timeout < 1 || in radeon_do_init_cp()
1204 dev_priv->usec_timeout > RADEON_MAX_USEC_TIMEOUT) { in radeon_do_init_cp()
1933 for (t = 0; t < dev_priv->usec_timeout; t++) { in radeon_freelist_get()
1983 for (i = 0; i < dev_priv->usec_timeout; i++) { in radeon_wait_ring()
radeon_ioc32.c
43 int usec_timeout; member
76 || __put_user(init32.usec_timeout, &init->usec_timeout) in compat_radeon_cp_init()
rv770_smc.c
435 for (i = 0; i < rdev->usec_timeout; i++) { in rv770_send_msg_to_smc()
458 for (i = 0; i < rdev->usec_timeout; i++) { in rv770_wait_for_smc_inactive()
uvd_v1_0.c
436 for (i = 0; i < rdev->usec_timeout; i++) { in uvd_v1_0_ring_test()
443 if (i < rdev->usec_timeout) { in uvd_v1_0_ring_test()
cik_sdma.c
676 for (i = 0; i < rdev->usec_timeout; i++) { in cik_sdma_ring_test()
683 if (i < rdev->usec_timeout) { in cik_sdma_ring_test()
745 for (i = 0; i < rdev->usec_timeout; i++) { in cik_sdma_ib_test()
751 if (i < rdev->usec_timeout) { in cik_sdma_ib_test()
kv_smc.c
37 for (i = 0; i < rdev->usec_timeout; i++) { in kv_notify_message_to_smu()
rs400.c
63 unsigned int timeout = rdev->usec_timeout; in rs400_gart_tlb_flush()
242 for (i = 0; i < rdev->usec_timeout; i++) { in rs400_mc_wait_for_idle()
r600_dpm.c
250 for (i = 0; i < rdev->usec_timeout; i++) { in r600_gfx_clockgating_enable()
328 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_spll_change()
651 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_power_level_unequal()
657 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_power_level_unequal()
669 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_power_level()
675 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_power_level()
r600_cp.c
108 for (i = 0; i < dev_priv->usec_timeout; i++) { in r600_do_wait_for_fifo()
139 for (i = 0; i < dev_priv->usec_timeout; i++) { in r600_do_wait_for_idle()
539 for (tmp = 0; tmp < dev_priv->usec_timeout; tmp++) { in r600_test_writeback()
548 if (tmp < dev_priv->usec_timeout) { in r600_test_writeback()
2011 dev_priv->usec_timeout = init->usec_timeout; in r600_do_init_cp()
2012 if (dev_priv->usec_timeout < 1 || in r600_do_init_cp()
2013 dev_priv->usec_timeout > RADEON_MAX_USEC_TIMEOUT) { in r600_do_init_cp()
r520.c
41 for (i = 0; i < rdev->usec_timeout; i++) { in r520_mc_wait_for_idle()
radeon_vce.c
771 for (i = 0; i < rdev->usec_timeout; i++) { in radeon_vce_ring_test()
777 if (i < rdev->usec_timeout) { in radeon_vce_ring_test()
rv515.c
135 for (i = 0; i < rdev->usec_timeout; i++) { in rv515_mc_wait_for_idle()
321 for (j = 0; j < rdev->usec_timeout; j++) { in rv515_mc_stop()
424 for (j = 0; j < rdev->usec_timeout; j++) { in rv515_mc_resume()
455 for (j = 0; j < rdev->usec_timeout; j++) { in rv515_mc_resume()
r100.c
167 for (i = 0; i < rdev->usec_timeout; i++) { in r100_page_flip()
964 for (i = 0; i < rdev->usec_timeout; i++) { in r100_cp_wait_for_idle()
2475 for (i = 0; i < rdev->usec_timeout; i++) { in r100_rbbm_fifo_wait_for_entry()
2494 for (i = 0; i < rdev->usec_timeout; i++) { in r100_gui_wait_for_idle()
2509 for (i = 0; i < rdev->usec_timeout; i++) { in r100_mc_wait_for_idle()
3668 for (i = 0; i < rdev->usec_timeout; i++) { in r100_ring_test()
3675 if (i < rdev->usec_timeout) { in r100_ring_test()
3739 for (i = 0; i < rdev->usec_timeout; i++) { in r100_ib_test()
3746 if (i < rdev->usec_timeout) { in r100_ib_test()
cik.c
1955 for (i = 0; i < rdev->usec_timeout; i++) { in ci_mc_load_microcode()
1960 for (i = 0; i < rdev->usec_timeout; i++) { in ci_mc_load_microcode()
3891 for (i = 0; i < rdev->usec_timeout; i++) { in cik_ring_test()
3897 if (i < rdev->usec_timeout) { in cik_ring_test()
4229 for (i = 0; i < rdev->usec_timeout; i++) { in cik_ib_test()
4235 if (i < rdev->usec_timeout) { in cik_ib_test()
4616 for (j = 0; j < rdev->usec_timeout; j++) { in cik_compute_stop()
5043 for (j = 0; j < rdev->usec_timeout; j++) { in cik_cp_compute_resume()
5601 for (i = 0; i < rdev->usec_timeout; i++) { in cik_gpu_pci_config_reset()
6223 for (k = 0; k < rdev->usec_timeout; k++) { in cik_wait_for_rlc_serdes()
[all …]
trinity_dpm.c
771 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_wait_for_dpm_enabled()
776 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_wait_for_dpm_enabled()
781 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_wait_for_dpm_enabled()
816 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_wait_for_level_0()
rs600.c
130 for (i = 0; i < rdev->usec_timeout; i++) { in rs600_page_flip()
848 for (i = 0; i < rdev->usec_timeout; i++) { in rs600_mc_wait_for_idle()
r600.c
1090 for (i = 0; i < rdev->usec_timeout; i++) { in r600_pcie_gart_tlb_flush()
1260 for (i = 0; i < rdev->usec_timeout; i++) { in r600_mc_wait_for_idle()
1867 for (i = 0; i < rdev->usec_timeout; i++) { in r600_gpu_pci_config_reset()
2843 for (i = 0; i < rdev->usec_timeout; i++) { in r600_ring_test()
2849 if (i < rdev->usec_timeout) { in r600_ring_test()
3389 for (i = 0; i < rdev->usec_timeout; i++) { in r600_ib_test()
3395 if (i < rdev->usec_timeout) { in r600_ib_test()
si.c
1629 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
1634 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
1949 for (i = 0; i < rdev->usec_timeout; i++) { in dce6_line_buffer_adjust()
3996 for (i = 0; i < rdev->usec_timeout; i++) { in si_set_clk_bypass_mode()
4077 for (i = 0; i < rdev->usec_timeout; i++) { in si_gpu_pci_config_reset()
5119 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5125 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5150 for (i = 0; i < rdev->usec_timeout; i++) { in si_enable_gui_idle_interrupt()
7563 for (i = 0; i < rdev->usec_timeout; i++) { in si_pcie_gen3_enable()
sumo_dpm.c
631 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_set_forced_mode_enabled()
642 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_wait_for_level_0()
647 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_wait_for_level_0()
ci_dpm.c
1774 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_start_smc()
1914 for (i = 0; i < rdev->usec_timeout; i++) {
2068 for (i = 0; i < rdev->usec_timeout; i++) { in ci_upload_firmware()
4191 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4210 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4229 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4246 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4261 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4276 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
evergreen.c
1966 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_line_buffer_adjust()
2458 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_mc_wait_for_idle()
2479 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_pcie_gart_tlb_flush()
2791 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_stop()
2891 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_resume()
2924 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_resume()
4132 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_gpu_pci_config_reset()
cypress_dpm.c
1108 for (j = 0; j < rdev->usec_timeout; j++) { in cypress_wait_for_mc_sequencer()
1152 for (i = 0; i < rdev->usec_timeout; i++) { in cypress_force_mc_use_s1()
1210 for (i = 0; i < rdev->usec_timeout; i++) { in cypress_force_mc_use_s0()
rv770.c
828 for (i = 0; i < rdev->usec_timeout; i++) { in rv770_page_flip()
1144 for (i = 0; i < rdev->usec_timeout; i++) { in rv770_set_clk_bypass_mode()
rs690.c
40 for (i = 0; i < rdev->usec_timeout; i++) { in rs690_mc_wait_for_idle()
r300.c
347 for (i = 0; i < rdev->usec_timeout; i++) { in r300_mc_wait_for_idle()
radeon_device.c
1293 rdev->usec_timeout = RADEON_MAX_USEC_TIMEOUT; in radeon_device_init()
radeon_drv.h
218 int usec_timeout; member
btc_dpm.c
1743 for (i = 0; i < rdev->usec_timeout; i++) { in btc_stop_smc()
ni.c
697 for (i = 0; i < rdev->usec_timeout; i++) { in ni_mc_load_microcode()
rv6xx_dpm.c
64 for (i = 0; i < rdev->usec_timeout; i++) { in rv6xx_force_pcie_gen1()
rv770_dpm.c
1657 for (i = 0; i < rdev->usec_timeout; i++) {
radeon.h
2323 int usec_timeout; member
ni_dpm.c
1085 for (i = 0; i < rdev->usec_timeout; i++) { in ni_stop_smc()
si_dpm.c
3633 for (i = 0; i < rdev->usec_timeout; i++) {
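
In the legacy CP/CCE init paths (radeon_cp.c and r600_cp.c above, r128_cce.c below), usec_timeout is not a fixed driver default: it arrives from userspace in the DRM init ioctl and is range-checked before use. A hedged sketch of that validation, with hypothetical names in place of the real init structs and *_MAX_USEC_TIMEOUT caps:

#include <linux/errno.h>

#define DEMO_MAX_USEC_TIMEOUT 100000   /* stand-in for the driver's upper bound */

struct demo_private { int usec_timeout; };
struct demo_init_arg { int usec_timeout; };

/* Reject non-positive or oversized timeouts supplied by userspace,
 * mirroring the check in radeon_do_init_cp()/r128_do_init_cce(). */
static int demo_init_timeout(struct demo_private *priv,
			     const struct demo_init_arg *init)
{
	priv->usec_timeout = init->usec_timeout;
	if (priv->usec_timeout < 1 ||
	    priv->usec_timeout > DEMO_MAX_USEC_TIMEOUT)
		return -EINVAL;
	return 0;
}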
/linux-4.4.14/drivers/gpu/drm/r128/
r128_cce.c
85 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_pixcache_flush()
101 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_wait_for_fifo()
122 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_wait_for_idle()
205 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_cce_idle()
367 dev_priv->usec_timeout = init->usec_timeout; in r128_do_init_cce()
368 if (dev_priv->usec_timeout < 1 || in r128_do_init_cce()
369 dev_priv->usec_timeout > R128_MAX_USEC_TIMEOUT) { in r128_do_init_cce()
827 for (t = 0; t < dev_priv->usec_timeout; t++) { in r128_freelist_get()
869 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_wait_ring()
r128_ioc32.c
45 int usec_timeout; member
79 || __put_user(init32.usec_timeout, &init->usec_timeout) in compat_r128_init()
r128_drv.h
92 int usec_timeout; member
440 for (i = 0 ; i < dev_priv->usec_timeout ; i++) { \
/linux-4.4.14/include/trace/events/
writeback.h
637 TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),
639 TP_ARGS(usec_timeout, usec_delayed),
642 __field( unsigned int, usec_timeout )
647 __entry->usec_timeout = usec_timeout;
652 __entry->usec_timeout,
658 TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),
660 TP_ARGS(usec_timeout, usec_delayed)
665 TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),
667 TP_ARGS(usec_timeout, usec_delayed)
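Outside the GPU drivers, the only other reference is the writeback congestion-wait tracepoint class above, which simply records the requested usec_timeout next to the measured usec_delayed. A minimal sketch of a tracepoint carrying the same two fields (the demo_congest_waited event and header name are made up; the real events are declared through a shared event class in writeback.h):

/* trace/events/demo_wait.h -- illustrative only */
#undef TRACE_SYSTEM
#define TRACE_SYSTEM demo_wait

#if !defined(_TRACE_DEMO_WAIT_H) || defined(TRACE_HEADER_MULTI_READ)
#define _TRACE_DEMO_WAIT_H

#include <linux/tracepoint.h>

TRACE_EVENT(demo_congest_waited,
	TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),
	TP_ARGS(usec_timeout, usec_delayed),
	TP_STRUCT__entry(
		__field(unsigned int, usec_timeout)
		__field(unsigned int, usec_delayed)
	),
	TP_fast_assign(
		__entry->usec_timeout = usec_timeout;
		__entry->usec_delayed = usec_delayed;
	),
	TP_printk("usec_timeout=%u usec_delayed=%u",
		  __entry->usec_timeout, __entry->usec_delayed)
);

#endif /* _TRACE_DEMO_WAIT_H */

#include <trace/define_trace.h>
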
/linux-4.4.14/include/uapi/drm/
r128_drm.h
232 int usec_timeout; member
radeon_drm.h
573 int usec_timeout; member
/linux-4.4.14/drivers/gpu/drm/mga/
mga_dma.c
59 for (i = 0; i < dev_priv->usec_timeout; i++) { in mga_do_wait_for_idle()
113 for (i = 0; i < dev_priv->usec_timeout; i++) { in mga_do_dma_flush()
401 dev_priv->usec_timeout = MGA_DEFAULT_USEC_TIMEOUT; in mga_driver_load()
mga_drv.h
90 int usec_timeout; member