Search results for refs:usec_timeout (1 – 47 of 47), sorted by relevance

/linux-4.1.27/drivers/gpu/drm/radeon/
sumo_smc.c 41 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_send_msg_to_smu()
50 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_send_msg_to_smu()
56 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_send_msg_to_smu()
62 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_send_msg_to_smu()
trinity_smc.c 36 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_notify_message_to_smu()
117 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_acquire_mutex()
r600_dma.c 260 for (i = 0; i < rdev->usec_timeout; i++) { in r600_dma_ring_test()
267 if (i < rdev->usec_timeout) { in r600_dma_ring_test()
376 for (i = 0; i < rdev->usec_timeout; i++) { in r600_dma_ib_test()
382 if (i < rdev->usec_timeout) { in r600_dma_ib_test()
ci_smc.c 176 for (i = 0; i < rdev->usec_timeout; i++) { in ci_send_msg_to_smc()
196 for (i = 0; i < rdev->usec_timeout; i++) {
si_smc.c 182 for (i = 0; i < rdev->usec_timeout; i++) { in si_send_msg_to_smc()
201 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_smc_inactive()
radeon_cp.c 336 for (i = 0; i < dev_priv->usec_timeout; i++) { in radeon_do_pixcache_flush()
361 for (i = 0; i < dev_priv->usec_timeout; i++) { in radeon_do_wait_for_fifo()
389 for (i = 0; i < dev_priv->usec_timeout; i++) { in radeon_do_wait_for_idle()
879 for (tmp = 0; tmp < dev_priv->usec_timeout; tmp++) { in radeon_test_writeback()
888 if (tmp < dev_priv->usec_timeout) { in radeon_test_writeback()
1202 dev_priv->usec_timeout = init->usec_timeout; in radeon_do_init_cp()
1203 if (dev_priv->usec_timeout < 1 || in radeon_do_init_cp()
1204 dev_priv->usec_timeout > RADEON_MAX_USEC_TIMEOUT) { in radeon_do_init_cp()
1933 for (t = 0; t < dev_priv->usec_timeout; t++) { in radeon_freelist_get()
1983 for (i = 0; i < dev_priv->usec_timeout; i++) { in radeon_wait_ring()
radeon_ioc32.c 43 int usec_timeout; member
76 || __put_user(init32.usec_timeout, &init->usec_timeout) in compat_radeon_cp_init()
uvd_v1_0.c 436 for (i = 0; i < rdev->usec_timeout; i++) { in uvd_v1_0_ring_test()
443 if (i < rdev->usec_timeout) { in uvd_v1_0_ring_test()
rv770_smc.c 435 for (i = 0; i < rdev->usec_timeout; i++) { in rv770_send_msg_to_smc()
458 for (i = 0; i < rdev->usec_timeout; i++) { in rv770_wait_for_smc_inactive()
cik_sdma.c 676 for (i = 0; i < rdev->usec_timeout; i++) { in cik_sdma_ring_test()
683 if (i < rdev->usec_timeout) { in cik_sdma_ring_test()
745 for (i = 0; i < rdev->usec_timeout; i++) { in cik_sdma_ib_test()
751 if (i < rdev->usec_timeout) { in cik_sdma_ib_test()
kv_smc.c 37 for (i = 0; i < rdev->usec_timeout; i++) { in kv_notify_message_to_smu()
rs400.c 63 unsigned int timeout = rdev->usec_timeout; in rs400_gart_tlb_flush()
242 for (i = 0; i < rdev->usec_timeout; i++) { in rs400_mc_wait_for_idle()
r600_dpm.c 250 for (i = 0; i < rdev->usec_timeout; i++) { in r600_gfx_clockgating_enable()
328 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_spll_change()
651 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_power_level_unequal()
657 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_power_level_unequal()
669 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_power_level()
675 for (i = 0; i < rdev->usec_timeout; i++) { in r600_wait_for_power_level()
r600_cp.c 108 for (i = 0; i < dev_priv->usec_timeout; i++) { in r600_do_wait_for_fifo()
139 for (i = 0; i < dev_priv->usec_timeout; i++) { in r600_do_wait_for_idle()
539 for (tmp = 0; tmp < dev_priv->usec_timeout; tmp++) { in r600_test_writeback()
548 if (tmp < dev_priv->usec_timeout) { in r600_test_writeback()
2011 dev_priv->usec_timeout = init->usec_timeout; in r600_do_init_cp()
2012 if (dev_priv->usec_timeout < 1 || in r600_do_init_cp()
2013 dev_priv->usec_timeout > RADEON_MAX_USEC_TIMEOUT) { in r600_do_init_cp()
r520.c 41 for (i = 0; i < rdev->usec_timeout; i++) { in r520_mc_wait_for_idle()
radeon_vce.c 753 for (i = 0; i < rdev->usec_timeout; i++) { in radeon_vce_ring_test()
759 if (i < rdev->usec_timeout) { in radeon_vce_ring_test()
rv515.c 135 for (i = 0; i < rdev->usec_timeout; i++) { in rv515_mc_wait_for_idle()
321 for (j = 0; j < rdev->usec_timeout; j++) { in rv515_mc_stop()
424 for (j = 0; j < rdev->usec_timeout; j++) { in rv515_mc_resume()
455 for (j = 0; j < rdev->usec_timeout; j++) { in rv515_mc_resume()
r100.c 167 for (i = 0; i < rdev->usec_timeout; i++) { in r100_page_flip()
964 for (i = 0; i < rdev->usec_timeout; i++) { in r100_cp_wait_for_idle()
2475 for (i = 0; i < rdev->usec_timeout; i++) { in r100_rbbm_fifo_wait_for_entry()
2494 for (i = 0; i < rdev->usec_timeout; i++) { in r100_gui_wait_for_idle()
2509 for (i = 0; i < rdev->usec_timeout; i++) { in r100_mc_wait_for_idle()
3658 for (i = 0; i < rdev->usec_timeout; i++) { in r100_ring_test()
3665 if (i < rdev->usec_timeout) { in r100_ring_test()
3729 for (i = 0; i < rdev->usec_timeout; i++) { in r100_ib_test()
3736 if (i < rdev->usec_timeout) { in r100_ib_test()
trinity_dpm.c 770 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_wait_for_dpm_enabled()
775 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_wait_for_dpm_enabled()
780 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_wait_for_dpm_enabled()
815 for (i = 0; i < rdev->usec_timeout; i++) { in trinity_wait_for_level_0()
cik.c 1930 for (i = 0; i < rdev->usec_timeout; i++) { in ci_mc_load_microcode()
1935 for (i = 0; i < rdev->usec_timeout; i++) { in ci_mc_load_microcode()
3866 for (i = 0; i < rdev->usec_timeout; i++) { in cik_ring_test()
3872 if (i < rdev->usec_timeout) { in cik_ring_test()
4208 for (i = 0; i < rdev->usec_timeout; i++) { in cik_ib_test()
4214 if (i < rdev->usec_timeout) { in cik_ib_test()
4595 for (j = 0; j < rdev->usec_timeout; j++) { in cik_compute_stop()
5022 for (j = 0; j < rdev->usec_timeout; j++) { in cik_cp_compute_resume()
5580 for (i = 0; i < rdev->usec_timeout; i++) { in cik_gpu_pci_config_reset()
6202 for (k = 0; k < rdev->usec_timeout; k++) { in cik_wait_for_rlc_serdes()
[all …]
rs600.c 130 for (i = 0; i < rdev->usec_timeout; i++) { in rs600_page_flip()
848 for (i = 0; i < rdev->usec_timeout; i++) { in rs600_mc_wait_for_idle()
r600.c 1043 for (i = 0; i < rdev->usec_timeout; i++) { in r600_pcie_gart_tlb_flush()
1213 for (i = 0; i < rdev->usec_timeout; i++) { in r600_mc_wait_for_idle()
1820 for (i = 0; i < rdev->usec_timeout; i++) { in r600_gpu_pci_config_reset()
2796 for (i = 0; i < rdev->usec_timeout; i++) { in r600_ring_test()
2802 if (i < rdev->usec_timeout) { in r600_ring_test()
3342 for (i = 0; i < rdev->usec_timeout; i++) { in r600_ib_test()
3348 if (i < rdev->usec_timeout) { in r600_ib_test()
si.c 1629 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
1634 for (i = 0; i < rdev->usec_timeout; i++) { in si_mc_load_microcode()
1949 for (i = 0; i < rdev->usec_timeout; i++) { in dce6_line_buffer_adjust()
3993 for (i = 0; i < rdev->usec_timeout; i++) { in si_set_clk_bypass_mode()
4074 for (i = 0; i < rdev->usec_timeout; i++) { in si_gpu_pci_config_reset()
5116 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5122 for (i = 0; i < rdev->usec_timeout; i++) { in si_wait_for_rlc_serdes()
5147 for (i = 0; i < rdev->usec_timeout; i++) { in si_enable_gui_idle_interrupt()
7514 for (i = 0; i < rdev->usec_timeout; i++) { in si_pcie_gen3_enable()
evergreen.c 1366 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_page_flip()
1921 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_line_buffer_adjust()
2410 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_mc_wait_for_idle()
2431 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_pcie_gart_tlb_flush()
2743 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_stop()
2843 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_resume()
2876 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_resume()
4084 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_gpu_pci_config_reset()
sumo_dpm.c 631 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_set_forced_mode_enabled()
642 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_wait_for_level_0()
647 for (i = 0; i < rdev->usec_timeout; i++) { in sumo_wait_for_level_0()
ci_dpm.c 1774 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_start_smc()
1914 for (i = 0; i < rdev->usec_timeout; i++) {
2068 for (i = 0; i < rdev->usec_timeout; i++) { in ci_upload_firmware()
4191 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4210 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4229 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4246 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4261 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
4276 for (i = 0; i < rdev->usec_timeout; i++) { in ci_dpm_force_performance_level()
cypress_dpm.c 1108 for (j = 0; j < rdev->usec_timeout; j++) { in cypress_wait_for_mc_sequencer()
1152 for (i = 0; i < rdev->usec_timeout; i++) { in cypress_force_mc_use_s1()
1210 for (i = 0; i < rdev->usec_timeout; i++) { in cypress_force_mc_use_s0()
rv770.c 828 for (i = 0; i < rdev->usec_timeout; i++) { in rv770_page_flip()
1144 for (i = 0; i < rdev->usec_timeout; i++) { in rv770_set_clk_bypass_mode()
rs690.c 40 for (i = 0; i < rdev->usec_timeout; i++) { in rs690_mc_wait_for_idle()
r300.c 322 for (i = 0; i < rdev->usec_timeout; i++) { in r300_mc_wait_for_idle()
radeon_device.c 1287 rdev->usec_timeout = RADEON_MAX_USEC_TIMEOUT; in radeon_device_init()
radeon_drv.h 218 int usec_timeout; member
ni.c 672 for (i = 0; i < rdev->usec_timeout; i++) { in ni_mc_load_microcode()
btc_dpm.c 1743 for (i = 0; i < rdev->usec_timeout; i++) { in btc_stop_smc()
rv6xx_dpm.c 64 for (i = 0; i < rdev->usec_timeout; i++) { in rv6xx_force_pcie_gen1()
rv770_dpm.c 1657 for (i = 0; i < rdev->usec_timeout; i++) {
radeon.h 2322 int usec_timeout; member
ni_dpm.c 1085 for (i = 0; i < rdev->usec_timeout; i++) { in ni_stop_smc()
si_dpm.c 3559 for (i = 0; i < rdev->usec_timeout; i++) {
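
Nearly every radeon hit above is the same bounded busy-wait: poll a status condition once per microsecond and give up after rdev->usec_timeout iterations; the ring/IB tests then check if (i < rdev->usec_timeout) after the loop to tell an early exit (success) from a timeout. A minimal sketch of that idiom, assuming the radeon driver environment (struct radeon_device and the RREG32() register accessor); EXAMPLE_STATUS_REG and EXAMPLE_IDLE_BIT are placeholder names, not real registers:

```c
#include <linux/delay.h>	/* udelay() */

/* Placeholder register and idle bit; the real callers poll per-ASIC registers. */
#define EXAMPLE_STATUS_REG	0x8010
#define EXAMPLE_IDLE_BIT	(1u << 31)

static int example_wait_for_idle(struct radeon_device *rdev)
{
	unsigned int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* Re-read the status register once per microsecond. */
		if (RREG32(EXAMPLE_STATUS_REG) & EXAMPLE_IDLE_BIT)
			return 0;	/* condition met before the budget ran out */
		udelay(1);
	}
	return -1;			/* timed out, as the *_mc_wait_for_idle() helpers report */
}
```

The mc_wait_for_idle, SMC-message and DPM-level waits listed above all follow this shape; only the register read and the success condition differ.
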
/linux-4.1.27/include/trace/events/
writeback.h 537 TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),
539 TP_ARGS(usec_timeout, usec_delayed),
542 __field( unsigned int, usec_timeout )
547 __entry->usec_timeout = usec_timeout;
552 __entry->usec_timeout,
558 TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),
560 TP_ARGS(usec_timeout, usec_delayed)
565 TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),
567 TP_ARGS(usec_timeout, usec_delayed)
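
The writeback.h hits are not wait loops; they belong to a tracepoint class that records a requested wait budget (usec_timeout) and the delay actually incurred (usec_delayed), both in microseconds. A hedged reconstruction of the shape those TP_* lines come from, as it would sit in the trace header (class and event names are best-effort, not copied verbatim):

```c
/* One event class carrying the timeout budget and the measured delay. */
DECLARE_EVENT_CLASS(writeback_congest_waited_template,

	TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),

	TP_ARGS(usec_timeout, usec_delayed),

	TP_STRUCT__entry(
		__field(unsigned int, usec_timeout)
		__field(unsigned int, usec_delayed)
	),

	TP_fast_assign(
		__entry->usec_timeout = usec_timeout;
		__entry->usec_delayed = usec_delayed;
	),

	TP_printk("usec_timeout=%u usec_delayed=%u",
		  __entry->usec_timeout,
		  __entry->usec_delayed)
);

/* Events reuse the class, matching the repeated TP_PROTO/TP_ARGS hits above. */
DEFINE_EVENT(writeback_congest_waited_template, writeback_congestion_wait,
	TP_PROTO(unsigned int usec_timeout, unsigned int usec_delayed),
	TP_ARGS(usec_timeout, usec_delayed)
);
```
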
/linux-4.1.27/drivers/gpu/drm/r128/
r128_cce.c 85 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_pixcache_flush()
101 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_wait_for_fifo()
122 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_wait_for_idle()
205 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_cce_idle()
367 dev_priv->usec_timeout = init->usec_timeout; in r128_do_init_cce()
368 if (dev_priv->usec_timeout < 1 || in r128_do_init_cce()
369 dev_priv->usec_timeout > R128_MAX_USEC_TIMEOUT) { in r128_do_init_cce()
827 for (t = 0; t < dev_priv->usec_timeout; t++) { in r128_freelist_get()
869 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_wait_ring()
r128_ioc32.c 45 int usec_timeout; member
79 || __put_user(init32.usec_timeout, &init->usec_timeout) in compat_r128_init()
r128_drv.h 92 int usec_timeout; member
440 for (i = 0 ; i < dev_priv->usec_timeout ; i++) { \
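
The r128 entries repeat the same bounded poll against dev_priv->usec_timeout; the r128_drv.h 440 hit is the one place where the loop lives inside a statement macro, which is why that line ends with a continuation backslash. A sketch of that macro form under assumed names (EXAMPLE_WAIT_FOR_SPACE and example_ring_has_space() are illustrative, not the real macro or helper):

```c
/* Hypothetical wrapper illustrating the backslash-continued loop at
 * r128_drv.h line 440: a do { } while (0) macro so the bounded wait can be
 * dropped into an ioctl handler as one statement and bail out on timeout. */
#define EXAMPLE_WAIT_FOR_SPACE(dev_priv)				\
do {									\
	int i;								\
	for (i = 0 ; i < (dev_priv)->usec_timeout ; i++) {		\
		if (example_ring_has_space(dev_priv))	/* made-up check */ \
			break;						\
		DRM_UDELAY(1);			/* back off 1 us */	\
	}								\
	if (i == (dev_priv)->usec_timeout)				\
		return -EBUSY;		/* propagate the timeout */	\
} while (0)
```
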
/linux-4.1.27/include/uapi/drm/
r128_drm.h 230 int usec_timeout; member
radeon_drm.h 573 int usec_timeout; member
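
These two uapi headers are where usec_timeout enters from userspace: it is a member of the legacy init ioctl structs, copied and range-checked by the drivers at CP/CCE init, as the radeon_cp.c 1202, r600_cp.c 2011 and r128_cce.c 367 hits show (r128 checks against R128_MAX_USEC_TIMEOUT analogously). A sketch of that consume-and-validate step, assuming the legacy radeon types (drm_radeon_init_t, drm_radeon_private_t); the helper name is illustrative:

```c
#include <linux/errno.h>

/* Hypothetical helper condensing the validation seen at radeon_cp.c:1202,
 * r600_cp.c:2011 and r128_cce.c:367: take the userspace-supplied timeout and
 * reject anything outside [1, RADEON_MAX_USEC_TIMEOUT].  The real code also
 * tears down the CP/CCE state before returning -EINVAL. */
static int example_set_usec_timeout(drm_radeon_private_t *dev_priv,
				    const drm_radeon_init_t *init)
{
	dev_priv->usec_timeout = init->usec_timeout;
	if (dev_priv->usec_timeout < 1 ||
	    dev_priv->usec_timeout > RADEON_MAX_USEC_TIMEOUT)
		return -EINVAL;		/* nonsense timeout from userspace */
	return 0;
}
```
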
/linux-4.1.27/drivers/gpu/drm/mga/
mga_dma.c 59 for (i = 0; i < dev_priv->usec_timeout; i++) { in mga_do_wait_for_idle()
113 for (i = 0; i < dev_priv->usec_timeout; i++) { in mga_do_dma_flush()
401 dev_priv->usec_timeout = MGA_DEFAULT_USEC_TIMEOUT; in mga_driver_load()
mga_drv.h 90 int usec_timeout; member