/linux-6.12.1/drivers/gpu/drm/radeon/

sumo_smc.c
    38   for (i = 0; i < rdev->usec_timeout; i++) {   in sumo_send_msg_to_smu()
    47   for (i = 0; i < rdev->usec_timeout; i++) {   in sumo_send_msg_to_smu()
    53   for (i = 0; i < rdev->usec_timeout; i++) {   in sumo_send_msg_to_smu()
    59   for (i = 0; i < rdev->usec_timeout; i++) {   in sumo_send_msg_to_smu()

trinity_smc.c
    35   for (i = 0; i < rdev->usec_timeout; i++) {   in trinity_notify_message_to_smu()
   116   for (i = 0; i < rdev->usec_timeout; i++) {   in trinity_acquire_mutex()

r600_dma.c
   259   for (i = 0; i < rdev->usec_timeout; i++) {   in r600_dma_ring_test()
   266   if (i < rdev->usec_timeout) {                in r600_dma_ring_test()
   380   for (i = 0; i < rdev->usec_timeout; i++) {   in r600_dma_ib_test()
   386   if (i < rdev->usec_timeout) {                in r600_dma_ib_test()

si_smc.c
   182   for (i = 0; i < rdev->usec_timeout; i++) {   in si_send_msg_to_smc()
   201   for (i = 0; i < rdev->usec_timeout; i++) {   in si_wait_for_smc_inactive()

cik_sdma.c
   675   for (i = 0; i < rdev->usec_timeout; i++) {   in cik_sdma_ring_test()
   682   if (i < rdev->usec_timeout) {                in cik_sdma_ring_test()
   749   for (i = 0; i < rdev->usec_timeout; i++) {   in cik_sdma_ib_test()
   755   if (i < rdev->usec_timeout) {                in cik_sdma_ib_test()

rv770_smc.c
   423   for (i = 0; i < rdev->usec_timeout; i++) {   in rv770_send_msg_to_smc()
   446   for (i = 0; i < rdev->usec_timeout; i++) {   in rv770_wait_for_smc_inactive()

uvd_v1_0.c
   437   for (i = 0; i < rdev->usec_timeout; i++) {   in uvd_v1_0_ring_test()
   444   if (i < rdev->usec_timeout) {                in uvd_v1_0_ring_test()

kv_smc.c
    36   for (i = 0; i < rdev->usec_timeout; i++) {   in kv_notify_message_to_smu()
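Every radeon hit above is the same bounded busy-wait: poll a hardware status bit once per microsecond and give up after rdev->usec_timeout iterations. A minimal sketch of that idiom follows; SMC_STATUS_REG, SMC_READY_MASK and the function name are illustrative placeholders, not the actual registers used in sumo_smc.c or trinity_smc.c.

/* Sketch only: placeholder register/mask names, radeon-style polling. */
static int wait_for_smc_ready(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(SMC_STATUS_REG);     /* placeholder register */
		if (tmp & SMC_READY_MASK)         /* placeholder ready bit */
			break;                    /* condition met, stop polling */
		udelay(1);                        /* one loop iteration ~ 1 us */
	}

	if (i == rdev->usec_timeout)              /* never became ready in time */
		return -ETIMEDOUT;

	return 0;
}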
/linux-6.12.1/include/trace/events/

vmscan.h
   469   TP_PROTO(int nid, int usec_timeout, int usec_delayed, int reason),
   471   TP_ARGS(nid, usec_timeout, usec_delayed, reason),
   475   __field(int, usec_timeout)
   482   __entry->usec_timeout = usec_timeout;
   489   __entry->usec_timeout,
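Unlike the GPU hits, these lines are not a polling loop: in include/trace/events/vmscan.h, usec_timeout is a field of a reclaim-throttling tracepoint. The matched lines slot into the standard TRACE_EVENT() skeleton roughly as sketched below; the event name and the TP_printk() format are reconstructed for illustration rather than copied from vmscan.h.

/* Illustrative TRACE_EVENT() skeleton around the matched lines above;
 * the event name and TP_printk() format are assumptions. */
TRACE_EVENT(mm_vmscan_throttled,

	TP_PROTO(int nid, int usec_timeout, int usec_delayed, int reason),

	TP_ARGS(nid, usec_timeout, usec_delayed, reason),

	TP_STRUCT__entry(
		__field(int, nid)
		__field(int, usec_timeout)      /* reclaim's wait budget, in usec */
		__field(int, usec_delayed)      /* how long it actually stalled */
		__field(int, reason)
	),

	TP_fast_assign(
		__entry->nid = nid;
		__entry->usec_timeout = usec_timeout;
		__entry->usec_delayed = usec_delayed;
		__entry->reason = reason;
	),

	TP_printk("nid=%d usec_timeout=%d usec_delayed=%d reason=%d",
		__entry->nid, __entry->usec_timeout,
		__entry->usec_delayed, __entry->reason)
);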
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/

amdgpu_jpeg.c
   169   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_jpeg_dec_ring_test_ring()
   176   if (i >= adev->usec_timeout)                 in amdgpu_jpeg_dec_ring_test_ring()
   245   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_jpeg_dec_ring_test_ib()
   254   if (i >= adev->usec_timeout)                 in amdgpu_jpeg_dec_ring_test_ib()
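Most of the amdgpu ring and IB tests in this directory share one shape: submit a packet that writes a known value, then read it back in a loop bounded by adev->usec_timeout. A hedged sketch of that read-back half; the scratch register argument and the 0xDEADBEEF marker are illustrative.

/* Sketch of the read-back half of an amdgpu *_ring_test_ring(); the
 * register and expected value are placeholders for illustration. */
static int ring_test_poll(struct amdgpu_device *adev, u32 scratch_reg)
{
	unsigned int i;
	u32 tmp;

	for (i = 0; i < adev->usec_timeout; i++) {
		tmp = RREG32(scratch_reg);
		if (tmp == 0xDEADBEEF)   /* value the test packet was asked to write */
			break;
		udelay(1);
	}

	if (i >= adev->usec_timeout)
		return -ETIMEDOUT;       /* packet never landed within the budget */

	return 0;
}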
gfx_v11_0.c
   528   for (i = 0; i < adev->usec_timeout; i++) {           in gfx_v11_0_ring_test_ring()
   538   if (i >= adev->usec_timeout)                         in gfx_v11_0_ring_test_ring()
  2306   uint32_t usec_timeout = 50000; /* wait for 50ms */   in gfx_v11_0_config_me_cache()  local
  2316   for (i = 0; i < usec_timeout; i++) {                 in gfx_v11_0_config_me_cache()
  2324   if (i >= usec_timeout) {                             in gfx_v11_0_config_me_cache()
  2350   uint32_t usec_timeout = 50000; /* wait for 50ms */   in gfx_v11_0_config_pfp_cache()  local
  2360   for (i = 0; i < usec_timeout; i++) {                 in gfx_v11_0_config_pfp_cache()
  2368   if (i >= usec_timeout) {                             in gfx_v11_0_config_pfp_cache()
  2394   uint32_t usec_timeout = 50000; /* wait for 50ms */   in gfx_v11_0_config_mec_cache()  local
  2405   for (i = 0; i < usec_timeout; i++) {                 in gfx_v11_0_config_mec_cache()
  [all …]
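gfx_v11_0.c (and gfx_v12_0.c below) stand out because the microcode-cache setup helpers use a local usec_timeout of 50000, i.e. a fixed 50 ms budget, instead of the per-device adev->usec_timeout. A sketch of that wait, with the register and completion bit as placeholders for the CP instruction-cache handshake:

/* Sketch of the fixed 50 ms wait used by the gfx_v11_0_config_*_cache()
 * helpers; IC_OP_CNTL_REG and INVALIDATE_CACHE_COMPLETE are placeholders. */
static int config_cache_wait(struct amdgpu_device *adev)
{
	uint32_t usec_timeout = 50000; /* wait for 50ms, not adev->usec_timeout */
	int i;

	for (i = 0; i < usec_timeout; i++) {
		if (RREG32(IC_OP_CNTL_REG) & INVALIDATE_CACHE_COMPLETE)
			break;
		udelay(1);
	}

	if (i >= usec_timeout) {
		dev_err(adev->dev, "failed to invalidate instruction cache\n");
		return -EINVAL;
	}

	return 0;
}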
amdgpu_lsdma.c
    36   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_lsdma_wait_for()

gfx_v12_0.c
   430   for (i = 0; i < adev->usec_timeout; i++) {   in gfx_v12_0_ring_test_ring()
   440   if (i >= adev->usec_timeout)                 in gfx_v12_0_ring_test_ring()
  2163   for (i = 0; i < adev->usec_timeout; i++) {   in gfx_v12_0_wait_for_rlc_autoload_complete()
  2177   if (i >= adev->usec_timeout) {               in gfx_v12_0_wait_for_rlc_autoload_complete()
  2200   for (i = 0; i < adev->usec_timeout; i++) {   in gfx_v12_0_cp_gfx_enable()
  2206   if (i >= adev->usec_timeout)                 in gfx_v12_0_cp_gfx_enable()
  2219   uint32_t usec_timeout = 50000; /* wait for 50ms */   in gfx_v12_0_cp_gfx_load_pfp_microcode_rs64()  local
  2285   for (i = 0; i < usec_timeout; i++) {         in gfx_v12_0_cp_gfx_load_pfp_microcode_rs64()
  2293   if (i >= usec_timeout) {                     in gfx_v12_0_cp_gfx_load_pfp_microcode_rs64()
  2303   for (i = 0; i < usec_timeout; i++) {         in gfx_v12_0_cp_gfx_load_pfp_microcode_rs64()
  [all …]

sdma_v7_0.c
   596   max(adev->usec_timeout/100000, 1));          in sdma_v7_0_gfx_resume()
   736   for (j = 0; j < adev->usec_timeout; j++) {   in sdma_v7_0_load_microcode()
   747   if (j >= adev->usec_timeout) {               in sdma_v7_0_load_microcode()
   954   for (i = 0; i < adev->usec_timeout; i++) {   in sdma_v7_0_ring_test_ring()
   967   if (i >= adev->usec_timeout)                 in sdma_v7_0_ring_test_ring()
  1402   for (i = 0; i < adev->usec_timeout; i++) {   in sdma_v7_0_wait_for_idle()
  1442   for (i = 0; i < adev->usec_timeout; i++) {   in sdma_v7_0_ring_preempt_ib()
  1449   if (i >= adev->usec_timeout) {               in sdma_v7_0_ring_preempt_ib()

gmc_v11_0.c
   254   for (i = 0; i < adev->usec_timeout; i++) {   in gmc_v11_0_flush_gpu_tlb()
   262   if (i >= adev->usec_timeout)                 in gmc_v11_0_flush_gpu_tlb()
   269   for (i = 0; i < adev->usec_timeout; i++) {   in gmc_v11_0_flush_gpu_tlb()
   297   if (i >= adev->usec_timeout)                 in gmc_v11_0_flush_gpu_tlb()

gmc_v12_0.c
   223   for (i = 0; i < adev->usec_timeout; i++) {   in gmc_v12_0_flush_vm_hub()
   232   if (i >= adev->usec_timeout)                 in gmc_v12_0_flush_vm_hub()
   240   for (i = 0; i < adev->usec_timeout; i++) {   in gmc_v12_0_flush_vm_hub()
   274   if (i < adev->usec_timeout)                  in gmc_v12_0_flush_vm_hub()
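The gmc_v11_0 / gmc_v12_0 hits (and gmc_v10_0 further down) come in pairs because a GPU TLB flush waits twice: first to acquire a per-VMHUB invalidation semaphore, then for the invalidation engine's ACK, each loop capped at adev->usec_timeout. A condensed sketch; the semaphore/ACK register offsets, the vmid bit position and the error strings are placeholders.

/* Condensed sketch of the two bounded waits in a gmc_v1x TLB flush;
 * sem_reg, ack_reg and the vmid bit are placeholders. */
static void flush_vm_hub_sketch(struct amdgpu_device *adev,
				u32 sem_reg, u32 ack_reg, u32 vmid)
{
	int i;

	/* 1) take the invalidation semaphore for this VM hub */
	for (i = 0; i < adev->usec_timeout; i++) {
		if (RREG32(sem_reg) & 0x1)
			break;
		udelay(1);
	}
	if (i >= adev->usec_timeout)
		dev_err(adev->dev, "Timeout waiting for sem acquire in VM flush!\n");

	/* ... write the invalidation request to the engine here ... */

	/* 2) wait for the engine to ACK the flush for this vmid */
	for (i = 0; i < adev->usec_timeout; i++) {
		if (RREG32(ack_reg) & (1 << vmid))
			break;
		udelay(1);
	}
	if (i >= adev->usec_timeout)
		dev_err(adev->dev, "Timeout waiting for VM flush ACK!\n");
}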
uvd_v3_1.c
   154   for (i = 0; i < adev->usec_timeout; i++) {   in uvd_v3_1_ring_test_ring()
   161   if (i >= adev->usec_timeout)                 in uvd_v3_1_ring_test_ring()
   774   for (i = 0; i < adev->usec_timeout; i++) {   in uvd_v3_1_wait_for_idle()

uvd_v4_2.c
   521   for (i = 0; i < adev->usec_timeout; i++) {   in uvd_v4_2_ring_test_ring()
   528   if (i >= adev->usec_timeout)                 in uvd_v4_2_ring_test_ring()
   674   for (i = 0; i < adev->usec_timeout; i++) {   in uvd_v4_2_wait_for_idle()

gmc_v10_0.c
   293   for (i = 0; i < adev->usec_timeout; i++) {   in gmc_v10_0_flush_gpu_tlb()
   301   if (i >= adev->usec_timeout)                 in gmc_v10_0_flush_gpu_tlb()
   316   for (i = 0; i < adev->usec_timeout; i++) {   in gmc_v10_0_flush_gpu_tlb()
   331   if (i >= adev->usec_timeout)                 in gmc_v10_0_flush_gpu_tlb()

amdgpu_vcn.c
   499   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_vcn_dec_ring_test_ring()
   506   if (i >= adev->usec_timeout)                 in amdgpu_vcn_dec_ring_test_ring()
   531   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_vcn_dec_sw_ring_test_ring()
   537   if (i >= adev->usec_timeout)                 in amdgpu_vcn_dec_sw_ring_test_ring()
   836   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_vcn_enc_ring_test_ring()
   842   if (i >= adev->usec_timeout)                 in amdgpu_vcn_enc_ring_test_ring()

uvd_v5_0.c
   536   for (i = 0; i < adev->usec_timeout; i++) {   in uvd_v5_0_ring_test_ring()
   543   if (i >= adev->usec_timeout)                 in uvd_v5_0_ring_test_ring()
   596   for (i = 0; i < adev->usec_timeout; i++) {   in uvd_v5_0_wait_for_idle()

sdma_v6_0.c
   572   max(adev->usec_timeout/100000, 1));          in sdma_v6_0_gfx_resume()
   935   for (i = 0; i < adev->usec_timeout; i++) {   in sdma_v6_0_ring_test_ring()
   948   if (i >= adev->usec_timeout)                 in sdma_v6_0_ring_test_ring()
  1416   for (i = 0; i < adev->usec_timeout; i++) {   in sdma_v6_0_wait_for_idle()
  1452   for (i = 0; i < adev->usec_timeout; i++) {   in sdma_v6_0_ring_preempt_ib()
  1459   if (i >= adev->usec_timeout) {               in sdma_v6_0_ring_preempt_ib()
/linux-6.12.1/drivers/gpu/drm/amd/pm/legacy-dpm/

si_smc.c
   175   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_si_send_msg_to_smc()
   193   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_si_wait_for_smc_inactive()

kv_smc.c
    39   for (i = 0; i < adev->usec_timeout; i++) {   in amdgpu_kv_notify_message_to_smu()
/linux-6.12.1/drivers/gpu/drm/amd/pm/powerplay/hwmgr/

smu_helper.c
   121   for (i = 0; i < hwmgr->usec_timeout; i++) {   in phm_wait_on_register()
   129   if (i == hwmgr->usec_timeout)                 in phm_wait_on_register()
   165   for (i = 0; i < hwmgr->usec_timeout; i++) {   in phm_wait_for_register_unequal()
   174   if (i == hwmgr->usec_timeout)                 in phm_wait_for_register_unequal()
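The powerplay helpers wrap the same polling idiom behind hwmgr->usec_timeout and, unlike the amdgpu code above, compare with == after the loop: the index either leaves the loop early via break or lands exactly on usec_timeout. A simplified sketch of the phm_wait_on_register() shape; the body is reconstructed for illustration and the timeout return value is an assumption, though the register read is shown via the cgs accessor these helpers use.

/* Simplified sketch of the phm_wait_on_register() pattern; the error
 * value returned on timeout is illustrative. */
static int wait_on_register_sketch(struct pp_hwmgr *hwmgr, uint32_t index,
				   uint32_t value, uint32_t mask)
{
	uint32_t cur_value;
	uint32_t i;

	for (i = 0; i < hwmgr->usec_timeout; i++) {
		cur_value = cgs_read_register(hwmgr->device, index);
		if ((cur_value & mask) == (value & mask))
			break;
		udelay(1);
	}

	/* == is enough: the loop either breaks early or runs to the limit */
	if (i == hwmgr->usec_timeout)
		return -ETIMEDOUT;

	return 0;
}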