Lines Matching refs:tmp_adev

149  struct amdgpu_device *tmp_adev = NULL;  in aldebaran_mode2_perform_reset()  local
163  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {  in aldebaran_mode2_perform_reset()
164      mutex_lock(&tmp_adev->reset_cntl->reset_lock);  in aldebaran_mode2_perform_reset()
165      tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2;  in aldebaran_mode2_perform_reset()
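Lines 163-165 are the first of four passes that aldebaran_mode2_perform_reset() makes over reset_device_list. A minimal sketch of that pass, reconstructed from the matched lines alone (the loop braces and the surrounding declarations of r and reset_device_list are assumed context from the function):

    /* Pass 1: take every device's reset lock up front and mark a mode-2
     * reset as in flight, so no competing reset can start on any node
     * of the hive while this one runs. */
    list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
        mutex_lock(&tmp_adev->reset_cntl->reset_lock);
        tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_MODE2;
    }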
171  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {  in aldebaran_mode2_perform_reset()
173      if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {  in aldebaran_mode2_perform_reset()
175              &tmp_adev->reset_cntl->reset_work))  in aldebaran_mode2_perform_reset()
178          r = aldebaran_mode2_reset(tmp_adev);  in aldebaran_mode2_perform_reset()
180          dev_err(tmp_adev->dev,  in aldebaran_mode2_perform_reset()
182              r, adev_to_drm(tmp_adev)->unique);  in aldebaran_mode2_perform_reset()
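Lines 171-182 form the dispatch pass. The matches show the XGMI node-count test, a queued reset_work (line 175 is its continuation), a direct aldebaran_mode2_reset() call otherwise, and an error print. A hedged reconstruction; the target workqueue, the -EALREADY fallback, the message text (source line 181 is not among the matches), and the break are assumptions:

    /* Pass 2: launch the reset on each device. Multi-node XGMI hives
     * queue the per-device reset_work so all nodes reset in parallel;
     * a single node is reset synchronously in place. */
    list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
        if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
            if (!queue_work(system_unbound_wq,          /* assumed wq */
                            &tmp_adev->reset_cntl->reset_work))
                r = -EALREADY;                          /* assumed */
        } else {
            r = aldebaran_mode2_reset(tmp_adev);
        }
        if (r) {
            dev_err(tmp_adev->dev,
                    "ASIC reset failed with error, %d for drm dev, %s", /* assumed text */
                    r, adev_to_drm(tmp_adev)->unique);
            break;                                      /* assumed */
        }
    }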
189  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {  in aldebaran_mode2_perform_reset()
190      if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {  in aldebaran_mode2_perform_reset()
191          flush_work(&tmp_adev->reset_cntl->reset_work);  in aldebaran_mode2_perform_reset()
192          r = tmp_adev->asic_reset_res;  in aldebaran_mode2_perform_reset()
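Lines 189-192 are the collection pass that pairs with the parallel dispatch above. Sketch; the early exit on error is an assumption:

    /* Pass 3: only XGMI devices were reset asynchronously, so wait for
     * each queued worker to finish and pick up the per-device result it
     * stored in asic_reset_res. */
    list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
        if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) {
            flush_work(&tmp_adev->reset_cntl->reset_work);
            r = tmp_adev->asic_reset_res;
            if (r)          /* assumed early exit */
                break;
        }
    }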
199  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {  in aldebaran_mode2_perform_reset()
200      mutex_unlock(&tmp_adev->reset_cntl->reset_lock);  in aldebaran_mode2_perform_reset()
201      tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_NONE;  in aldebaran_mode2_perform_reset()
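Lines 199-201 are the cleanup pass, mirroring pass 1:

    /* Pass 4: runs whether the reset succeeded or not - clear the
     * in-flight marker and drop the per-device locks taken in pass 1. */
    list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
        mutex_unlock(&tmp_adev->reset_cntl->reset_lock);
        tmp_adev->reset_cntl->active_reset = AMD_RESET_METHOD_NONE;
    }

The lock-all / dispatch / flush / unlock-all shape lets a hive-wide mode-2 reset run on every node concurrently while still looking atomic to any other reset path contending on reset_lock.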
330  struct amdgpu_device *tmp_adev = NULL;  in aldebaran_mode2_restore_hwcontext()  local
344  list_for_each_entry(tmp_adev, reset_device_list, reset_list) {  in aldebaran_mode2_restore_hwcontext()
345      dev_info(tmp_adev->dev,  in aldebaran_mode2_restore_hwcontext()
347      r = aldebaran_mode2_restore_ip(tmp_adev);  in aldebaran_mode2_restore_hwcontext()
355      amdgpu_register_gpu_instance(tmp_adev);  in aldebaran_mode2_restore_hwcontext()
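Lines 330-355 open the per-device resume loop in aldebaran_mode2_restore_hwcontext(). Sketch of the start of one iteration; the dev_info message (source line 346 is not among the matches) and the goto-style error path are assumptions:

    list_for_each_entry(tmp_adev, reset_device_list, reset_list) {
        dev_info(tmp_adev->dev,
                 "GPU reset succeeded, trying to resume\n"); /* assumed text */
        /* Bring the IP blocks torn down by the mode-2 reset back up. */
        r = aldebaran_mode2_restore_ip(tmp_adev);
        if (r)
            goto end;       /* assumed error path */

        /* Reset finished on this node; track the GPU instance again. */
        amdgpu_register_gpu_instance(tmp_adev);

        /* ... iteration continues with the RAS re-init below ... */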
358      con = amdgpu_ras_get_context(tmp_adev);  in aldebaran_mode2_restore_hwcontext()
359      if (!amdgpu_sriov_vf(tmp_adev) && con) {  in aldebaran_mode2_restore_hwcontext()
360          if (tmp_adev->sdma.ras &&  in aldebaran_mode2_restore_hwcontext()
361              tmp_adev->sdma.ras->ras_block.ras_late_init) {  in aldebaran_mode2_restore_hwcontext()
362              r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,  in aldebaran_mode2_restore_hwcontext()
363                  &tmp_adev->sdma.ras->ras_block.ras_comm);  in aldebaran_mode2_restore_hwcontext()
365              dev_err(tmp_adev->dev, "SDMA failed to execute ras_late_init! ret:%d\n", r);  in aldebaran_mode2_restore_hwcontext()
370          if (tmp_adev->gfx.ras &&  in aldebaran_mode2_restore_hwcontext()
371              tmp_adev->gfx.ras->ras_block.ras_late_init) {  in aldebaran_mode2_restore_hwcontext()
372              r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,  in aldebaran_mode2_restore_hwcontext()
373                  &tmp_adev->gfx.ras->ras_block.ras_comm);  in aldebaran_mode2_restore_hwcontext()
375              dev_err(tmp_adev->dev, "GFX failed to execute ras_late_init! ret:%d\n", r);  in aldebaran_mode2_restore_hwcontext()
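Lines 358-375 are two copies of the same pattern inside that loop: on bare metal (amdgpu_sriov_vf() false) with a RAS context present, re-run the SDMA and GFX blocks' ras_late_init hooks, since the reset wiped their RAS state. Sketch; the error handling after each dev_err is assumed:

    /* Under SR-IOV the host owns RAS, so a VF skips the re-init. */
    con = amdgpu_ras_get_context(tmp_adev);
    if (!amdgpu_sriov_vf(tmp_adev) && con) {
        if (tmp_adev->sdma.ras &&
            tmp_adev->sdma.ras->ras_block.ras_late_init) {
            r = tmp_adev->sdma.ras->ras_block.ras_late_init(tmp_adev,
                    &tmp_adev->sdma.ras->ras_block.ras_comm);
            if (r) {
                dev_err(tmp_adev->dev,
                        "SDMA failed to execute ras_late_init! ret:%d\n", r);
                goto end;   /* assumed */
            }
        }

        if (tmp_adev->gfx.ras &&
            tmp_adev->gfx.ras->ras_block.ras_late_init) {
            r = tmp_adev->gfx.ras->ras_block.ras_late_init(tmp_adev,
                    &tmp_adev->gfx.ras->ras_block.ras_comm);
            if (r) {
                dev_err(tmp_adev->dev,
                        "GFX failed to execute ras_late_init! ret:%d\n", r);
                goto end;   /* assumed */
            }
        }
    }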
381      amdgpu_ras_resume(tmp_adev);  in aldebaran_mode2_restore_hwcontext()
385          tmp_adev->gmc.xgmi.num_physical_nodes > 1)  in aldebaran_mode2_restore_hwcontext()
387              tmp_adev);  in aldebaran_mode2_restore_hwcontext()
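Lines 381-387 resume RAS and, on a hive, refresh the XGMI topology. Source lines 384 and 386 don't reference tmp_adev and so aren't listed; the hive guard and the amdgpu_xgmi_update_topology() callee are reconstructed and should be treated as assumptions:

    /* Re-arm the RAS interrupts/state that the reset disabled. */
    amdgpu_ras_resume(tmp_adev);

    /* PSP's view of the XGMI link topology has to be rebuilt after a
     * hive-wide reset. */
    if (reset_context->hive &&                        /* assumed guard */
        tmp_adev->gmc.xgmi.num_physical_nodes > 1)
        r = amdgpu_xgmi_update_topology(reset_context->hive, /* assumed callee */
                                        tmp_adev);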
390      amdgpu_irq_gpu_reset_resume_helper(tmp_adev);  in aldebaran_mode2_restore_hwcontext()
392      r = amdgpu_ib_ring_tests(tmp_adev);  in aldebaran_mode2_restore_hwcontext()
394      dev_err(tmp_adev->dev,  in aldebaran_mode2_restore_hwcontext()
397      tmp_adev->asic_reset_res = r;  in aldebaran_mode2_restore_hwcontext()
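Lines 390-397 close the loop: re-hook interrupts, then smoke-test the rings. Sketch; the message text (source lines 395-396 are not among the matches), the -EAGAIN conversion, and the goto are assumptions, while storing the result in asic_reset_res matches line 397:

    /* Interrupts were torn down during reset; restore them before
     * submitting any work. */
    amdgpu_irq_gpu_reset_resume_helper(tmp_adev);

    /* Submit a trivial IB on every ring to prove the ASIC really came
     * back; record a failure so the caller can escalate the reset. */
    r = amdgpu_ib_ring_tests(tmp_adev);
    if (r) {
        dev_err(tmp_adev->dev,
                "ib ring test failed (%d).\n", r);  /* assumed text */
        r = -EAGAIN;                                /* assumed */
        tmp_adev->asic_reset_res = r;
        goto end;                                   /* assumed */
    }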