
Searched refs: err_data (Results 1 – 25 of 43), sorted by relevance

/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/
amdgpu_umc.c
32 struct ras_err_data *err_data, uint64_t err_addr, in amdgpu_umc_convert_error_address() argument
38 err_data, err_addr, ch_inst, umc_inst); in amdgpu_umc_convert_error_address()
52 struct ras_err_data err_data; in amdgpu_umc_page_retirement_mca() local
55 ret = amdgpu_ras_error_data_init(&err_data); in amdgpu_umc_page_retirement_mca()
59 err_data.err_addr = in amdgpu_umc_page_retirement_mca()
62 if (!err_data.err_addr) { in amdgpu_umc_page_retirement_mca()
69 err_data.err_addr_len = adev->umc.max_ras_err_cnt_per_query; in amdgpu_umc_page_retirement_mca()
74 ret = amdgpu_umc_convert_error_address(adev, &err_data, err_addr, in amdgpu_umc_page_retirement_mca()
80 amdgpu_ras_add_bad_pages(adev, err_data.err_addr, in amdgpu_umc_page_retirement_mca()
81 err_data.err_addr_cnt); in amdgpu_umc_page_retirement_mca()
[all …]
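
Read together, the amdgpu_umc.c hits above outline the MCA page-retirement flow: initialize a ras_err_data, attach an err_addr array sized for adev->umc.max_ras_err_cnt_per_query, convert the raw MCA address into retireable page records, and hand the result to amdgpu_ras_add_bad_pages(). A minimal sketch of that flow, reconstructed from these fragments; the record type, allocation flags, and cleanup labels are assumptions rather than quotes from the file:

#include "amdgpu.h"
#include "amdgpu_ras.h"
#include "amdgpu_umc.h"

/* Hedged sketch of the amdgpu_umc_page_retirement_mca() pattern. */
static int page_retirement_mca_sketch(struct amdgpu_device *adev,
                                      uint64_t err_addr,
                                      uint32_t ch_inst, uint32_t umc_inst)
{
        struct ras_err_data err_data;
        int ret;

        ret = amdgpu_ras_error_data_init(&err_data);
        if (ret)
                return ret;

        /* One record slot per error the UMC block can report in one query. */
        err_data.err_addr = kcalloc(adev->umc.max_ras_err_cnt_per_query,
                                    sizeof(struct eeprom_table_record),
                                    GFP_KERNEL);
        if (!err_data.err_addr) {
                ret = -ENOMEM;
                goto out_fini;
        }
        err_data.err_addr_len = adev->umc.max_ras_err_cnt_per_query;

        /* Translate the raw MCA address into retireable page records. */
        ret = amdgpu_umc_convert_error_address(adev, &err_data, err_addr,
                                               ch_inst, umc_inst);
        if (ret)
                goto out_free;

        /* Persist the bad pages so they stay reserved across reboots. */
        if (err_data.err_addr_cnt)
                amdgpu_ras_add_bad_pages(adev, err_data.err_addr,
                                         err_data.err_addr_cnt);

out_free:
        kfree(err_data.err_addr);
out_fini:
        amdgpu_ras_error_data_fini(&err_data);
        return ret;
}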
umc_v8_7.c
93 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v8_7_ecc_info_query_ras_error_count() local
104 &(err_data->ce_count)); in umc_v8_7_ecc_info_query_ras_error_count()
107 &(err_data->ue_count)); in umc_v8_7_ecc_info_query_ras_error_count()
112 struct ras_err_data *err_data, uint64_t err_addr, in umc_v8_7_convert_error_address() argument
126 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v8_7_convert_error_address()
131 struct ras_err_data *err_data, in umc_v8_7_ecc_info_query_error_address() argument
145 if (!err_data->err_addr) in umc_v8_7_ecc_info_query_error_address()
155 umc_v8_7_convert_error_address(adev, err_data, err_addr, in umc_v8_7_ecc_info_query_error_address()
163 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v8_7_ecc_info_query_ras_error_address() local
174 err_data, in umc_v8_7_ecc_info_query_ras_error_address()
[all …]
amdgpu_ras.c
166 struct ras_err_data err_data; in amdgpu_reserve_page_direct() local
185 ret = amdgpu_ras_error_data_init(&err_data); in amdgpu_reserve_page_direct()
190 err_data.err_addr = &err_rec; in amdgpu_reserve_page_direct()
191 amdgpu_umc_fill_error_record(&err_data, address, address, 0, 0); in amdgpu_reserve_page_direct()
194 amdgpu_ras_add_bad_pages(adev, err_data.err_addr, in amdgpu_reserve_page_direct()
195 err_data.err_addr_cnt); in amdgpu_reserve_page_direct()
199 amdgpu_ras_error_data_fini(&err_data); in amdgpu_reserve_page_direct()
658 amdgpu_ras_error_data_fini(&obj->err_data); in put_obj()
690 if (amdgpu_ras_error_data_init(&obj->err_data)) in amdgpu_ras_create_obj()
1017 static void amdgpu_ras_get_ecc_info(struct amdgpu_device *adev, struct ras_err_data *err_data) in amdgpu_ras_get_ecc_info() argument
[all …]
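
The amdgpu_ras.c hits show the other common shape: a single on-stack record instead of an array. amdgpu_reserve_page_direct() points err_data.err_addr at one eeprom_table_record, fills it with amdgpu_umc_fill_error_record(), and passes it to amdgpu_ras_add_bad_pages(). A hedged sketch of just that sequence; the locking and the save-to-EEPROM step in the real function are omitted:

#include "amdgpu.h"
#include "amdgpu_ras.h"
#include "amdgpu_umc.h"

/* Hedged sketch of the amdgpu_reserve_page_direct() record-fill pattern. */
static int reserve_page_direct_sketch(struct amdgpu_device *adev,
                                      uint64_t address)
{
        struct eeprom_table_record err_rec;
        struct ras_err_data err_data;
        int ret;

        ret = amdgpu_ras_error_data_init(&err_data);
        if (ret)
                return ret;

        /* Single on-stack record: err_addr points at it and the count is
         * bumped by amdgpu_umc_fill_error_record(). */
        err_data.err_addr = &err_rec;
        amdgpu_umc_fill_error_record(&err_data, address, address, 0, 0);

        amdgpu_ras_add_bad_pages(adev, err_data.err_addr,
                                 err_data.err_addr_cnt);

        amdgpu_ras_error_data_fini(&err_data);
        return 0;
}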
umc_v8_10.c
147 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v8_10_query_ecc_error_count() local
153 &(err_data->ce_count)); in umc_v8_10_query_ecc_error_count()
156 &(err_data->ue_count)); in umc_v8_10_query_ecc_error_count()
206 struct ras_err_data *err_data, uint64_t err_addr, in umc_v8_10_convert_error_address() argument
239 amdgpu_umc_fill_error_record(err_data, na_err_addr, in umc_v8_10_convert_error_address()
251 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v8_10_query_error_address() local
262 if (!err_data->err_addr) { in umc_v8_10_query_error_address()
277 umc_v8_10_convert_error_address(adev, err_data, err_addr, in umc_v8_10_query_error_address()
383 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v8_10_ecc_info_query_ecc_error_count() local
387 &(err_data->ce_count)); in umc_v8_10_ecc_info_query_ecc_error_count()
[all …]
umc_v6_7.c
167 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v6_7_ecc_info_querry_ecc_error_count() local
171 &(err_data->ce_count)); in umc_v6_7_ecc_info_querry_ecc_error_count()
175 &(err_data->ue_count)); in umc_v6_7_ecc_info_querry_ecc_error_count()
188 struct ras_err_data *err_data, uint64_t err_addr, in umc_v6_7_convert_error_address() argument
211 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_7_convert_error_address()
217 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_7_convert_error_address()
229 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v6_7_ecc_info_query_error_address() local
237 if (!err_data->err_addr) in umc_v6_7_ecc_info_query_error_address()
247 umc_v6_7_convert_error_address(adev, err_data, err_addr, in umc_v6_7_ecc_info_query_error_address()
416 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v6_7_query_ecc_error_count() local
[all …]
umc_v12_0.c
140 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v12_0_query_error_count() local
160 amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, ue_count); in umc_v12_0_query_error_count()
161 amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, ce_count); in umc_v12_0_query_error_count()
162 amdgpu_ras_error_statistic_de_count(err_data, &mcm_info, de_count); in umc_v12_0_query_error_count()
177 struct ras_err_data *err_data, in umc_v12_0_convert_error_address() argument
215 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v12_0_convert_error_address()
223 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v12_0_convert_error_address()
332 struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v12_0_query_error_address() local
348 if (!err_data->err_addr) { in umc_v12_0_query_error_address()
377 umc_v12_0_convert_error_address(adev, err_data, &addr_in); in umc_v12_0_query_error_address()
[all …]
nbio_v7_4.c
364 struct ras_err_data err_data; in nbio_v7_4_handle_ras_controller_intr_no_bifring() local
367 if (amdgpu_ras_error_data_init(&err_data)) in nbio_v7_4_handle_ras_controller_intr_no_bifring()
392 nbio_v7_4_query_ras_error_count(adev, &err_data); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
395 obj->err_data.ue_count += err_data.ue_count; in nbio_v7_4_handle_ras_controller_intr_no_bifring()
396 obj->err_data.ce_count += err_data.ce_count; in nbio_v7_4_handle_ras_controller_intr_no_bifring()
398 if (err_data.ce_count) in nbio_v7_4_handle_ras_controller_intr_no_bifring()
401 obj->err_data.ce_count, in nbio_v7_4_handle_ras_controller_intr_no_bifring()
404 if (err_data.ue_count) in nbio_v7_4_handle_ras_controller_intr_no_bifring()
407 obj->err_data.ue_count, in nbio_v7_4_handle_ras_controller_intr_no_bifring()
421 amdgpu_ras_error_data_fini(&err_data); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
[all …]
nbio_v7_9.c
525 struct ras_err_data err_data; in nbio_v7_9_handle_ras_controller_intr_no_bifring() local
528 if (amdgpu_ras_error_data_init(&err_data)) in nbio_v7_9_handle_ras_controller_intr_no_bifring()
547 nbio_v7_9_query_ras_error_count(adev, &err_data); in nbio_v7_9_handle_ras_controller_intr_no_bifring()
550 obj->err_data.ue_count += err_data.ue_count; in nbio_v7_9_handle_ras_controller_intr_no_bifring()
551 obj->err_data.ce_count += err_data.ce_count; in nbio_v7_9_handle_ras_controller_intr_no_bifring()
553 if (err_data.ce_count) in nbio_v7_9_handle_ras_controller_intr_no_bifring()
556 obj->err_data.ce_count, in nbio_v7_9_handle_ras_controller_intr_no_bifring()
559 if (err_data.ue_count) in nbio_v7_9_handle_ras_controller_intr_no_bifring()
562 obj->err_data.ue_count, in nbio_v7_9_handle_ras_controller_intr_no_bifring()
570 amdgpu_ras_error_data_fini(&err_data); in nbio_v7_9_handle_ras_controller_intr_no_bifring()
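
nbio_v7_4.c and nbio_v7_9.c share the interrupt-side pattern: hardware counters are queried into a short-lived ras_err_data, then folded into the per-block object's running totals before logging. A hedged outline of that handler; the obj type, the log wording, and the surrounding register handshaking are assumptions or omissions:

#include "amdgpu.h"
#include "amdgpu_ras.h"

/* Hedged outline of the nbio_v7_x RAS controller interrupt pattern. */
static void ras_controller_intr_sketch(struct amdgpu_device *adev,
                                       struct ras_manager *obj)
{
        struct ras_err_data err_data;

        if (amdgpu_ras_error_data_init(&err_data))
                return;

        /* Read this block's hardware error counters into the local copy. */
        nbio_v7_4_query_ras_error_count(adev, &err_data);

        /* Accumulate into the long-lived per-block totals. */
        obj->err_data.ue_count += err_data.ue_count;
        obj->err_data.ce_count += err_data.ce_count;

        if (err_data.ce_count)
                dev_info(adev->dev, "%lu correctable hardware errors detected so far\n",
                         obj->err_data.ce_count);
        if (err_data.ue_count)
                dev_info(adev->dev, "%lu uncorrectable hardware errors detected so far\n",
                         obj->err_data.ue_count);

        amdgpu_ras_error_data_fini(&err_data);
}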
umc_v6_1.c
257 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v6_1_query_ras_error_count() local
279 &(err_data->ce_count)); in umc_v6_1_query_ras_error_count()
282 &(err_data->ue_count)); in umc_v6_1_query_ras_error_count()
296 struct ras_err_data *err_data, in umc_v6_1_query_error_address() argument
324 if (!err_data->err_addr) { in umc_v6_1_query_error_address()
345 amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_1_query_error_address()
356 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v6_1_query_ras_error_address() local
377 err_data, in umc_v6_1_query_ras_error_address()
hdp_v4_0.c
67 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in hdp_v4_0_query_ras_error_count() local
69 err_data->ue_count = 0; in hdp_v4_0_query_ras_error_count()
70 err_data->ce_count = 0; in hdp_v4_0_query_ras_error_count()
76 err_data->ue_count += RREG32_SOC15(HDP, 0, mmHDP_EDC_CNT); in hdp_v4_0_query_ras_error_count()
amdgpu_mca.c
76 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in amdgpu_mca_query_ras_error_count() local
78 amdgpu_mca_query_correctable_error_count(adev, mc_status_addr, &(err_data->ce_count)); in amdgpu_mca_query_ras_error_count()
79 amdgpu_mca_query_uncorrectable_error_count(adev, mc_status_addr, &(err_data->ue_count)); in amdgpu_mca_query_ras_error_count()
397 struct mca_bank_set *mca_set, struct ras_err_data *err_data) in amdgpu_mca_dispatch_mca_set() argument
428 amdgpu_ras_error_statistic_ue_count(err_data, in amdgpu_mca_dispatch_mca_set()
432 amdgpu_ras_error_statistic_de_count(err_data, in amdgpu_mca_dispatch_mca_set()
435 amdgpu_ras_error_statistic_ce_count(err_data, in amdgpu_mca_dispatch_mca_set()
458 struct ras_err_data *err_data, struct ras_query_context *qctx) in amdgpu_mca_smu_log_ras_error() argument
470 ret = amdgpu_mca_dispatch_mca_set(adev, blk, type, &mca_set, err_data); in amdgpu_mca_smu_log_ras_error()
484 ret = amdgpu_mca_dispatch_mca_set(adev, blk, type, &mca_cache->mca_set, err_data); in amdgpu_mca_smu_log_ras_error()
gfx_v9_4.c
690 struct ras_err_data *err_data) in gfx_v9_4_query_utc_edc_status() argument
716 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
724 err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
738 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
747 err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
760 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
768 err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
782 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
791 err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
805 err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
[all …]
amdgpu_ras.h
598 #define for_each_ras_error(err_node, err_data) \ argument
599 list_for_each_entry(err_node, &(err_data)->err_node_list, node)
611 void *err_data,
647 struct ras_err_data err_data; member
933 int amdgpu_ras_error_data_init(struct ras_err_data *err_data);
934 void amdgpu_ras_error_data_fini(struct ras_err_data *err_data);
935 int amdgpu_ras_error_statistic_ce_count(struct ras_err_data *err_data,
938 int amdgpu_ras_error_statistic_ue_count(struct ras_err_data *err_data,
941 int amdgpu_ras_error_statistic_de_count(struct ras_err_data *err_data,
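
amdgpu_ras.h ties the entries together: for_each_ras_error() walks the per-MCM error nodes hanging off a ras_err_data, and the statistic helpers are how the newer paths above (umc_v12_0.c, mmhub_v1_8.c, amdgpu_aca.c, amdgpu_xgmi.c) account errors per socket/die instead of bumping the flat ce_count/ue_count fields. A hedged usage sketch; the field names inside ras_err_node and ras_err_info are assumptions based on this listing, not verified against the header:

#include "amdgpu.h"
#include "amdgpu_ras.h"

/* Hedged sketch: record one correctable error against a given die, then
 * walk the per-MCM nodes that the query accumulated. */
static void account_and_dump_sketch(struct amdgpu_device *adev,
                                    struct ras_err_data *err_data,
                                    int socket_id, int die_id)
{
        struct amdgpu_smuio_mcm_config_info mcm_info = {
                .socket_id = socket_id,
                .die_id = die_id,
        };
        struct ras_err_node *err_node;

        /* Per-MCM accounting used by the umc_v12_0 / mmhub_v1_8 / ACA paths. */
        amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, 1ULL);

        /* Walk every MCM that recorded errors during this query. */
        for_each_ras_error(err_node, err_data) {
                dev_dbg(adev->dev, "socket %d die %d: ce %llu ue %llu de %llu\n",
                        err_node->err_info.mcm_info.socket_id,
                        err_node->err_info.mcm_info.die_id,
                        err_node->err_info.ce_count,
                        err_node->err_info.ue_count,
                        err_node->err_info.de_count);
        }
}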
amdgpu_aca.c
440 …or_data(struct aca_bank_error *bank_error, enum aca_error_type type, struct ras_err_data *err_data) in aca_log_aca_error_data() argument
459 amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, count); in aca_log_aca_error_data()
462 amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, count); in aca_log_aca_error_data()
465 amdgpu_ras_error_statistic_de_count(err_data, &mcm_info, count); in aca_log_aca_error_data()
474 …a_log_aca_error(struct aca_handle *handle, enum aca_error_type type, struct ras_err_data *err_data) in aca_log_aca_error() argument
486 aca_log_aca_error_data(bank_error, type, err_data); in aca_log_aca_error()
497 struct ras_err_data *err_data, struct ras_query_context *qctx) in __aca_get_error_data() argument
519 return aca_log_aca_error(handle, type, err_data); in __aca_get_error_data()
531 enum aca_error_type type, struct ras_err_data *err_data, in amdgpu_aca_get_error_data() argument
534 if (!handle || !err_data) in amdgpu_aca_get_error_data()
[all …]
sdma_v4_4.c
199 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in sdma_v4_4_query_ras_error_count_by_instance() local
225 err_data->ue_count += sec_count; in sdma_v4_4_query_ras_error_count_by_instance()
231 err_data->ce_count = 0; in sdma_v4_4_query_ras_error_count_by_instance()
amdgpu_xgmi.c
1263 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in amdgpu_xgmi_legacy_query_ras_error_count() local
1271 err_data->ue_count = 0; in amdgpu_xgmi_legacy_query_ras_error_count()
1272 err_data->ce_count = 0; in amdgpu_xgmi_legacy_query_ras_error_count()
1352 err_data->ue_count += ue_cnt; in amdgpu_xgmi_legacy_query_ras_error_count()
1353 err_data->ce_count += ce_cnt; in amdgpu_xgmi_legacy_query_ras_error_count()
1381 u64 mca_base, struct ras_err_data *err_data) in __xgmi_v6_4_0_query_error_count() argument
1392 amdgpu_ras_error_statistic_ue_count(err_data, mcm_info, 1ULL); in __xgmi_v6_4_0_query_error_count()
1395 amdgpu_ras_error_statistic_ce_count(err_data, mcm_info, 1ULL); in __xgmi_v6_4_0_query_error_count()
1404 …_v6_4_0_query_error_count(struct amdgpu_device *adev, int xgmi_inst, struct ras_err_data *err_data) in xgmi_v6_4_0_query_error_count() argument
1413 __xgmi_v6_4_0_query_error_count(adev, &mcm_info, xgmi_v6_4_0_mca_base_array[i], err_data); in xgmi_v6_4_0_query_error_count()
[all …]
umc_v6_7.h
75 struct ras_err_data *err_data, uint64_t err_addr,
mmhub_v1_0.c
748 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in mmhub_v1_0_query_ras_error_count() local
753 err_data->ue_count = 0; in mmhub_v1_0_query_ras_error_count()
754 err_data->ce_count = 0; in mmhub_v1_0_query_ras_error_count()
765 err_data->ce_count += sec_count; in mmhub_v1_0_query_ras_error_count()
766 err_data->ue_count += ded_count; in mmhub_v1_0_query_ras_error_count()
amdgpu_sdma.h
168 void *err_data,
amdgpu_umc.h
115 int amdgpu_umc_fill_error_record(struct ras_err_data *err_data,
amdgpu_mca.h
167 struct ras_err_data *err_data, struct ras_query_context *qctx);
mmhub_v1_7.c
1246 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in mmhub_v1_7_query_ras_error_count() local
1251 err_data->ue_count = 0; in mmhub_v1_7_query_ras_error_count()
1252 err_data->ce_count = 0; in mmhub_v1_7_query_ras_error_count()
1262 err_data->ce_count += sec_count; in mmhub_v1_7_query_ras_error_count()
1263 err_data->ue_count += ded_count; in mmhub_v1_7_query_ras_error_count()
mmhub_v1_8.c
629 struct ras_err_data *err_data = (struct ras_err_data *)ras_err_status; in mmhub_v1_8_inst_query_ras_error_count() local
656 amdgpu_ras_error_statistic_ce_count(err_data, &mcm_info, ce_count); in mmhub_v1_8_inst_query_ras_error_count()
657 amdgpu_ras_error_statistic_ue_count(err_data, &mcm_info, ue_count); in mmhub_v1_8_inst_query_ras_error_count()
/linux-6.12.1/drivers/misc/
smpro-errmon.c
251 unsigned char err_data[MAX_READ_BLOCK_LENGTH]; in smpro_error_data_read() local
271 memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH); in smpro_error_data_read()
272 ret = regmap_noinc_read(errmon->regmap, err_info->data, err_data, err_length); in smpro_error_data_read()
285 return sysfs_emit(buf, "%*phN\n", MAX_READ_BLOCK_LENGTH, err_data); in smpro_error_data_read()
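
Outside the GPU driver, smpro-errmon.c uses err_data simply as a local read buffer: a fixed-size block is pulled over regmap with regmap_noinc_read() and exposed through sysfs as a hex string. A hedged sketch of that read path; the constant's value and the attribute plumbing are simplified assumptions:

#include <linux/regmap.h>
#include <linux/string.h>
#include <linux/sysfs.h>

#define MAX_READ_BLOCK_LENGTH	48	/* assumed value; use the driver's define */

/* Hedged sketch of the smpro_error_data_read() buffer-and-emit pattern. */
static ssize_t error_data_read_sketch(struct regmap *regmap,
                                      unsigned int data_reg,
                                      size_t err_length, char *buf)
{
        unsigned char err_data[MAX_READ_BLOCK_LENGTH];
        int ret;

        if (err_length > MAX_READ_BLOCK_LENGTH)
                err_length = MAX_READ_BLOCK_LENGTH;

        memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH);
        /* "No-increment" read: the device streams err_length bytes from one register. */
        ret = regmap_noinc_read(regmap, data_reg, err_data, err_length);
        if (ret < 0)
                return ret;

        /* "%*phN" formats the buffer as a plain hex string. */
        return sysfs_emit(buf, "%*phN\n", MAX_READ_BLOCK_LENGTH, err_data);
}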
/linux-6.12.1/drivers/clk/hisilicon/
clk.c
78 goto err_data; in hisi_clk_init()
84 err_data: in hisi_clk_init()
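
The last hit is unrelated to RAS: in hisi_clk_init() err_data is just a cleanup label for the usual goto-unwind when a later allocation fails. A generic illustration of that pattern; the structure and allocations below are placeholders, not the driver's actual ones:

#include <linux/slab.h>

struct clk_init_sketch_data {
        void **table;
};

/* Hedged illustration of the err_data unwind label in hisi_clk_init(). */
static struct clk_init_sketch_data *clk_init_sketch(int nr_clks)
{
        struct clk_init_sketch_data *clk_data;

        clk_data = kzalloc(sizeof(*clk_data), GFP_KERNEL);
        if (!clk_data)
                return NULL;

        clk_data->table = kcalloc(nr_clks, sizeof(*clk_data->table), GFP_KERNEL);
        if (!clk_data->table)
                goto err_data;          /* unwind the earlier allocation */

        return clk_data;

err_data:
        kfree(clk_data);
        return NULL;
}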
