Lines Matching refs:hwmgr

173 static int smu7_force_clock_level(struct pp_hwmgr *hwmgr,
175 static int smu7_notify_has_display(struct pp_hwmgr *hwmgr);
203 static int smu7_get_mc_microcode_version(struct pp_hwmgr *hwmgr) in smu7_get_mc_microcode_version() argument
205 cgs_write_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_INDEX, 0x9F); in smu7_get_mc_microcode_version()
207 hwmgr->microcode_version_info.MC = cgs_read_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_DATA); in smu7_get_mc_microcode_version()
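Taken together, the matches at 203-207 cover essentially the whole helper; a minimal reconstruction (only the trailing return 0 is assumed) would read:

    static int smu7_get_mc_microcode_version(struct pp_hwmgr *hwmgr)
    {
            /* select the MC ucode version register through the debug index */
            cgs_write_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_INDEX, 0x9F);

            hwmgr->microcode_version_info.MC =
                    cgs_read_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_DATA);

            return 0;
    }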
212 static uint16_t smu7_get_current_pcie_speed(struct pp_hwmgr *hwmgr) in smu7_get_current_pcie_speed() argument
217 speedCntl = cgs_read_ind_register(hwmgr->device, CGS_IND_REG__PCIE, in smu7_get_current_pcie_speed()
223 static int smu7_get_current_pcie_lane_number(struct pp_hwmgr *hwmgr) in smu7_get_current_pcie_lane_number() argument
228 link_width = PHM_READ_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__PCIE, in smu7_get_current_pcie_lane_number()
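The matches at 223-228 are the core of this lane-count helper; a sketch of the likely full body follows. The PCIE_LC_LINK_WIDTH_CNTL register, its LC_LINK_WIDTH_RD field, and the decode_pcie_lane_width() helper are assumptions not visible in the match.

    static int smu7_get_current_pcie_lane_number(struct pp_hwmgr *hwmgr)
    {
            uint32_t link_width;

            /* assumed register/field: encoded, negotiated link width */
            link_width = PHM_READ_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__PCIE,
                            PCIE_LC_LINK_WIDTH_CNTL, LC_LINK_WIDTH_RD);

            PP_ASSERT_WITH_CODE((7 >= link_width),
                            "Invalid PCIe lane width!", return 0);

            /* assumed helper that maps the encoding to a lane count */
            return decode_pcie_lane_width(link_width);
    }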
243 static int smu7_enable_smc_voltage_controller(struct pp_hwmgr *hwmgr) in smu7_enable_smc_voltage_controller() argument
245 if (hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_enable_smc_voltage_controller()
246 hwmgr->chip_id <= CHIP_VEGAM) { in smu7_enable_smc_voltage_controller()
247 PHM_WRITE_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_enable_smc_voltage_controller()
249 PHM_WRITE_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_enable_smc_voltage_controller()
253 if (hwmgr->feature_mask & PP_SMC_VOLTAGE_CONTROL_MASK) in smu7_enable_smc_voltage_controller()
254 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_Voltage_Cntl_Enable, NULL); in smu7_enable_smc_voltage_controller()
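From the matches at 243-254 the enable path likely looks like the sketch below; the PWR_SVI2_PLANE1_LOAD register and its PSI1/PSI0_EN fields are assumptions, since the match lines truncate the PHM_WRITE_VFPF_INDIRECT_FIELD arguments.

    static int smu7_enable_smc_voltage_controller(struct pp_hwmgr *hwmgr)
    {
            if (hwmgr->chip_id >= CHIP_POLARIS10 &&
                hwmgr->chip_id <= CHIP_VEGAM) {
                    /* assumed fields: clear the PSI bits before handing
                     * voltage control to the SMC */
                    PHM_WRITE_VFPF_INDIRECT_FIELD(hwmgr->device,
                                    CGS_IND_REG__SMC, PWR_SVI2_PLANE1_LOAD, PSI1, 0);
                    PHM_WRITE_VFPF_INDIRECT_FIELD(hwmgr->device,
                                    CGS_IND_REG__SMC, PWR_SVI2_PLANE1_LOAD, PSI0_EN, 0);
            }

            if (hwmgr->feature_mask & PP_SMC_VOLTAGE_CONTROL_MASK)
                    smum_send_msg_to_smc(hwmgr, PPSMC_MSG_Voltage_Cntl_Enable, NULL);

            return 0;
    }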
264 static bool smu7_voltage_control(const struct pp_hwmgr *hwmgr) in smu7_voltage_control() argument
267 (const struct smu7_hwmgr *)(hwmgr->backend); in smu7_voltage_control()
278 static int smu7_enable_voltage_control(struct pp_hwmgr *hwmgr) in smu7_enable_voltage_control() argument
281 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_voltage_control()
316 static int smu7_construct_voltage_tables(struct pp_hwmgr *hwmgr) in smu7_construct_voltage_tables() argument
318 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_construct_voltage_tables()
320 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_construct_voltage_tables()
325 result = atomctrl_get_voltage_table_v3(hwmgr, in smu7_construct_voltage_tables()
332 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_construct_voltage_tables()
335 else if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_construct_voltage_tables()
337 hwmgr->dyn_state.mvdd_dependency_on_mclk); in smu7_construct_voltage_tables()
345 result = atomctrl_get_voltage_table_v3(hwmgr, in smu7_construct_voltage_tables()
352 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_construct_voltage_tables()
355 else if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_construct_voltage_tables()
357 hwmgr->dyn_state.vddci_dependency_on_mclk); in smu7_construct_voltage_tables()
373 result = atomctrl_get_voltage_table_v3(hwmgr, in smu7_construct_voltage_tables()
380 if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_construct_voltage_tables()
382 hwmgr->dyn_state.vddc_dependency_on_mclk); in smu7_construct_voltage_tables()
383 else if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_construct_voltage_tables()
391 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_VDDC); in smu7_construct_voltage_tables()
398 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_VDDGFX); in smu7_construct_voltage_tables()
405 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_VDDCI); in smu7_construct_voltage_tables()
412 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_MVDD); in smu7_construct_voltage_tables()
429 struct pp_hwmgr *hwmgr) in smu7_program_static_screen_threshold_parameters() argument
431 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_program_static_screen_threshold_parameters()
434 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_static_screen_threshold_parameters()
438 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_static_screen_threshold_parameters()
451 static int smu7_enable_display_gap(struct pp_hwmgr *hwmgr) in smu7_enable_display_gap() argument
454 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_display_gap()
463 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_display_gap()
475 static int smu7_program_voting_clients(struct pp_hwmgr *hwmgr) in smu7_program_voting_clients() argument
477 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_program_voting_clients()
481 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_voting_clients()
483 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_voting_clients()
487 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_voting_clients()
493 static int smu7_clear_voting_clients(struct pp_hwmgr *hwmgr) in smu7_clear_voting_clients() argument
498 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_clear_voting_clients()
500 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_clear_voting_clients()
504 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_clear_voting_clients()
513 static int smu7_copy_and_switch_arb_sets(struct pp_hwmgr *hwmgr, in smu7_copy_and_switch_arb_sets() argument
523 mc_arb_dram_timing = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING); in smu7_copy_and_switch_arb_sets()
524 mc_arb_dram_timing2 = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2); in smu7_copy_and_switch_arb_sets()
525 burst_time = PHM_READ_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE0); in smu7_copy_and_switch_arb_sets()
528 mc_arb_dram_timing = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING_1); in smu7_copy_and_switch_arb_sets()
529 mc_arb_dram_timing2 = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2_1); in smu7_copy_and_switch_arb_sets()
530 burst_time = PHM_READ_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE1); in smu7_copy_and_switch_arb_sets()
538 cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING, mc_arb_dram_timing); in smu7_copy_and_switch_arb_sets()
539 cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2, mc_arb_dram_timing2); in smu7_copy_and_switch_arb_sets()
540 PHM_WRITE_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE0, burst_time); in smu7_copy_and_switch_arb_sets()
543 cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING_1, mc_arb_dram_timing); in smu7_copy_and_switch_arb_sets()
544 cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2); in smu7_copy_and_switch_arb_sets()
545 PHM_WRITE_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE1, burst_time); in smu7_copy_and_switch_arb_sets()
551 mc_cg_config = cgs_read_register(hwmgr->device, mmMC_CG_CONFIG); in smu7_copy_and_switch_arb_sets()
553 cgs_write_register(hwmgr->device, mmMC_CG_CONFIG, mc_cg_config); in smu7_copy_and_switch_arb_sets()
554 PHM_WRITE_FIELD(hwmgr->device, MC_ARB_CG, CG_ARB_REQ, arb_dest); in smu7_copy_and_switch_arb_sets()
559 static int smu7_reset_to_default(struct pp_hwmgr *hwmgr) in smu7_reset_to_default() argument
561 return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_ResetToDefaults, NULL); in smu7_reset_to_default()
571 static int smu7_initial_switch_from_arbf0_to_f1(struct pp_hwmgr *hwmgr) in smu7_initial_switch_from_arbf0_to_f1() argument
573 return smu7_copy_and_switch_arb_sets(hwmgr, in smu7_initial_switch_from_arbf0_to_f1()
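The matches at 559-573 describe two thin wrappers; a sketch is below. The MC_CG_ARB_FREQ_F0/MC_CG_ARB_FREQ_F1 arguments are assumed from the function name and are not shown in the match.

    static int smu7_reset_to_default(struct pp_hwmgr *hwmgr)
    {
            return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_ResetToDefaults, NULL);
    }

    /* assumed arguments: copy the F0 arbitration set into F1 and switch to it */
    static int smu7_initial_switch_from_arbf0_to_f1(struct pp_hwmgr *hwmgr)
    {
            return smu7_copy_and_switch_arb_sets(hwmgr,
                            MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
    }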
577 static int smu7_force_switch_to_arbf0(struct pp_hwmgr *hwmgr) in smu7_force_switch_to_arbf0() argument
581 tmp = (cgs_read_ind_register(hwmgr->device, in smu7_force_switch_to_arbf0()
588 return smu7_copy_and_switch_arb_sets(hwmgr, in smu7_force_switch_to_arbf0()
592 static uint16_t smu7_override_pcie_speed(struct pp_hwmgr *hwmgr) in smu7_override_pcie_speed() argument
594 struct amdgpu_device *adev = (struct amdgpu_device *)(hwmgr->adev); in smu7_override_pcie_speed()
613 static uint16_t smu7_override_pcie_width(struct pp_hwmgr *hwmgr) in smu7_override_pcie_width() argument
615 struct amdgpu_device *adev = (struct amdgpu_device *)(hwmgr->adev); in smu7_override_pcie_width()
634 static int smu7_setup_default_pcie_table(struct pp_hwmgr *hwmgr) in smu7_setup_default_pcie_table() argument
636 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_default_pcie_table()
639 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_setup_default_pcie_table()
661 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_LINK); in smu7_setup_default_pcie_table()
680 smum_update_smc_table(hwmgr, SMU_BIF_TABLE); in smu7_setup_default_pcie_table()
717 if (hwmgr->chip_family == AMDGPU_FAMILY_CI) { in smu7_setup_default_pcie_table()
734 smu7_override_pcie_speed(hwmgr), smu7_override_pcie_width(hwmgr)); in smu7_setup_default_pcie_table()
739 static int smu7_reset_dpm_tables(struct pp_hwmgr *hwmgr) in smu7_reset_dpm_tables() argument
741 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_reset_dpm_tables()
747 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
752 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
757 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
762 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
767 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
781 static int smu7_setup_dpm_tables_v0(struct pp_hwmgr *hwmgr) in smu7_setup_dpm_tables_v0() argument
783 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_dpm_tables_v0()
785 hwmgr->dyn_state.vddc_dependency_on_sclk; in smu7_setup_dpm_tables_v0()
787 hwmgr->dyn_state.vddc_dependency_on_mclk; in smu7_setup_dpm_tables_v0()
789 hwmgr->dyn_state.cac_leakage_table; in smu7_setup_dpm_tables_v0()
839 allowed_vdd_mclk_table = hwmgr->dyn_state.vddci_dependency_on_mclk; in smu7_setup_dpm_tables_v0()
850 allowed_vdd_mclk_table = hwmgr->dyn_state.mvdd_dependency_on_mclk; in smu7_setup_dpm_tables_v0()
867 static int smu7_setup_dpm_tables_v1(struct pp_hwmgr *hwmgr) in smu7_setup_dpm_tables_v1() argument
869 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_dpm_tables_v1()
871 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_setup_dpm_tables_v1()
911 if (hwmgr->platform_descriptor.overdriveLimit.engineClock == 0) in smu7_setup_dpm_tables_v1()
912 hwmgr->platform_descriptor.overdriveLimit.engineClock = dep_sclk_table->entries[i-1].clk; in smu7_setup_dpm_tables_v1()
927 if (hwmgr->platform_descriptor.overdriveLimit.memoryClock == 0) in smu7_setup_dpm_tables_v1()
928 hwmgr->platform_descriptor.overdriveLimit.memoryClock = dep_mclk_table->entries[i-1].clk; in smu7_setup_dpm_tables_v1()
932 static int smu7_odn_initial_default_setting(struct pp_hwmgr *hwmgr) in smu7_odn_initial_default_setting() argument
934 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_odn_initial_default_setting()
937 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_odn_initial_default_setting()
977 static void smu7_setup_voltage_range_from_vbios(struct pp_hwmgr *hwmgr) in smu7_setup_voltage_range_from_vbios() argument
979 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_voltage_range_from_vbios()
982 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_setup_voltage_range_from_vbios()
991 atomctrl_get_voltage_range(hwmgr, &max_vddc, &min_vddc); in smu7_setup_voltage_range_from_vbios()
1005 static void smu7_check_dpm_table_updated(struct pp_hwmgr *hwmgr) in smu7_check_dpm_table_updated() argument
1007 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_dpm_table_updated()
1010 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_check_dpm_table_updated()
1059 static int smu7_setup_default_dpm_tables(struct pp_hwmgr *hwmgr) in smu7_setup_default_dpm_tables() argument
1061 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_default_dpm_tables()
1063 smu7_reset_dpm_tables(hwmgr); in smu7_setup_default_dpm_tables()
1065 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_setup_default_dpm_tables()
1066 smu7_setup_dpm_tables_v1(hwmgr); in smu7_setup_default_dpm_tables()
1067 else if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_setup_default_dpm_tables()
1068 smu7_setup_dpm_tables_v0(hwmgr); in smu7_setup_default_dpm_tables()
1070 smu7_setup_default_pcie_table(hwmgr); in smu7_setup_default_dpm_tables()
1077 if (hwmgr->od_enabled) { in smu7_setup_default_dpm_tables()
1079 smu7_check_dpm_table_updated(hwmgr); in smu7_setup_default_dpm_tables()
1081 smu7_setup_voltage_range_from_vbios(hwmgr); in smu7_setup_default_dpm_tables()
1082 smu7_odn_initial_default_setting(hwmgr); in smu7_setup_default_dpm_tables()
1088 static int smu7_enable_vrhot_gpio_interrupt(struct pp_hwmgr *hwmgr) in smu7_enable_vrhot_gpio_interrupt() argument
1091 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_enable_vrhot_gpio_interrupt()
1093 return smum_send_msg_to_smc(hwmgr, in smu7_enable_vrhot_gpio_interrupt()
1100 static int smu7_enable_sclk_control(struct pp_hwmgr *hwmgr) in smu7_enable_sclk_control() argument
1102 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, SCLK_PWRMGT_CNTL, in smu7_enable_sclk_control()
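A sketch of the one-register helper implied by the matches at 1100-1102; the SCLK_PWRMGT_OFF field name and the value written are assumptions.

    static int smu7_enable_sclk_control(struct pp_hwmgr *hwmgr)
    {
            /* assumed field: clear SCLK_PWRMGT_OFF so SCLK DPM can run */
            PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, SCLK_PWRMGT_CNTL,
                            SCLK_PWRMGT_OFF, 0);

            return 0;
    }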
1107 static int smu7_enable_ulv(struct pp_hwmgr *hwmgr) in smu7_enable_ulv() argument
1109 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_enable_ulv()
1112 return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_EnableULV, NULL); in smu7_enable_ulv()
1117 static int smu7_disable_ulv(struct pp_hwmgr *hwmgr) in smu7_disable_ulv() argument
1119 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_ulv()
1122 return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DisableULV, NULL); in smu7_disable_ulv()
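The ULV enable/disable pair implied by the matches at 1107-1122 likely reads as below; the data->ulv_supported guard is an assumption about how the message sends are gated (the field itself only appears at 1834).

    static int smu7_enable_ulv(struct pp_hwmgr *hwmgr)
    {
            struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend);

            if (data->ulv_supported)        /* assumed guard */
                    return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_EnableULV, NULL);

            return 0;
    }

    static int smu7_disable_ulv(struct pp_hwmgr *hwmgr)
    {
            struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend);

            if (data->ulv_supported)        /* assumed guard */
                    return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DisableULV, NULL);

            return 0;
    }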
1127 static int smu7_enable_deep_sleep_master_switch(struct pp_hwmgr *hwmgr) in smu7_enable_deep_sleep_master_switch() argument
1129 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_enable_deep_sleep_master_switch()
1131 if (smum_send_msg_to_smc(hwmgr, PPSMC_MSG_MASTER_DeepSleep_ON, NULL)) in smu7_enable_deep_sleep_master_switch()
1136 if (smum_send_msg_to_smc(hwmgr, in smu7_enable_deep_sleep_master_switch()
1148 static int smu7_disable_deep_sleep_master_switch(struct pp_hwmgr *hwmgr) in smu7_disable_deep_sleep_master_switch() argument
1150 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_disable_deep_sleep_master_switch()
1152 if (smum_send_msg_to_smc(hwmgr, in smu7_disable_deep_sleep_master_switch()
1164 static int smu7_disable_sclk_vce_handshake(struct pp_hwmgr *hwmgr) in smu7_disable_sclk_vce_handshake() argument
1166 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_sclk_vce_handshake()
1169 + smum_get_offsetof(hwmgr, in smu7_disable_sclk_vce_handshake()
1172 soft_register_value = cgs_read_ind_register(hwmgr->device, in smu7_disable_sclk_vce_handshake()
1175 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_sclk_vce_handshake()
1180 static int smu7_disable_handshake_uvd(struct pp_hwmgr *hwmgr) in smu7_disable_handshake_uvd() argument
1182 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_handshake_uvd()
1185 + smum_get_offsetof(hwmgr, in smu7_disable_handshake_uvd()
1188 soft_register_value = cgs_read_ind_register(hwmgr->device, in smu7_disable_handshake_uvd()
1190 soft_register_value |= smum_get_mac_definition(hwmgr, in smu7_disable_handshake_uvd()
1192 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_handshake_uvd()
1197 static int smu7_enable_sclk_mclk_dpm(struct pp_hwmgr *hwmgr) in smu7_enable_sclk_mclk_dpm() argument
1199 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_enable_sclk_mclk_dpm()
1203 if (hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_enable_sclk_mclk_dpm()
1204 hwmgr->chip_id <= CHIP_VEGAM) in smu7_enable_sclk_mclk_dpm()
1205 smu7_disable_sclk_vce_handshake(hwmgr); in smu7_enable_sclk_mclk_dpm()
1208 (0 == smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DPM_Enable, NULL)), in smu7_enable_sclk_mclk_dpm()
1215 if (!(hwmgr->feature_mask & PP_UVD_HANDSHAKE_MASK)) in smu7_enable_sclk_mclk_dpm()
1216 smu7_disable_handshake_uvd(hwmgr); in smu7_enable_sclk_mclk_dpm()
1219 (0 == smum_send_msg_to_smc(hwmgr, in smu7_enable_sclk_mclk_dpm()
1225 if ((hwmgr->chip_family == AMDGPU_FAMILY_CI) || in smu7_enable_sclk_mclk_dpm()
1226 (hwmgr->chip_id == CHIP_POLARIS10) || in smu7_enable_sclk_mclk_dpm()
1227 (hwmgr->chip_id == CHIP_POLARIS11) || in smu7_enable_sclk_mclk_dpm()
1228 (hwmgr->chip_id == CHIP_POLARIS12) || in smu7_enable_sclk_mclk_dpm()
1229 (hwmgr->chip_id == CHIP_TONGA) || in smu7_enable_sclk_mclk_dpm()
1230 (hwmgr->chip_id == CHIP_TOPAZ)) in smu7_enable_sclk_mclk_dpm()
1231 PHM_WRITE_FIELD(hwmgr->device, MC_SEQ_CNTL_3, CAC_EN, 0x1); in smu7_enable_sclk_mclk_dpm()
1234 if (hwmgr->chip_family == AMDGPU_FAMILY_CI) { in smu7_enable_sclk_mclk_dpm()
1235 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d30, 0x5); in smu7_enable_sclk_mclk_dpm()
1236 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d3c, 0x5); in smu7_enable_sclk_mclk_dpm()
1237 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d80, 0x100005); in smu7_enable_sclk_mclk_dpm()
1239 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d30, 0x400005); in smu7_enable_sclk_mclk_dpm()
1240 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d3c, 0x400005); in smu7_enable_sclk_mclk_dpm()
1241 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d80, 0x500005); in smu7_enable_sclk_mclk_dpm()
1243 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC0_CNTL, 0x5); in smu7_enable_sclk_mclk_dpm()
1244 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC1_CNTL, 0x5); in smu7_enable_sclk_mclk_dpm()
1245 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_CPL_CNTL, 0x100005); in smu7_enable_sclk_mclk_dpm()
1247 if (hwmgr->chip_id == CHIP_VEGAM) { in smu7_enable_sclk_mclk_dpm()
1248 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC0_CNTL, 0x400009); in smu7_enable_sclk_mclk_dpm()
1249 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC1_CNTL, 0x400009); in smu7_enable_sclk_mclk_dpm()
1251 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC0_CNTL, 0x400005); in smu7_enable_sclk_mclk_dpm()
1252 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC1_CNTL, 0x400005); in smu7_enable_sclk_mclk_dpm()
1254 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_CPL_CNTL, 0x500005); in smu7_enable_sclk_mclk_dpm()
1261 static int smu7_start_dpm(struct pp_hwmgr *hwmgr) in smu7_start_dpm() argument
1263 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_start_dpm()
1267 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, GENERAL_PWRMGT, in smu7_start_dpm()
1272 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, SCLK_PWRMGT_CNTL, in smu7_start_dpm()
1277 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_start_dpm()
1279 smum_get_offsetof(hwmgr, SMU_SoftRegisters, in smu7_start_dpm()
1281 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__PCIE, in smu7_start_dpm()
1284 if (hwmgr->chip_family == AMDGPU_FAMILY_CI) in smu7_start_dpm()
1285 cgs_write_register(hwmgr->device, 0x1488, in smu7_start_dpm()
1286 (cgs_read_register(hwmgr->device, 0x1488) & ~0x1)); in smu7_start_dpm()
1288 if (smu7_enable_sclk_mclk_dpm(hwmgr)) { in smu7_start_dpm()
1296 (0 == smum_send_msg_to_smc(hwmgr, in smu7_start_dpm()
1303 (0 == smum_send_msg_to_smc(hwmgr, in smu7_start_dpm()
1310 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_start_dpm()
1312 PP_ASSERT_WITH_CODE((0 == smum_send_msg_to_smc(hwmgr, in smu7_start_dpm()
1322 static int smu7_disable_sclk_mclk_dpm(struct pp_hwmgr *hwmgr) in smu7_disable_sclk_mclk_dpm() argument
1324 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_sclk_mclk_dpm()
1328 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_disable_sclk_mclk_dpm()
1331 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DPM_Disable, NULL); in smu7_disable_sclk_mclk_dpm()
1336 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_disable_sclk_mclk_dpm()
1339 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_MCLKDPM_Disable, NULL); in smu7_disable_sclk_mclk_dpm()
1345 static int smu7_stop_dpm(struct pp_hwmgr *hwmgr) in smu7_stop_dpm() argument
1347 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_stop_dpm()
1350 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, GENERAL_PWRMGT, in smu7_stop_dpm()
1353 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, SCLK_PWRMGT_CNTL, in smu7_stop_dpm()
1359 (smum_send_msg_to_smc(hwmgr, in smu7_stop_dpm()
1366 smu7_disable_sclk_mclk_dpm(hwmgr); in smu7_stop_dpm()
1368 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_stop_dpm()
1372 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_Voltage_Cntl_Disable, NULL); in smu7_stop_dpm()
1377 static void smu7_set_dpm_event_sources(struct pp_hwmgr *hwmgr, uint32_t sources) in smu7_set_dpm_event_sources() argument
1406 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, CG_THERMAL_CTRL, in smu7_set_dpm_event_sources()
1408 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, GENERAL_PWRMGT, in smu7_set_dpm_event_sources()
1410 !phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_set_dpm_event_sources()
1413 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, GENERAL_PWRMGT, in smu7_set_dpm_event_sources()
1417 static int smu7_enable_auto_throttle_source(struct pp_hwmgr *hwmgr, in smu7_enable_auto_throttle_source() argument
1420 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_enable_auto_throttle_source()
1424 smu7_set_dpm_event_sources(hwmgr, data->active_auto_throttle_sources); in smu7_enable_auto_throttle_source()
1429 static int smu7_enable_thermal_auto_throttle(struct pp_hwmgr *hwmgr) in smu7_enable_thermal_auto_throttle() argument
1431 return smu7_enable_auto_throttle_source(hwmgr, PHM_AutoThrottleSource_Thermal); in smu7_enable_thermal_auto_throttle()
1434 static int smu7_disable_auto_throttle_source(struct pp_hwmgr *hwmgr, in smu7_disable_auto_throttle_source() argument
1437 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_auto_throttle_source()
1441 smu7_set_dpm_event_sources(hwmgr, data->active_auto_throttle_sources); in smu7_disable_auto_throttle_source()
1446 static int smu7_disable_thermal_auto_throttle(struct pp_hwmgr *hwmgr) in smu7_disable_thermal_auto_throttle() argument
1448 return smu7_disable_auto_throttle_source(hwmgr, PHM_AutoThrottleSource_Thermal); in smu7_disable_thermal_auto_throttle()
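The matches at 1417-1448 outline how throttle sources are toggled; a hedged sketch of the enable path is below. The PHM_AutoThrottleSource parameter type and the bit-mask bookkeeping in active_auto_throttle_sources are assumptions; the disable path would mirror it by clearing the bit before calling smu7_set_dpm_event_sources().

    static int smu7_enable_auto_throttle_source(struct pp_hwmgr *hwmgr,
                    PHM_AutoThrottleSource source)   /* assumed parameter type */
    {
            struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend);

            /* assumed bookkeeping: only reprogram when the source was off */
            if (!(data->active_auto_throttle_sources & (1 << source))) {
                    data->active_auto_throttle_sources |= 1 << source;
                    smu7_set_dpm_event_sources(hwmgr, data->active_auto_throttle_sources);
            }

            return 0;
    }

    static int smu7_enable_thermal_auto_throttle(struct pp_hwmgr *hwmgr)
    {
            return smu7_enable_auto_throttle_source(hwmgr, PHM_AutoThrottleSource_Thermal);
    }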
1451 static int smu7_pcie_performance_request(struct pp_hwmgr *hwmgr) in smu7_pcie_performance_request() argument
1453 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_pcie_performance_request()
1459 static int smu7_program_edc_didt_registers(struct pp_hwmgr *hwmgr, in smu7_program_edc_didt_registers() argument
1467 cgs_write_ind_register(hwmgr->device, in smu7_program_edc_didt_registers()
1477 static int smu7_populate_edc_leakage_registers(struct pp_hwmgr *hwmgr) in smu7_populate_edc_leakage_registers() argument
1479 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_populate_edc_leakage_registers()
1485 ret = smu7_program_edc_didt_registers(hwmgr, in smu7_populate_edc_leakage_registers()
1491 ret = smum_send_msg_to_smc(hwmgr, in smu7_populate_edc_leakage_registers()
1495 ret = smum_send_msg_to_smc(hwmgr, in smu7_populate_edc_leakage_registers()
1503 static void smu7_populate_umdpstate_clocks(struct pp_hwmgr *hwmgr) in smu7_populate_umdpstate_clocks() argument
1505 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_populate_umdpstate_clocks()
1511 hwmgr->pstate_mclk = golden_dpm_table->mclk_table.dpm_levels[0].value; in smu7_populate_umdpstate_clocks()
1515 …hwmgr->pstate_mclk = golden_dpm_table->mclk_table.dpm_levels[golden_dpm_table->mclk_table.count - … in smu7_populate_umdpstate_clocks()
1518 tmp_sclk = hwmgr->pstate_mclk * percentage / 100; in smu7_populate_umdpstate_clocks()
1520 if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_populate_umdpstate_clocks()
1522 hwmgr->dyn_state.vddc_dependency_on_sclk; in smu7_populate_umdpstate_clocks()
1526 hwmgr->pstate_sclk = vddc_dependency_on_sclk->entries[count].clk; in smu7_populate_umdpstate_clocks()
1531 hwmgr->pstate_sclk = vddc_dependency_on_sclk->entries[0].clk; in smu7_populate_umdpstate_clocks()
1533 hwmgr->pstate_sclk_peak = in smu7_populate_umdpstate_clocks()
1535 } else if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_populate_umdpstate_clocks()
1537 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_populate_umdpstate_clocks()
1543 hwmgr->pstate_sclk = vdd_dep_on_sclk->entries[count].clk; in smu7_populate_umdpstate_clocks()
1548 hwmgr->pstate_sclk = vdd_dep_on_sclk->entries[0].clk; in smu7_populate_umdpstate_clocks()
1550 hwmgr->pstate_sclk_peak = in smu7_populate_umdpstate_clocks()
1554 hwmgr->pstate_mclk_peak = in smu7_populate_umdpstate_clocks()
1558 hwmgr->pstate_sclk /= 100; in smu7_populate_umdpstate_clocks()
1559 hwmgr->pstate_mclk /= 100; in smu7_populate_umdpstate_clocks()
1560 hwmgr->pstate_sclk_peak /= 100; in smu7_populate_umdpstate_clocks()
1561 hwmgr->pstate_mclk_peak /= 100; in smu7_populate_umdpstate_clocks()
1564 static int smu7_enable_dpm_tasks(struct pp_hwmgr *hwmgr) in smu7_enable_dpm_tasks() argument
1569 if (smu7_voltage_control(hwmgr)) { in smu7_enable_dpm_tasks()
1570 tmp_result = smu7_enable_voltage_control(hwmgr); in smu7_enable_dpm_tasks()
1575 tmp_result = smu7_construct_voltage_tables(hwmgr); in smu7_enable_dpm_tasks()
1580 smum_initialize_mc_reg_table(hwmgr); in smu7_enable_dpm_tasks()
1582 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_enable_dpm_tasks()
1584 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_dpm_tasks()
1587 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_enable_dpm_tasks()
1589 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_dpm_tasks()
1592 tmp_result = smu7_program_static_screen_threshold_parameters(hwmgr); in smu7_enable_dpm_tasks()
1597 tmp_result = smu7_enable_display_gap(hwmgr); in smu7_enable_dpm_tasks()
1601 tmp_result = smu7_program_voting_clients(hwmgr); in smu7_enable_dpm_tasks()
1605 tmp_result = smum_process_firmware_header(hwmgr); in smu7_enable_dpm_tasks()
1609 if (hwmgr->chip_id != CHIP_VEGAM) { in smu7_enable_dpm_tasks()
1610 tmp_result = smu7_initial_switch_from_arbf0_to_f1(hwmgr); in smu7_enable_dpm_tasks()
1616 result = smu7_setup_default_dpm_tables(hwmgr); in smu7_enable_dpm_tasks()
1620 tmp_result = smum_init_smc_table(hwmgr); in smu7_enable_dpm_tasks()
1624 tmp_result = smu7_enable_vrhot_gpio_interrupt(hwmgr); in smu7_enable_dpm_tasks()
1628 if (hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_enable_dpm_tasks()
1629 hwmgr->chip_id <= CHIP_VEGAM) { in smu7_enable_dpm_tasks()
1630 tmp_result = smu7_notify_has_display(hwmgr); in smu7_enable_dpm_tasks()
1634 smum_send_msg_to_smc(hwmgr, (PPSMC_Msg)PPSMC_NoDisplay, NULL); in smu7_enable_dpm_tasks()
1637 if (hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_enable_dpm_tasks()
1638 hwmgr->chip_id <= CHIP_VEGAM) { in smu7_enable_dpm_tasks()
1639 tmp_result = smu7_populate_edc_leakage_registers(hwmgr); in smu7_enable_dpm_tasks()
1644 tmp_result = smu7_enable_sclk_control(hwmgr); in smu7_enable_dpm_tasks()
1648 tmp_result = smu7_enable_smc_voltage_controller(hwmgr); in smu7_enable_dpm_tasks()
1652 tmp_result = smu7_enable_ulv(hwmgr); in smu7_enable_dpm_tasks()
1656 tmp_result = smu7_enable_deep_sleep_master_switch(hwmgr); in smu7_enable_dpm_tasks()
1660 tmp_result = smu7_enable_didt_config(hwmgr); in smu7_enable_dpm_tasks()
1664 tmp_result = smu7_start_dpm(hwmgr); in smu7_enable_dpm_tasks()
1668 tmp_result = smu7_enable_smc_cac(hwmgr); in smu7_enable_dpm_tasks()
1672 tmp_result = smu7_enable_power_containment(hwmgr); in smu7_enable_dpm_tasks()
1676 tmp_result = smu7_power_control_set_level(hwmgr); in smu7_enable_dpm_tasks()
1680 tmp_result = smu7_enable_thermal_auto_throttle(hwmgr); in smu7_enable_dpm_tasks()
1684 tmp_result = smu7_pcie_performance_request(hwmgr); in smu7_enable_dpm_tasks()
1688 smu7_populate_umdpstate_clocks(hwmgr); in smu7_enable_dpm_tasks()
1693 static int smu7_avfs_control(struct pp_hwmgr *hwmgr, bool enable) in smu7_avfs_control() argument
1695 if (!hwmgr->avfs_supported) in smu7_avfs_control()
1699 if (!PHM_READ_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_avfs_control()
1702 hwmgr, PPSMC_MSG_EnableAvfs, NULL), in smu7_avfs_control()
1706 } else if (PHM_READ_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_avfs_control()
1709 hwmgr, PPSMC_MSG_DisableAvfs, NULL), in smu7_avfs_control()
1717 static int smu7_update_avfs(struct pp_hwmgr *hwmgr) in smu7_update_avfs() argument
1719 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_update_avfs()
1721 if (!hwmgr->avfs_supported) in smu7_update_avfs()
1725 smu7_avfs_control(hwmgr, false); in smu7_update_avfs()
1727 smu7_avfs_control(hwmgr, false); in smu7_update_avfs()
1728 smu7_avfs_control(hwmgr, true); in smu7_update_avfs()
1730 smu7_avfs_control(hwmgr, true); in smu7_update_avfs()
1736 static int smu7_disable_dpm_tasks(struct pp_hwmgr *hwmgr) in smu7_disable_dpm_tasks() argument
1740 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_disable_dpm_tasks()
1742 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_dpm_tasks()
1745 tmp_result = smu7_disable_power_containment(hwmgr); in smu7_disable_dpm_tasks()
1749 tmp_result = smu7_disable_smc_cac(hwmgr); in smu7_disable_dpm_tasks()
1753 tmp_result = smu7_disable_didt_config(hwmgr); in smu7_disable_dpm_tasks()
1757 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_dpm_tasks()
1759 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_dpm_tasks()
1762 tmp_result = smu7_disable_thermal_auto_throttle(hwmgr); in smu7_disable_dpm_tasks()
1766 tmp_result = smu7_avfs_control(hwmgr, false); in smu7_disable_dpm_tasks()
1770 tmp_result = smu7_stop_dpm(hwmgr); in smu7_disable_dpm_tasks()
1774 tmp_result = smu7_disable_deep_sleep_master_switch(hwmgr); in smu7_disable_dpm_tasks()
1778 tmp_result = smu7_disable_ulv(hwmgr); in smu7_disable_dpm_tasks()
1782 tmp_result = smu7_clear_voting_clients(hwmgr); in smu7_disable_dpm_tasks()
1786 tmp_result = smu7_reset_to_default(hwmgr); in smu7_disable_dpm_tasks()
1790 tmp_result = smum_stop_smc(hwmgr); in smu7_disable_dpm_tasks()
1794 tmp_result = smu7_force_switch_to_arbf0(hwmgr); in smu7_disable_dpm_tasks()
1801 static void smu7_init_dpm_defaults(struct pp_hwmgr *hwmgr) in smu7_init_dpm_defaults() argument
1803 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_init_dpm_defaults()
1805 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_init_dpm_defaults()
1806 struct amdgpu_device *adev = hwmgr->adev; in smu7_init_dpm_defaults()
1824 data->mclk_dpm_key_disabled = hwmgr->feature_mask & PP_MCLK_DPM_MASK ? false : true; in smu7_init_dpm_defaults()
1825 data->sclk_dpm_key_disabled = hwmgr->feature_mask & PP_SCLK_DPM_MASK ? false : true; in smu7_init_dpm_defaults()
1826 data->pcie_dpm_key_disabled = !(hwmgr->feature_mask & PP_PCIE_DPM_MASK); in smu7_init_dpm_defaults()
1834 data->ulv_supported = hwmgr->feature_mask & PP_ULV_MASK ? true : false; in smu7_init_dpm_defaults()
1840 if (hwmgr->chip_id >= CHIP_POLARIS10) { in smu7_init_dpm_defaults()
1859 hwmgr->workload_mask = 1 << hwmgr->workload_prority[PP_SMC_POWER_PROFILE_FULLSCREEN3D]; in smu7_init_dpm_defaults()
1860 hwmgr->power_profile_mode = PP_SMC_POWER_PROFILE_FULLSCREEN3D; in smu7_init_dpm_defaults()
1861 hwmgr->default_power_profile_mode = PP_SMC_POWER_PROFILE_FULLSCREEN3D; in smu7_init_dpm_defaults()
1863 if (hwmgr->chip_id == CHIP_HAWAII) { in smu7_init_dpm_defaults()
1874 if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1877 else if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1881 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1883 if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1889 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1891 if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1894 else if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1900 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1903 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1905 if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1908 else if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1914 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1918 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1922 if ((hwmgr->chip_id == CHIP_POLARIS12) || in smu7_init_dpm_defaults()
1928 atomctrl_get_svi2_info(hwmgr, VOLTAGE_TYPE_VDDC, &tmp1, &tmp2, in smu7_init_dpm_defaults()
1933 } else if (hwmgr->chip_family == AMDGPU_FAMILY_CI) { in smu7_init_dpm_defaults()
1937 if ((hwmgr->pp_table_version != PP_TABLE_V0) && (hwmgr->feature_mask & PP_CLOCK_STRETCH_MASK) in smu7_init_dpm_defaults()
1939 phm_cap_set(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1953 phm_cap_set(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1956 phm_cap_set(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1960 if (((adev->asic_type == CHIP_POLARIS10) && hwmgr->is_kicker) || in smu7_init_dpm_defaults()
1961 ((adev->asic_type == CHIP_POLARIS11) && hwmgr->is_kicker) || in smu7_init_dpm_defaults()
1966 if (!atomctrl_is_asic_internal_ss_supported(hwmgr)) { in smu7_init_dpm_defaults()
1967 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1969 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1975 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1980 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1985 static int smu7_calculate_ro_range(struct pp_hwmgr *hwmgr) in smu7_calculate_ro_range() argument
1987 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_calculate_ro_range()
1988 struct amdgpu_device *adev = hwmgr->adev; in smu7_calculate_ro_range()
1991 atomctrl_read_efuse(hwmgr, STRAP_EVV_REVISION_LSB, STRAP_EVV_REVISION_MSB, in smu7_calculate_ro_range()
1994 atomctrl_read_efuse(hwmgr, 568, 579, &asicrev1); in smu7_calculate_ro_range()
2004 } else if (hwmgr->chip_id == CHIP_POLARIS10) { in smu7_calculate_ro_range()
2045 static int smu7_get_evv_voltages(struct pp_hwmgr *hwmgr) in smu7_get_evv_voltages() argument
2047 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_evv_voltages()
2054 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_evv_voltages()
2057 if (hwmgr->chip_id == CHIP_POLARIS10 || in smu7_get_evv_voltages()
2058 hwmgr->chip_id == CHIP_POLARIS11 || in smu7_get_evv_voltages()
2059 hwmgr->chip_id == CHIP_POLARIS12) in smu7_get_evv_voltages()
2060 smu7_calculate_ro_range(hwmgr); in smu7_get_evv_voltages()
2066 if ((hwmgr->pp_table_version == PP_TABLE_V1) in smu7_get_evv_voltages()
2067 && !phm_get_sclk_for_voltage_evv(hwmgr, in smu7_get_evv_voltages()
2069 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_get_evv_voltages()
2082 (hwmgr, VOLTAGE_TYPE_VDDGFX, sclk, in smu7_get_evv_voltages()
2098 if ((hwmgr->pp_table_version == PP_TABLE_V0) in smu7_get_evv_voltages()
2099 || !phm_get_sclk_for_voltage_evv(hwmgr, in smu7_get_evv_voltages()
2101 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_get_evv_voltages()
2116 if (phm_get_voltage_evv_on_sclk(hwmgr, in smu7_get_evv_voltages()
2146 static void smu7_patch_ppt_v1_with_vdd_leakage(struct pp_hwmgr *hwmgr, in smu7_patch_ppt_v1_with_vdd_leakage() argument
2173 static int smu7_patch_lookup_table_with_leakage(struct pp_hwmgr *hwmgr, in smu7_patch_lookup_table_with_leakage() argument
2180 smu7_patch_ppt_v1_with_vdd_leakage(hwmgr, in smu7_patch_lookup_table_with_leakage()
2187 struct pp_hwmgr *hwmgr, struct smu7_leakage_voltage *leakage_table, in smu7_patch_clock_voltage_limits_with_vddc_leakage() argument
2191 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_patch_clock_voltage_limits_with_vddc_leakage()
2192 smu7_patch_ppt_v1_with_vdd_leakage(hwmgr, (uint16_t *)vddc, leakage_table); in smu7_patch_clock_voltage_limits_with_vddc_leakage()
2193 hwmgr->dyn_state.max_clock_voltage_on_dc.vddc = in smu7_patch_clock_voltage_limits_with_vddc_leakage()
2199 struct pp_hwmgr *hwmgr) in smu7_patch_voltage_dependency_tables_with_lookup_table() argument
2203 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_voltage_dependency_tables_with_lookup_table()
2205 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_patch_voltage_dependency_tables_with_lookup_table()
2244 static int phm_add_voltage(struct pp_hwmgr *hwmgr, in phm_add_voltage() argument
2255 i = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_VDDGFX); in phm_add_voltage()
2281 static int smu7_calc_voltage_dependency_tables(struct pp_hwmgr *hwmgr) in smu7_calc_voltage_dependency_tables() argument
2285 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_calc_voltage_dependency_tables()
2286 struct phm_ppt_v1_information *pptable_info = (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_calc_voltage_dependency_tables()
2304 phm_add_voltage(hwmgr, pptable_info->vddc_lookup_table, &v_record); in smu7_calc_voltage_dependency_tables()
2317 phm_add_voltage(hwmgr, pptable_info->vddgfx_lookup_table, &v_record); in smu7_calc_voltage_dependency_tables()
2323 static int smu7_calc_mm_voltage_dependency_table(struct pp_hwmgr *hwmgr) in smu7_calc_mm_voltage_dependency_table() argument
2327 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_calc_mm_voltage_dependency_table()
2328 struct phm_ppt_v1_information *pptable_info = (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_calc_mm_voltage_dependency_table()
2343 phm_add_voltage(hwmgr, pptable_info->vddgfx_lookup_table, &v_record); in smu7_calc_mm_voltage_dependency_table()
2349 static int smu7_sort_lookup_table(struct pp_hwmgr *hwmgr, in smu7_sort_lookup_table() argument
2372 static int smu7_complete_dependency_tables(struct pp_hwmgr *hwmgr) in smu7_complete_dependency_tables() argument
2376 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_complete_dependency_tables()
2378 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_complete_dependency_tables()
2381 tmp_result = smu7_patch_lookup_table_with_leakage(hwmgr, in smu7_complete_dependency_tables()
2386 smu7_patch_ppt_v1_with_vdd_leakage(hwmgr, in smu7_complete_dependency_tables()
2390 tmp_result = smu7_patch_lookup_table_with_leakage(hwmgr, in smu7_complete_dependency_tables()
2395 tmp_result = smu7_patch_clock_voltage_limits_with_vddc_leakage(hwmgr, in smu7_complete_dependency_tables()
2401 tmp_result = smu7_patch_voltage_dependency_tables_with_lookup_table(hwmgr); in smu7_complete_dependency_tables()
2405 tmp_result = smu7_calc_voltage_dependency_tables(hwmgr); in smu7_complete_dependency_tables()
2409 tmp_result = smu7_calc_mm_voltage_dependency_table(hwmgr); in smu7_complete_dependency_tables()
2413 tmp_result = smu7_sort_lookup_table(hwmgr, table_info->vddgfx_lookup_table); in smu7_complete_dependency_tables()
2417 tmp_result = smu7_sort_lookup_table(hwmgr, table_info->vddc_lookup_table); in smu7_complete_dependency_tables()
2424 static int smu7_find_highest_vddc(struct pp_hwmgr *hwmgr) in smu7_find_highest_vddc() argument
2427 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_find_highest_vddc()
2446 static int smu7_set_private_data_based_on_pptable_v1(struct pp_hwmgr *hwmgr) in smu7_set_private_data_based_on_pptable_v1() argument
2449 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_set_private_data_based_on_pptable_v1()
2474 if (hwmgr->chip_id >= CHIP_POLARIS10 && hwmgr->chip_id <= CHIP_VEGAM) in smu7_set_private_data_based_on_pptable_v1()
2476 smu7_find_highest_vddc(hwmgr); in smu7_set_private_data_based_on_pptable_v1()
2483 hwmgr->dyn_state.max_clock_voltage_on_ac.sclk = table_info->max_clock_voltage_on_ac.sclk; in smu7_set_private_data_based_on_pptable_v1()
2484 hwmgr->dyn_state.max_clock_voltage_on_ac.mclk = table_info->max_clock_voltage_on_ac.mclk; in smu7_set_private_data_based_on_pptable_v1()
2485 hwmgr->dyn_state.max_clock_voltage_on_ac.vddc = table_info->max_clock_voltage_on_ac.vddc; in smu7_set_private_data_based_on_pptable_v1()
2486 hwmgr->dyn_state.max_clock_voltage_on_ac.vddci = table_info->max_clock_voltage_on_ac.vddci; in smu7_set_private_data_based_on_pptable_v1()
2491 static int smu7_patch_voltage_workaround(struct pp_hwmgr *hwmgr) in smu7_patch_voltage_workaround() argument
2494 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_patch_voltage_workaround()
2499 struct amdgpu_device *adev = hwmgr->adev; in smu7_patch_voltage_workaround()
2516 PHM_WRITE_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_patch_voltage_workaround()
2535 static int smu7_thermal_parameter_init(struct pp_hwmgr *hwmgr) in smu7_thermal_parameter_init() argument
2540 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_thermal_parameter_init()
2543 if (atomctrl_get_pp_assign_pin(hwmgr, VDDC_PCC_GPIO_PINID, &gpio_pin_assignment)) { in smu7_thermal_parameter_init()
2544 temp_reg = cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCNB_PWRMGT_CNTL); in smu7_thermal_parameter_init()
2564 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCNB_PWRMGT_CNTL, temp_reg); in smu7_thermal_parameter_init()
2571 hwmgr->thermal_controller.advanceFanControlParameters.ucFanControlMode) { in smu7_thermal_parameter_init()
2572 hwmgr->thermal_controller.advanceFanControlParameters.usFanPWMMinLimit = in smu7_thermal_parameter_init()
2573 (uint16_t)hwmgr->thermal_controller.advanceFanControlParameters.ucMinimumPWMLimit; in smu7_thermal_parameter_init()
2575 hwmgr->thermal_controller.advanceFanControlParameters.usFanPWMMaxLimit = in smu7_thermal_parameter_init()
2576 (uint16_t)hwmgr->thermal_controller.advanceFanControlParameters.usDefaultMaxFanPWM; in smu7_thermal_parameter_init()
2578 hwmgr->thermal_controller.advanceFanControlParameters.usFanPWMStep = 1; in smu7_thermal_parameter_init()
2580 hwmgr->thermal_controller.advanceFanControlParameters.usFanRPMMaxLimit = 100; in smu7_thermal_parameter_init()
2582 hwmgr->thermal_controller.advanceFanControlParameters.usFanRPMMinLimit = in smu7_thermal_parameter_init()
2583 (uint16_t)hwmgr->thermal_controller.advanceFanControlParameters.ucMinimumPWMLimit; in smu7_thermal_parameter_init()
2585 hwmgr->thermal_controller.advanceFanControlParameters.usFanRPMStep = 1; in smu7_thermal_parameter_init()
2594 hwmgr->thermal_controller.advanceFanControlParameters.usMaxFanPWM = in smu7_thermal_parameter_init()
2595 hwmgr->thermal_controller.advanceFanControlParameters.usDefaultMaxFanPWM; in smu7_thermal_parameter_init()
2597 hwmgr->thermal_controller.advanceFanControlParameters.usMaxFanRPM = in smu7_thermal_parameter_init()
2598 hwmgr->thermal_controller.advanceFanControlParameters.usDefaultMaxFanRPM; in smu7_thermal_parameter_init()
2600 hwmgr->dyn_state.cac_dtp_table->usOperatingTempMinLimit = in smu7_thermal_parameter_init()
2603 hwmgr->dyn_state.cac_dtp_table->usOperatingTempMaxLimit = in smu7_thermal_parameter_init()
2606 hwmgr->dyn_state.cac_dtp_table->usDefaultTargetOperatingTemp = in smu7_thermal_parameter_init()
2609 hwmgr->dyn_state.cac_dtp_table->usOperatingTempStep = in smu7_thermal_parameter_init()
2612 hwmgr->dyn_state.cac_dtp_table->usTargetOperatingTemp = in smu7_thermal_parameter_init()
2614 if (hwmgr->feature_mask & PP_OD_FUZZY_FAN_CONTROL_MASK) in smu7_thermal_parameter_init()
2615 phm_cap_set(hwmgr->platform_descriptor.platformCaps, in smu7_thermal_parameter_init()
2629 static void smu7_patch_ppt_v0_with_vdd_leakage(struct pp_hwmgr *hwmgr, in smu7_patch_ppt_v0_with_vdd_leakage() argument
2649 static int smu7_patch_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_vddc() argument
2653 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_vddc()
2657 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_vddc()
2663 static int smu7_patch_vddci(struct pp_hwmgr *hwmgr, in smu7_patch_vddci() argument
2667 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_vddci()
2671 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_vddci()
2677 static int smu7_patch_vce_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_vce_vddc() argument
2681 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_vce_vddc()
2685 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_vce_vddc()
2692 static int smu7_patch_uvd_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_uvd_vddc() argument
2696 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_uvd_vddc()
2700 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_uvd_vddc()
2706 static int smu7_patch_vddc_shed_limit(struct pp_hwmgr *hwmgr, in smu7_patch_vddc_shed_limit() argument
2710 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_vddc_shed_limit()
2714 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].Voltage, in smu7_patch_vddc_shed_limit()
2720 static int smu7_patch_samu_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_samu_vddc() argument
2724 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_samu_vddc()
2728 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_samu_vddc()
2734 static int smu7_patch_acp_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_acp_vddc() argument
2738 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_acp_vddc()
2742 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_acp_vddc()
2748 static int smu7_patch_limits_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_limits_vddc() argument
2752 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_limits_vddc()
2756 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &vddc, in smu7_patch_limits_vddc()
2760 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &vddci, in smu7_patch_limits_vddc()
2768 static int smu7_patch_cac_vddc(struct pp_hwmgr *hwmgr, struct phm_cac_leakage_table *tab) in smu7_patch_cac_vddc() argument
2772 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_cac_vddc()
2777 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &vddc, &data->vddc_leakage); in smu7_patch_cac_vddc()
2785 static int smu7_patch_dependency_tables_with_leakage(struct pp_hwmgr *hwmgr) in smu7_patch_dependency_tables_with_leakage() argument
2789 tmp = smu7_patch_vddc(hwmgr, hwmgr->dyn_state.vddc_dependency_on_sclk); in smu7_patch_dependency_tables_with_leakage()
2793 tmp = smu7_patch_vddc(hwmgr, hwmgr->dyn_state.vddc_dependency_on_mclk); in smu7_patch_dependency_tables_with_leakage()
2797 tmp = smu7_patch_vddc(hwmgr, hwmgr->dyn_state.vddc_dep_on_dal_pwrl); in smu7_patch_dependency_tables_with_leakage()
2801 tmp = smu7_patch_vddci(hwmgr, hwmgr->dyn_state.vddci_dependency_on_mclk); in smu7_patch_dependency_tables_with_leakage()
2805 tmp = smu7_patch_vce_vddc(hwmgr, hwmgr->dyn_state.vce_clock_voltage_dependency_table); in smu7_patch_dependency_tables_with_leakage()
2809 tmp = smu7_patch_uvd_vddc(hwmgr, hwmgr->dyn_state.uvd_clock_voltage_dependency_table); in smu7_patch_dependency_tables_with_leakage()
2813 tmp = smu7_patch_samu_vddc(hwmgr, hwmgr->dyn_state.samu_clock_voltage_dependency_table); in smu7_patch_dependency_tables_with_leakage()
2817 tmp = smu7_patch_acp_vddc(hwmgr, hwmgr->dyn_state.acp_clock_voltage_dependency_table); in smu7_patch_dependency_tables_with_leakage()
2821 tmp = smu7_patch_vddc_shed_limit(hwmgr, hwmgr->dyn_state.vddc_phase_shed_limits_table); in smu7_patch_dependency_tables_with_leakage()
2825 tmp = smu7_patch_limits_vddc(hwmgr, &hwmgr->dyn_state.max_clock_voltage_on_ac); in smu7_patch_dependency_tables_with_leakage()
2829 tmp = smu7_patch_limits_vddc(hwmgr, &hwmgr->dyn_state.max_clock_voltage_on_dc); in smu7_patch_dependency_tables_with_leakage()
2833 tmp = smu7_patch_cac_vddc(hwmgr, hwmgr->dyn_state.cac_leakage_table); in smu7_patch_dependency_tables_with_leakage()
2841 static int smu7_set_private_data_based_on_pptable_v0(struct pp_hwmgr *hwmgr) in smu7_set_private_data_based_on_pptable_v0() argument
2843 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_private_data_based_on_pptable_v0()
2845 …struct phm_clock_voltage_dependency_table *allowed_sclk_vddc_table = hwmgr->dyn_state.vddc_depende… in smu7_set_private_data_based_on_pptable_v0()
2846 …struct phm_clock_voltage_dependency_table *allowed_mclk_vddc_table = hwmgr->dyn_state.vddc_depende… in smu7_set_private_data_based_on_pptable_v0()
2847 …struct phm_clock_voltage_dependency_table *allowed_mclk_vddci_table = hwmgr->dyn_state.vddci_depen… in smu7_set_private_data_based_on_pptable_v0()
2866 hwmgr->dyn_state.max_clock_voltage_on_ac.sclk = in smu7_set_private_data_based_on_pptable_v0()
2868 hwmgr->dyn_state.max_clock_voltage_on_ac.mclk = in smu7_set_private_data_based_on_pptable_v0()
2870 hwmgr->dyn_state.max_clock_voltage_on_ac.vddc = in smu7_set_private_data_based_on_pptable_v0()
2878 …if (hwmgr->dyn_state.vddci_dependency_on_mclk != NULL && hwmgr->dyn_state.vddci_dependency_on_mclk… in smu7_set_private_data_based_on_pptable_v0()
2879 …hwmgr->dyn_state.max_clock_voltage_on_ac.vddci = hwmgr->dyn_state.vddci_dependency_on_mclk->entrie… in smu7_set_private_data_based_on_pptable_v0()
2884 static int smu7_hwmgr_backend_fini(struct pp_hwmgr *hwmgr) in smu7_hwmgr_backend_fini() argument
2886 kfree(hwmgr->dyn_state.vddc_dep_on_dal_pwrl); in smu7_hwmgr_backend_fini()
2887 hwmgr->dyn_state.vddc_dep_on_dal_pwrl = NULL; in smu7_hwmgr_backend_fini()
2888 kfree(hwmgr->backend); in smu7_hwmgr_backend_fini()
2889 hwmgr->backend = NULL; in smu7_hwmgr_backend_fini()
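The matches at 2884-2889 are the whole teardown apart from the return value; reconstructed:

    static int smu7_hwmgr_backend_fini(struct pp_hwmgr *hwmgr)
    {
            kfree(hwmgr->dyn_state.vddc_dep_on_dal_pwrl);
            hwmgr->dyn_state.vddc_dep_on_dal_pwrl = NULL;
            kfree(hwmgr->backend);
            hwmgr->backend = NULL;

            return 0;
    }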
2894 static int smu7_get_elb_voltages(struct pp_hwmgr *hwmgr) in smu7_get_elb_voltages() argument
2897 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_elb_voltages()
2900 if (atomctrl_get_leakage_id_from_efuse(hwmgr, &efuse_voltage_id) == 0) { in smu7_get_elb_voltages()
2903 if (atomctrl_get_leakage_vddc_base_on_leakage(hwmgr, &vddc, &vddci, in smu7_get_elb_voltages()
2925 static int smu7_update_edc_leakage_table(struct pp_hwmgr *hwmgr) in smu7_update_edc_leakage_table() argument
2927 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_update_edc_leakage_table()
2935 ret = atomctrl_get_edc_hilo_leakage_offset_table(hwmgr, in smu7_update_edc_leakage_table()
2942 atomctrl_read_efuse(hwmgr, LEAKAGE_ID_LSB, LEAKAGE_ID_MSB, &efuse); in smu7_update_edc_leakage_table()
2948 ret = atomctrl_get_edc_leakage_table(hwmgr, in smu7_update_edc_leakage_table()
2958 static int smu7_hwmgr_backend_init(struct pp_hwmgr *hwmgr) in smu7_hwmgr_backend_init() argument
2960 struct amdgpu_device *adev = hwmgr->adev; in smu7_hwmgr_backend_init()
2968 hwmgr->backend = data; in smu7_hwmgr_backend_init()
2969 smu7_patch_voltage_workaround(hwmgr); in smu7_hwmgr_backend_init()
2970 smu7_init_dpm_defaults(hwmgr); in smu7_hwmgr_backend_init()
2973 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_hwmgr_backend_init()
2975 result = smu7_get_evv_voltages(hwmgr); in smu7_hwmgr_backend_init()
2978 kfree(hwmgr->backend); in smu7_hwmgr_backend_init()
2979 hwmgr->backend = NULL; in smu7_hwmgr_backend_init()
2983 smu7_get_elb_voltages(hwmgr); in smu7_hwmgr_backend_init()
2986 if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_hwmgr_backend_init()
2987 smu7_complete_dependency_tables(hwmgr); in smu7_hwmgr_backend_init()
2988 smu7_set_private_data_based_on_pptable_v1(hwmgr); in smu7_hwmgr_backend_init()
2989 } else if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_hwmgr_backend_init()
2990 smu7_patch_dependency_tables_with_leakage(hwmgr); in smu7_hwmgr_backend_init()
2991 smu7_set_private_data_based_on_pptable_v0(hwmgr); in smu7_hwmgr_backend_init()
2995 result = phm_initializa_dynamic_state_adjustment_rule_settings(hwmgr); in smu7_hwmgr_backend_init()
3002 hwmgr->platform_descriptor.hardwareActivityPerformanceLevels = in smu7_hwmgr_backend_init()
3004 hwmgr->platform_descriptor.hardwarePerformanceLevels = 2; in smu7_hwmgr_backend_init()
3005 hwmgr->platform_descriptor.minimumClocksReductionPercentage = 50; in smu7_hwmgr_backend_init()
3014 hwmgr->platform_descriptor.vbiosInterruptId = 0x20000400; /* IRQ_SOURCE1_SW_INT */ in smu7_hwmgr_backend_init()
3016 hwmgr->platform_descriptor.clockStep.engineClock = 500; in smu7_hwmgr_backend_init()
3017 hwmgr->platform_descriptor.clockStep.memoryClock = 500; in smu7_hwmgr_backend_init()
3018 smu7_thermal_parameter_init(hwmgr); in smu7_hwmgr_backend_init()
3020 result = smu7_update_edc_leakage_table(hwmgr); in smu7_hwmgr_backend_init()
3026 smu7_hwmgr_backend_fini(hwmgr); in smu7_hwmgr_backend_init()
3030 static int smu7_force_dpm_highest(struct pp_hwmgr *hwmgr) in smu7_force_dpm_highest() argument
3032 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_force_dpm_highest()
3043 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_highest()
3057 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_highest()
3072 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_highest()
3082 static int smu7_upload_dpm_level_enable_mask(struct pp_hwmgr *hwmgr) in smu7_upload_dpm_level_enable_mask() argument
3084 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_upload_dpm_level_enable_mask()
3086 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_upload_dpm_level_enable_mask()
3087 phm_apply_dal_min_voltage_request(hwmgr); in smu7_upload_dpm_level_enable_mask()
3092 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_upload_dpm_level_enable_mask()
3100 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_upload_dpm_level_enable_mask()
3109 static int smu7_unforce_dpm_levels(struct pp_hwmgr *hwmgr) in smu7_unforce_dpm_levels() argument
3111 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_unforce_dpm_levels()
3113 if (!smum_is_dpm_running(hwmgr)) in smu7_unforce_dpm_levels()
3117 smum_send_msg_to_smc(hwmgr, in smu7_unforce_dpm_levels()
3122 return smu7_upload_dpm_level_enable_mask(hwmgr); in smu7_unforce_dpm_levels()
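From the matches at 3109-3122 the unforce path likely looks like this sketch; the -EINVAL early return and the PPSMC_MSG_PCIeDPM_UnForceLevel message name are assumptions not present in the matches (the pcie_dpm_key_disabled flag itself appears at 1826).

    static int smu7_unforce_dpm_levels(struct pp_hwmgr *hwmgr)
    {
            struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend);

            if (!smum_is_dpm_running(hwmgr))
                    return -EINVAL;                         /* assumed return value */

            if (!data->pcie_dpm_key_disabled)
                    smum_send_msg_to_smc(hwmgr,
                                    PPSMC_MSG_PCIeDPM_UnForceLevel, /* assumed message */
                                    NULL);

            return smu7_upload_dpm_level_enable_mask(hwmgr);
    }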
3125 static int smu7_force_dpm_lowest(struct pp_hwmgr *hwmgr) in smu7_force_dpm_lowest() argument
3128 (struct smu7_hwmgr *)(hwmgr->backend); in smu7_force_dpm_lowest()
3133 level = phm_get_lowest_enabled_level(hwmgr, in smu7_force_dpm_lowest()
3135 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_lowest()
3144 level = phm_get_lowest_enabled_level(hwmgr, in smu7_force_dpm_lowest()
3146 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_lowest()
3155 level = phm_get_lowest_enabled_level(hwmgr, in smu7_force_dpm_lowest()
3157 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_lowest()
3167 static int smu7_get_profiling_clk(struct pp_hwmgr *hwmgr, enum amd_dpm_forced_level level, in smu7_get_profiling_clk() argument
3171 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_profiling_clk()
3194 if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_get_profiling_clk()
3195 for (count = hwmgr->dyn_state.vddc_dependency_on_sclk->count-1; in smu7_get_profiling_clk()
3197 if (tmp_sclk >= hwmgr->dyn_state.vddc_dependency_on_sclk->entries[count].clk) { in smu7_get_profiling_clk()
3206 *sclk_mask = hwmgr->dyn_state.vddc_dependency_on_sclk->count-1; in smu7_get_profiling_clk()
3207 } else if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_get_profiling_clk()
3209 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_get_profiling_clk()
3234 static int smu7_force_dpm_level(struct pp_hwmgr *hwmgr, in smu7_force_dpm_level() argument
3244 ret = smu7_force_dpm_highest(hwmgr); in smu7_force_dpm_level()
3247 ret = smu7_force_dpm_lowest(hwmgr); in smu7_force_dpm_level()
3250 ret = smu7_unforce_dpm_levels(hwmgr); in smu7_force_dpm_level()
3256 ret = smu7_get_profiling_clk(hwmgr, level, &sclk_mask, &mclk_mask, &pcie_mask); in smu7_force_dpm_level()
3259 smu7_force_clock_level(hwmgr, PP_SCLK, 1<<sclk_mask); in smu7_force_dpm_level()
3260 smu7_force_clock_level(hwmgr, PP_MCLK, 1<<mclk_mask); in smu7_force_dpm_level()
3261 smu7_force_clock_level(hwmgr, PP_PCIE, 1<<pcie_mask); in smu7_force_dpm_level()
3270 …if (level == AMD_DPM_FORCED_LEVEL_PROFILE_PEAK && hwmgr->dpm_level != AMD_DPM_FORCED_LEVEL_PROFILE… in smu7_force_dpm_level()
3271 smu7_fan_ctrl_set_fan_speed_pwm(hwmgr, 255); in smu7_force_dpm_level()
3272 …else if (level != AMD_DPM_FORCED_LEVEL_PROFILE_PEAK && hwmgr->dpm_level == AMD_DPM_FORCED_LEVEL_PR… in smu7_force_dpm_level()
3273 smu7_fan_ctrl_reset_fan_speed_to_default(hwmgr); in smu7_force_dpm_level()
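For the PROFILE_MIN/PEAK/STANDARD levels, smu7_get_profiling_clk() picks one table index per clock domain and smu7_force_dpm_level() turns each index into a single-bit mask (1 << index) before passing it to smu7_force_clock_level(). A tiny standalone illustration of that mask construction (the index value is made up):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t sclk_mask  = 5;                 /* index returned for profiling      */
        uint32_t level_bits = 1u << sclk_mask;   /* exactly one DPM level left usable */

        printf("0x%08x\n", level_bits);          /* -> 0x00000020                     */
        return 0;
    }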
3278 static int smu7_get_power_state_size(struct pp_hwmgr *hwmgr) in smu7_get_power_state_size() argument
3283 static int smu7_vblank_too_short(struct pp_hwmgr *hwmgr, in smu7_vblank_too_short() argument
3286 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_vblank_too_short()
3289 switch (hwmgr->chip_id) { in smu7_vblank_too_short()
3293 if (hwmgr->is_kicker || (hwmgr->chip_id == CHIP_POLARIS12)) in smu7_vblank_too_short()
3312 static int smu7_apply_state_adjust_rules(struct pp_hwmgr *hwmgr, in smu7_apply_state_adjust_rules() argument
3316 struct amdgpu_device *adev = hwmgr->adev; in smu7_apply_state_adjust_rules()
3326 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_apply_state_adjust_rules()
3328 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_apply_state_adjust_rules()
3343 &(hwmgr->dyn_state.max_clock_voltage_on_ac) : in smu7_apply_state_adjust_rules()
3344 &(hwmgr->dyn_state.max_clock_voltage_on_dc); in smu7_apply_state_adjust_rules()
3356 minimum_clocks.engineClock = hwmgr->display_config->min_core_set_clock; in smu7_apply_state_adjust_rules()
3357 minimum_clocks.memoryClock = hwmgr->display_config->min_mem_set_clock; in smu7_apply_state_adjust_rules()
3359 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_apply_state_adjust_rules()
3361 max_limits = &(hwmgr->dyn_state.max_clock_voltage_on_ac); in smu7_apply_state_adjust_rules()
3384 hwmgr->platform_descriptor.platformCaps, in smu7_apply_state_adjust_rules()
3387 disable_mclk_switching_for_display = ((1 < hwmgr->display_config->num_display) && in smu7_apply_state_adjust_rules()
3388 !hwmgr->display_config->multi_monitor_in_sync) || in smu7_apply_state_adjust_rules()
3389 (hwmgr->display_config->num_display && in smu7_apply_state_adjust_rules()
3390 smu7_vblank_too_short(hwmgr, hwmgr->display_config->min_vblank_time)); in smu7_apply_state_adjust_rules()
3395 if (hwmgr->display_config->num_display == 0) { in smu7_apply_state_adjust_rules()
3396 if (hwmgr->chip_id >= CHIP_POLARIS10 && hwmgr->chip_id <= CHIP_VEGAM) in smu7_apply_state_adjust_rules()
3406 (!(hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_apply_state_adjust_rules()
3407 hwmgr->chip_id <= CHIP_VEGAM))) in smu7_apply_state_adjust_rules()
3432 if (hwmgr->chip_id >= CHIP_POLARIS10 && hwmgr->chip_id <= CHIP_VEGAM) { in smu7_apply_state_adjust_rules()
3437 latency = hwmgr->display_config->dce_tolerable_mclk_in_active_latency; in smu7_apply_state_adjust_rules()
3464 if (!(hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_apply_state_adjust_rules()
3465 hwmgr->chip_id <= CHIP_VEGAM)) in smu7_apply_state_adjust_rules()
3474 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_apply_state_adjust_rules()
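The condition built around smu7_vblank_too_short() above decides whether memory reclocking has to be pinned: MCLK switching is disabled when more than one unsynchronized display is active, or when any active display's vblank is too short to hide the switch. A toy model of just that predicate (standalone; the inputs are illustrative):

    #include <stdbool.h>
    #include <stdio.h>

    /* Toy model of the "is an MCLK switch safe?" decision quoted above. */
    static bool disable_mclk_switching(int num_display, bool in_sync, bool vblank_too_short)
    {
        return ((num_display > 1) && !in_sync) ||
               (num_display && vblank_too_short);
    }

    int main(void)
    {
        printf("%d\n", disable_mclk_switching(2, false, false)); /* 1: two async displays */
        printf("%d\n", disable_mclk_switching(1, true,  true));  /* 1: vblank too short   */
        printf("%d\n", disable_mclk_switching(1, true,  false)); /* 0: switching allowed  */
        return 0;
    }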
3487 static uint32_t smu7_dpm_get_mclk(struct pp_hwmgr *hwmgr, bool low) in smu7_dpm_get_mclk() argument
3492 if (hwmgr == NULL) in smu7_dpm_get_mclk()
3495 ps = hwmgr->request_ps; in smu7_dpm_get_mclk()
3509 static uint32_t smu7_dpm_get_sclk(struct pp_hwmgr *hwmgr, bool low) in smu7_dpm_get_sclk() argument
3514 if (hwmgr == NULL) in smu7_dpm_get_sclk()
3517 ps = hwmgr->request_ps; in smu7_dpm_get_sclk()
3531 static int smu7_dpm_patch_boot_state(struct pp_hwmgr *hwmgr, in smu7_dpm_patch_boot_state() argument
3534 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_dpm_patch_boot_state()
3544 fw_info = (ATOM_FIRMWARE_INFO_V2_2 *)smu_atom_get_data_table(hwmgr->adev, index, in smu7_dpm_patch_boot_state()
3562 smu7_get_current_pcie_speed(hwmgr); in smu7_dpm_patch_boot_state()
3565 (uint16_t)smu7_get_current_pcie_lane_number(hwmgr); in smu7_dpm_patch_boot_state()
3576 static int smu7_get_number_of_powerplay_table_entries(struct pp_hwmgr *hwmgr) in smu7_get_number_of_powerplay_table_entries() argument
3581 if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_get_number_of_powerplay_table_entries()
3582 result = pp_tables_get_num_of_entries(hwmgr, &ret); in smu7_get_number_of_powerplay_table_entries()
3584 } else if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_get_number_of_powerplay_table_entries()
3585 result = get_number_of_powerplay_table_entries_v1_0(hwmgr); in smu7_get_number_of_powerplay_table_entries()
3591 static int smu7_get_pp_table_entry_callback_func_v1(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry_callback_func_v1() argument
3595 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_pp_table_entry_callback_func_v1()
3645 …(smu7_power_state->performance_level_count < smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_GRAPHIC… in smu7_get_pp_table_entry_callback_func_v1()
3651 hwmgr->platform_descriptor.hardwareActivityPerformanceLevels), in smu7_get_pp_table_entry_callback_func_v1()
3689 static int smu7_get_pp_table_entry_v1(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry_v1() argument
3694 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_pp_table_entry_v1()
3696 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_get_pp_table_entry_v1()
3704 result = get_powerplay_table_entry_v1_0(hwmgr, entry_index, state, in smu7_get_pp_table_entry_v1()
3791 static int smu7_get_pp_table_entry_callback_func_v0(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry_callback_func_v0() argument
3795 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_pp_table_entry_callback_func_v0()
3809 (ps->performance_level_count < smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_GRAPHICS)), in smu7_get_pp_table_entry_callback_func_v0()
3815 hwmgr->platform_descriptor.hardwareActivityPerformanceLevels), in smu7_get_pp_table_entry_callback_func_v0()
3834 static int smu7_get_pp_table_entry_v0(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry_v0() argument
3839 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_pp_table_entry_v0()
3841 hwmgr->dyn_state.vddci_dependency_on_mclk; in smu7_get_pp_table_entry_v0()
3849 result = pp_tables_get_entry(hwmgr, entry_index, state, in smu7_get_pp_table_entry_v0()
3941 static int smu7_get_pp_table_entry(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry() argument
3944 if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_get_pp_table_entry()
3945 return smu7_get_pp_table_entry_v0(hwmgr, entry_index, state); in smu7_get_pp_table_entry()
3946 else if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_get_pp_table_entry()
3947 return smu7_get_pp_table_entry_v1(hwmgr, entry_index, state); in smu7_get_pp_table_entry()
3952 static int smu7_get_gpu_power(struct pp_hwmgr *hwmgr, u32 *query) in smu7_get_gpu_power() argument
3954 struct amdgpu_device *adev = hwmgr->adev; in smu7_get_gpu_power()
3972 smum_send_msg_to_smc_with_parameter(hwmgr, PPSMC_MSG_GetCurrPkgPwr, 0, &tmp); in smu7_get_gpu_power()
3979 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_PmStatusLogStart, NULL); in smu7_get_gpu_power()
3980 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_get_gpu_power()
3985 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_PmStatusLogSample, NULL); in smu7_get_gpu_power()
3986 tmp = cgs_read_ind_register(hwmgr->device, in smu7_get_gpu_power()
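smu7_get_gpu_power() has two paths: parts that understand PPSMC_MSG_GetCurrPkgPwr get the reading back directly, while the older path starts a PM status log, lets it accumulate over a sampling window, latches a sample, and reads the averaged value out of an indirect SMC register. A schematic, self-contained model of that second sequence (every name below is an illustrative stand-in, not the driver's API, and the sampling window is made up):

    #include <stdint.h>
    #include <stdio.h>
    #include <unistd.h>

    /* Stand-ins for "send a message to the SMC" and "read the logged result". */
    static void smc_send(const char *msg)        { printf("SMC <- %s\n", msg); }
    static uint32_t smc_read_power_log(void)     { return 0x2042; /* whatever the firmware accumulated */ }

    int main(void)
    {
        smc_send("PmStatusLogStart");    /* begin accumulating power samples */
        usleep(20 * 1000);               /* sampling window (illustrative)   */
        smc_send("PmStatusLogSample");   /* latch the averaged value         */

        printf("raw averaged package power: 0x%x\n", smc_read_power_log());
        return 0;
    }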
3997 static int smu7_read_sensor(struct pp_hwmgr *hwmgr, int idx, in smu7_read_sensor() argument
4002 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_read_sensor()
4003 struct amdgpu_device *adev = hwmgr->adev; in smu7_read_sensor()
4012 ret = smum_send_msg_to_smc(hwmgr, PPSMC_MSG_API_GetSclkFrequency, &sclk); in smu7_read_sensor()
4019 ret = smum_send_msg_to_smc(hwmgr, PPSMC_MSG_API_GetMclkFrequency, &mclk); in smu7_read_sensor()
4027 offset = data->soft_regs_start + smum_get_offsetof(hwmgr, in smu7_read_sensor()
4033 activity_percent = cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, offset); in smu7_read_sensor()
4040 *((uint32_t *)value) = smu7_thermal_get_temperature(hwmgr); in smu7_read_sensor()
4056 return smu7_get_gpu_power(hwmgr, (uint32_t *)value); in smu7_read_sensor()
4066 return smu7_get_gpu_power(hwmgr, (uint32_t *)value); in smu7_read_sensor()
4070 val_vid = PHM_READ_INDIRECT_FIELD(hwmgr->device, in smu7_read_sensor()
4073 val_vid = PHM_READ_INDIRECT_FIELD(hwmgr->device, in smu7_read_sensor()
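In the GPU-load branch of smu7_read_sensor(), the value read back from the AverageGraphicsActivity soft register is scaled down to a percentage before being returned. The rounding and clamping below follow the usual SMU7 convention (activity reported with 8 fractional fixed-point bits); treat the exact format as an assumption:

    #include <stdint.h>
    #include <stdio.h>

    /* Convert a firmware activity counter (assumed to carry 8 fractional bits)
     * into an integer percentage: round to nearest, then clamp to 100. */
    static uint32_t activity_to_percent(uint32_t raw)
    {
        uint32_t percent = (raw + 0x80) >> 8;
        return percent > 100 ? 100 : percent;
    }

    int main(void)
    {
        printf("%u%%\n", activity_to_percent(0x3A80));  /* 58.5 -> 59 */
        return 0;
    }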
4083 static int smu7_find_dpm_states_clocks_in_dpm_table(struct pp_hwmgr *hwmgr, const void *input) in smu7_find_dpm_states_clocks_in_dpm_table() argument
4089 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_find_dpm_states_clocks_in_dpm_table()
4131 if (data->display_timing.num_existing_displays != hwmgr->display_config->num_display) in smu7_find_dpm_states_clocks_in_dpm_table()
4137 static uint16_t smu7_get_maximum_link_speed(struct pp_hwmgr *hwmgr, in smu7_get_maximum_link_speed() argument
4142 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_maximum_link_speed()
4163 struct pp_hwmgr *hwmgr, const void *input) in smu7_request_link_speed_change_before_state_change() argument
4167 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_request_link_speed_change_before_state_change()
4173 uint16_t target_link_speed = smu7_get_maximum_link_speed(hwmgr, smu7_nps); in smu7_request_link_speed_change_before_state_change()
4177 current_link_speed = smu7_get_maximum_link_speed(hwmgr, polaris10_cps); in smu7_request_link_speed_change_before_state_change()
4188 if (0 == amdgpu_acpi_pcie_performance_request(hwmgr->adev, PCIE_PERF_REQ_GEN3, false)) in smu7_request_link_speed_change_before_state_change()
4195 if (0 == amdgpu_acpi_pcie_performance_request(hwmgr->adev, PCIE_PERF_REQ_GEN2, false)) in smu7_request_link_speed_change_before_state_change()
4200 data->force_pcie_gen = smu7_get_current_pcie_speed(hwmgr); in smu7_request_link_speed_change_before_state_change()
4211 static int smu7_freeze_sclk_mclk_dpm(struct pp_hwmgr *hwmgr) in smu7_freeze_sclk_mclk_dpm() argument
4213 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_freeze_sclk_mclk_dpm()
4221 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_freeze_sclk_mclk_dpm()
4224 PP_ASSERT_WITH_CODE(0 == smum_send_msg_to_smc(hwmgr, in smu7_freeze_sclk_mclk_dpm()
4235 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_freeze_sclk_mclk_dpm()
4238 PP_ASSERT_WITH_CODE(0 == smum_send_msg_to_smc(hwmgr, in smu7_freeze_sclk_mclk_dpm()
4249 struct pp_hwmgr *hwmgr, const void *input) in smu7_populate_and_upload_sclk_mclk_dpm_levels() argument
4252 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_populate_and_upload_sclk_mclk_dpm_levels()
4262 if (hwmgr->od_enabled && data->need_update_smu7_dpm_table & DPMTABLE_OD_UPDATE_SCLK) { in smu7_populate_and_upload_sclk_mclk_dpm_levels()
4269 if (hwmgr->od_enabled && data->need_update_smu7_dpm_table & DPMTABLE_OD_UPDATE_MCLK) { in smu7_populate_and_upload_sclk_mclk_dpm_levels()
4278 result = smum_populate_all_graphic_levels(hwmgr); in smu7_populate_and_upload_sclk_mclk_dpm_levels()
4287 result = smum_populate_all_memory_levels(hwmgr); in smu7_populate_and_upload_sclk_mclk_dpm_levels()
4296 static int smu7_trim_single_dpm_states(struct pp_hwmgr *hwmgr, in smu7_trim_single_dpm_states() argument
4306 if ((!hwmgr->od_enabled || force_trim) in smu7_trim_single_dpm_states()
4317 static int smu7_trim_dpm_states(struct pp_hwmgr *hwmgr, in smu7_trim_dpm_states() argument
4320 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_trim_dpm_states()
4329 smu7_trim_single_dpm_states(hwmgr, in smu7_trim_dpm_states()
4334 smu7_trim_single_dpm_states(hwmgr, in smu7_trim_dpm_states()
4343 struct pp_hwmgr *hwmgr, const void *input) in smu7_generate_dpm_level_enable_mask() argument
4348 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_generate_dpm_level_enable_mask()
4353 result = smu7_trim_dpm_states(hwmgr, smu7_ps); in smu7_generate_dpm_level_enable_mask()
4367 static int smu7_unfreeze_sclk_mclk_dpm(struct pp_hwmgr *hwmgr) in smu7_unfreeze_sclk_mclk_dpm() argument
4369 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_unfreeze_sclk_mclk_dpm()
4378 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_unfreeze_sclk_mclk_dpm()
4381 PP_ASSERT_WITH_CODE(0 == smum_send_msg_to_smc(hwmgr, in smu7_unfreeze_sclk_mclk_dpm()
4392 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_unfreeze_sclk_mclk_dpm()
4395 PP_ASSERT_WITH_CODE(0 == smum_send_msg_to_smc(hwmgr, in smu7_unfreeze_sclk_mclk_dpm()
4408 struct pp_hwmgr *hwmgr, const void *input) in smu7_notify_link_speed_change_after_state_change() argument
4412 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_notify_link_speed_change_after_state_change()
4415 uint16_t target_link_speed = smu7_get_maximum_link_speed(hwmgr, smu7_ps); in smu7_notify_link_speed_change_after_state_change()
4427 smu7_get_current_pcie_speed(hwmgr) > 0) in smu7_notify_link_speed_change_after_state_change()
4431 if (amdgpu_acpi_pcie_performance_request(hwmgr->adev, request, false)) { in smu7_notify_link_speed_change_after_state_change()
4443 static int smu7_notify_no_display(struct pp_hwmgr *hwmgr) in smu7_notify_no_display() argument
4445 return (smum_send_msg_to_smc(hwmgr, (PPSMC_Msg)PPSMC_NoDisplay, NULL) == 0) ? 0 : -EINVAL; in smu7_notify_no_display()
4448 static int smu7_notify_has_display(struct pp_hwmgr *hwmgr) in smu7_notify_has_display() argument
4450 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_notify_has_display()
4452 if (hwmgr->feature_mask & PP_VBI_TIME_SUPPORT_MASK) { in smu7_notify_has_display()
4453 if (hwmgr->chip_id == CHIP_VEGAM) in smu7_notify_has_display()
4454 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_notify_has_display()
4458 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_notify_has_display()
4464 return (smum_send_msg_to_smc(hwmgr, (PPSMC_Msg)PPSMC_HasDisplay, NULL) == 0) ? 0 : -EINVAL; in smu7_notify_has_display()
4467 static int smu7_notify_smc_display(struct pp_hwmgr *hwmgr) in smu7_notify_smc_display() argument
4469 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_notify_smc_display()
4473 result = smu7_notify_no_display(hwmgr); in smu7_notify_smc_display()
4475 result = smu7_notify_has_display(hwmgr); in smu7_notify_smc_display()
4480 static int smu7_set_power_state_tasks(struct pp_hwmgr *hwmgr, const void *input) in smu7_set_power_state_tasks() argument
4483 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_power_state_tasks()
4485 tmp_result = smu7_find_dpm_states_clocks_in_dpm_table(hwmgr, input); in smu7_set_power_state_tasks()
4490 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_set_power_state_tasks()
4493 smu7_request_link_speed_change_before_state_change(hwmgr, input); in smu7_set_power_state_tasks()
4499 tmp_result = smu7_freeze_sclk_mclk_dpm(hwmgr); in smu7_set_power_state_tasks()
4503 tmp_result = smu7_populate_and_upload_sclk_mclk_dpm_levels(hwmgr, input); in smu7_set_power_state_tasks()
4512 if (hwmgr->hardcode_pp_table != NULL) in smu7_set_power_state_tasks()
4515 tmp_result = smu7_update_avfs(hwmgr); in smu7_set_power_state_tasks()
4520 tmp_result = smu7_generate_dpm_level_enable_mask(hwmgr, input); in smu7_set_power_state_tasks()
4525 tmp_result = smum_update_sclk_threshold(hwmgr); in smu7_set_power_state_tasks()
4530 tmp_result = smu7_unfreeze_sclk_mclk_dpm(hwmgr); in smu7_set_power_state_tasks()
4535 tmp_result = smu7_upload_dpm_level_enable_mask(hwmgr); in smu7_set_power_state_tasks()
4540 tmp_result = smu7_notify_smc_display(hwmgr); in smu7_set_power_state_tasks()
4545 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_set_power_state_tasks()
4548 smu7_notify_link_speed_change_after_state_change(hwmgr, input); in smu7_set_power_state_tasks()
4557 static int smu7_set_max_fan_pwm_output(struct pp_hwmgr *hwmgr, uint16_t us_max_fan_pwm) in smu7_set_max_fan_pwm_output() argument
4559 hwmgr->thermal_controller. in smu7_set_max_fan_pwm_output()
4562 return smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_set_max_fan_pwm_output()
4568 smu7_notify_smc_display_config_after_ps_adjustment(struct pp_hwmgr *hwmgr) in smu7_notify_smc_display_config_after_ps_adjustment() argument
4579 static int smu7_program_display_gap(struct pp_hwmgr *hwmgr) in smu7_program_display_gap() argument
4581 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_program_display_gap()
4582 …uint32_t display_gap = cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_DISPLAY_GAP_CNT… in smu7_program_display_gap()
4588 …display_gap = PHM_SET_FIELD(display_gap, CG_DISPLAY_GAP_CNTL, DISP_GAP, (hwmgr->display_config->nu… in smu7_program_display_gap()
4589 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_DISPLAY_GAP_CNTL, display_gap); in smu7_program_display_gap()
4591 ref_clock = amdgpu_asic_get_xclk((struct amdgpu_device *)hwmgr->adev); in smu7_program_display_gap()
4592 refresh_rate = hwmgr->display_config->vrefresh; in smu7_program_display_gap()
4599 pre_vbi_time_in_us = frame_time_in_us - 200 - hwmgr->display_config->min_vblank_time; in smu7_program_display_gap()
4610 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_DISPLAY_GAP_CNTL2, display_gap2); in smu7_program_display_gap()
4612 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_display_gap()
4613 data->soft_regs_start + smum_get_offsetof(hwmgr, in smu7_program_display_gap()
4617 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_display_gap()
4618 data->soft_regs_start + smum_get_offsetof(hwmgr, in smu7_program_display_gap()
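The timing math in smu7_program_display_gap() is straightforward: the frame period comes from the refresh rate, and the pre-vblank window is that period minus a 200 us margin and the display's minimum vblank time; that window (scaled by the reference clock from amdgpu_asic_get_xclk()) is what ends up in CG_DISPLAY_GAP_CNTL2 and the PreVBlankGap soft register. A standalone walk-through of the microsecond arithmetic with made-up inputs:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t refresh_rate    = 60;    /* Hz, from display_config->vrefresh        */
        uint32_t min_vblank_time = 300;   /* us, from display_config->min_vblank_time */

        uint32_t frame_time_in_us   = 1000000 / refresh_rate;            /* 16666 us */
        uint32_t pre_vbi_time_in_us = frame_time_in_us - 200 - min_vblank_time;

        printf("frame = %u us, pre-VBI window = %u us\n",
               frame_time_in_us, pre_vbi_time_in_us);
        return 0;
    }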
4626 static int smu7_display_configuration_changed_task(struct pp_hwmgr *hwmgr) in smu7_display_configuration_changed_task() argument
4628 return smu7_program_display_gap(hwmgr); in smu7_display_configuration_changed_task()
4638 static int smu7_set_max_fan_rpm_output(struct pp_hwmgr *hwmgr, uint16_t us_max_fan_rpm) in smu7_set_max_fan_rpm_output() argument
4640 hwmgr->thermal_controller. in smu7_set_max_fan_rpm_output()
4643 return smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_set_max_fan_rpm_output()
4652 static int smu7_register_irq_handlers(struct pp_hwmgr *hwmgr) in smu7_register_irq_handlers() argument
4662 amdgpu_irq_add_id((struct amdgpu_device *)(hwmgr->adev), in smu7_register_irq_handlers()
4666 amdgpu_irq_add_id((struct amdgpu_device *)(hwmgr->adev), in smu7_register_irq_handlers()
4672 amdgpu_irq_add_id((struct amdgpu_device *)(hwmgr->adev), in smu7_register_irq_handlers()
4681 smu7_check_smc_update_required_for_display_configuration(struct pp_hwmgr *hwmgr) in smu7_check_smc_update_required_for_display_configuration() argument
4683 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_smc_update_required_for_display_configuration()
4686 if (data->display_timing.num_existing_displays != hwmgr->display_config->num_display) in smu7_check_smc_update_required_for_display_configuration()
4689 if (data->display_timing.vrefresh != hwmgr->display_config->vrefresh) in smu7_check_smc_update_required_for_display_configuration()
4692 if (hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_check_smc_update_required_for_display_configuration()
4693 hwmgr->chip_id <= CHIP_VEGAM && in smu7_check_smc_update_required_for_display_configuration()
4697 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, PHM_PlatformCaps_SclkDeepSleep)) { in smu7_check_smc_update_required_for_display_configuration()
4698 if (data->display_timing.min_clock_in_sr != hwmgr->display_config->min_core_set_clock_in_sr && in smu7_check_smc_update_required_for_display_configuration()
4700 hwmgr->display_config->min_core_set_clock_in_sr >= SMU7_MINIMUM_ENGINE_CLOCK)) in smu7_check_smc_update_required_for_display_configuration()
4715 static int smu7_check_states_equal(struct pp_hwmgr *hwmgr, in smu7_check_states_equal() argument
4722 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_states_equal()
4755 static int smu7_check_mc_firmware(struct pp_hwmgr *hwmgr) in smu7_check_mc_firmware() argument
4757 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_mc_firmware()
4766 smu7_get_mc_microcode_version(hwmgr); in smu7_check_mc_firmware()
4770 cgs_write_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_INDEX, in smu7_check_mc_firmware()
4772 tmp = cgs_read_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_DATA); in smu7_check_mc_firmware()
4777 if ((hwmgr->chip_id == CHIP_POLARIS10) || in smu7_check_mc_firmware()
4778 (hwmgr->chip_id == CHIP_POLARIS11) || in smu7_check_mc_firmware()
4779 (hwmgr->chip_id == CHIP_POLARIS12)) in smu7_check_mc_firmware()
4780 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_EnableFFC, NULL); in smu7_check_mc_firmware()
4784 if ((hwmgr->chip_id == CHIP_POLARIS10) || in smu7_check_mc_firmware()
4785 (hwmgr->chip_id == CHIP_POLARIS11) || in smu7_check_mc_firmware()
4786 (hwmgr->chip_id == CHIP_POLARIS12)) in smu7_check_mc_firmware()
4787 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DisableFFC, NULL); in smu7_check_mc_firmware()
4793 static int smu7_read_clock_registers(struct pp_hwmgr *hwmgr) in smu7_read_clock_registers() argument
4795 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_read_clock_registers()
4798 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_FUNC_CNTL); in smu7_read_clock_registers()
4800 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_FUNC_CNTL_2); in smu7_read_clock_registers()
4802 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_FUNC_CNTL_3); in smu7_read_clock_registers()
4804 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_FUNC_CNTL_4); in smu7_read_clock_registers()
4806 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_SPREAD_SPECTRUM); in smu7_read_clock_registers()
4808 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_SPREAD_SPECTRUM_2); in smu7_read_clock_registers()
4810 cgs_read_register(hwmgr->device, mmDLL_CNTL); in smu7_read_clock_registers()
4812 cgs_read_register(hwmgr->device, mmMCLK_PWRMGT_CNTL); in smu7_read_clock_registers()
4814 cgs_read_register(hwmgr->device, mmMPLL_AD_FUNC_CNTL); in smu7_read_clock_registers()
4816 cgs_read_register(hwmgr->device, mmMPLL_DQ_FUNC_CNTL); in smu7_read_clock_registers()
4818 cgs_read_register(hwmgr->device, mmMPLL_FUNC_CNTL); in smu7_read_clock_registers()
4820 cgs_read_register(hwmgr->device, mmMPLL_FUNC_CNTL_1); in smu7_read_clock_registers()
4822 cgs_read_register(hwmgr->device, mmMPLL_FUNC_CNTL_2); in smu7_read_clock_registers()
4824 cgs_read_register(hwmgr->device, mmMPLL_SS1); in smu7_read_clock_registers()
4826 cgs_read_register(hwmgr->device, mmMPLL_SS2); in smu7_read_clock_registers()
4837 static int smu7_get_memory_type(struct pp_hwmgr *hwmgr) in smu7_get_memory_type() argument
4839 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_memory_type()
4840 struct amdgpu_device *adev = hwmgr->adev; in smu7_get_memory_type()
4853 static int smu7_enable_acpi_power_management(struct pp_hwmgr *hwmgr) in smu7_enable_acpi_power_management() argument
4855 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_acpi_power_management()
4867 static int smu7_init_power_gate_state(struct pp_hwmgr *hwmgr) in smu7_init_power_gate_state() argument
4869 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_init_power_gate_state()
4877 static int smu7_init_sclk_threshold(struct pp_hwmgr *hwmgr) in smu7_init_sclk_threshold() argument
4879 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_init_sclk_threshold()
4885 static int smu7_setup_asic_task(struct pp_hwmgr *hwmgr) in smu7_setup_asic_task() argument
4889 smu7_check_mc_firmware(hwmgr); in smu7_setup_asic_task()
4891 tmp_result = smu7_read_clock_registers(hwmgr); in smu7_setup_asic_task()
4895 tmp_result = smu7_get_memory_type(hwmgr); in smu7_setup_asic_task()
4899 tmp_result = smu7_enable_acpi_power_management(hwmgr); in smu7_setup_asic_task()
4903 tmp_result = smu7_init_power_gate_state(hwmgr); in smu7_setup_asic_task()
4907 tmp_result = smu7_get_mc_microcode_version(hwmgr); in smu7_setup_asic_task()
4911 tmp_result = smu7_init_sclk_threshold(hwmgr); in smu7_setup_asic_task()
4918 static int smu7_force_clock_level(struct pp_hwmgr *hwmgr, in smu7_force_clock_level() argument
4921 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_force_clock_level()
4929 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_clock_level()
4936 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_clock_level()
4947 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_PCIeDPM_UnForceLevel, in smu7_force_clock_level()
4950 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_clock_level()
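In smu7_force_clock_level(), the SCLK and MCLK cases hand the firmware a bitmask of levels to keep enabled (in the driver the caller-supplied mask is intersected with the table's own enable mask), while the PCIe case unforces and then forces a single level. A toy model of that mask intersection (all values are invented):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t enabled_levels = 0x000000FF;  /* levels the DPM table exposes  */
        uint32_t requested_mask = 0x00000060;  /* caller asked for levels 5, 6  */

        /* Only levels that exist AND were requested stay enabled; this is the
         * value handed to the SetEnabledMask-style firmware message. */
        uint32_t effective_mask = enabled_levels & requested_mask;

        printf("0x%08x\n", effective_mask);    /* -> 0x00000060 */
        return 0;
    }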
4964 static int smu7_print_clock_levels(struct pp_hwmgr *hwmgr, in smu7_print_clock_levels() argument
4967 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_print_clock_levels()
4979 ret = smum_send_msg_to_smc(hwmgr, PPSMC_MSG_API_GetSclkFrequency, &clock); in smu7_print_clock_levels()
4995 ret = smum_send_msg_to_smc(hwmgr, PPSMC_MSG_API_GetMclkFrequency, &clock); in smu7_print_clock_levels()
5011 pcie_speed = smu7_get_current_pcie_speed(hwmgr); in smu7_print_clock_levels()
5027 if (hwmgr->od_enabled) { in smu7_print_clock_levels()
5036 if (hwmgr->od_enabled) { in smu7_print_clock_levels()
5045 if (hwmgr->od_enabled) { in smu7_print_clock_levels()
5049 hwmgr->platform_descriptor.overdriveLimit.engineClock/100); in smu7_print_clock_levels()
5052 hwmgr->platform_descriptor.overdriveLimit.memoryClock/100); in smu7_print_clock_levels()
5064 static void smu7_set_fan_control_mode(struct pp_hwmgr *hwmgr, uint32_t mode) in smu7_set_fan_control_mode() argument
5068 smu7_fan_ctrl_set_fan_speed_pwm(hwmgr, 255); in smu7_set_fan_control_mode()
5071 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_set_fan_control_mode()
5073 smu7_fan_ctrl_stop_smc_fan_control(hwmgr); in smu7_set_fan_control_mode()
5076 if (!smu7_fan_ctrl_set_static_mode(hwmgr, mode)) in smu7_set_fan_control_mode()
5077 smu7_fan_ctrl_start_smc_fan_control(hwmgr); in smu7_set_fan_control_mode()
5084 static uint32_t smu7_get_fan_control_mode(struct pp_hwmgr *hwmgr) in smu7_get_fan_control_mode() argument
5086 return hwmgr->fan_ctrl_enabled ? AMD_FAN_CTRL_AUTO : AMD_FAN_CTRL_MANUAL; in smu7_get_fan_control_mode()
5089 static int smu7_get_sclk_od(struct pp_hwmgr *hwmgr) in smu7_get_sclk_od() argument
5091 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_sclk_od()
5105 static int smu7_set_sclk_od(struct pp_hwmgr *hwmgr, uint32_t value) in smu7_set_sclk_od() argument
5107 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_sclk_od()
5116 ps = hwmgr->request_ps; in smu7_set_sclk_od()
5131 static int smu7_get_mclk_od(struct pp_hwmgr *hwmgr) in smu7_get_mclk_od() argument
5133 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_mclk_od()
5147 static int smu7_set_mclk_od(struct pp_hwmgr *hwmgr, uint32_t value) in smu7_set_mclk_od() argument
5149 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_mclk_od()
5158 ps = hwmgr->request_ps; in smu7_set_mclk_od()
5174 static int smu7_get_sclks(struct pp_hwmgr *hwmgr, struct amd_pp_clocks *clocks) in smu7_get_sclks() argument
5177 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_sclks()
5182 if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_get_sclks()
5189 } else if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_get_sclks()
5190 sclk_table = hwmgr->dyn_state.vddc_dependency_on_sclk; in smu7_get_sclks()
5199 static uint32_t smu7_get_mem_latency(struct pp_hwmgr *hwmgr, uint32_t clk) in smu7_get_mem_latency() argument
5201 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_mem_latency()
5211 static int smu7_get_mclks(struct pp_hwmgr *hwmgr, struct amd_pp_clocks *clocks) in smu7_get_mclks() argument
5214 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_mclks()
5219 if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_get_mclks()
5225 clocks->latency[i] = smu7_get_mem_latency(hwmgr, in smu7_get_mclks()
5229 } else if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_get_mclks()
5230 mclk_table = hwmgr->dyn_state.vddc_dependency_on_mclk; in smu7_get_mclks()
5238 static int smu7_get_clock_by_type(struct pp_hwmgr *hwmgr, enum amd_pp_clock_type type, in smu7_get_clock_by_type() argument
5243 smu7_get_sclks(hwmgr, clocks); in smu7_get_clock_by_type()
5246 smu7_get_mclks(hwmgr, clocks); in smu7_get_clock_by_type()
5255 static int smu7_get_sclks_with_latency(struct pp_hwmgr *hwmgr, in smu7_get_sclks_with_latency() argument
5259 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_sclks_with_latency()
5276 static int smu7_get_mclks_with_latency(struct pp_hwmgr *hwmgr, in smu7_get_mclks_with_latency() argument
5280 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_mclks_with_latency()
5283 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_mclks_with_latency()
5296 smu7_get_mem_latency(hwmgr, dep_mclk_table->entries[i].clk); in smu7_get_mclks_with_latency()
5305 static int smu7_get_clock_by_type_with_latency(struct pp_hwmgr *hwmgr, in smu7_get_clock_by_type_with_latency() argument
5309 if (!(hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_get_clock_by_type_with_latency()
5310 hwmgr->chip_id <= CHIP_VEGAM)) in smu7_get_clock_by_type_with_latency()
5315 smu7_get_sclks_with_latency(hwmgr, clocks); in smu7_get_clock_by_type_with_latency()
5318 smu7_get_mclks_with_latency(hwmgr, clocks); in smu7_get_clock_by_type_with_latency()
5327 static int smu7_set_watermarks_for_clocks_ranges(struct pp_hwmgr *hwmgr, in smu7_set_watermarks_for_clocks_ranges() argument
5331 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_set_watermarks_for_clocks_ranges()
5337 (struct polaris10_smumgr *)(hwmgr->smu_backend); in smu7_set_watermarks_for_clocks_ranges()
5344 if (!(hwmgr->chip_id >= CHIP_POLARIS10 && in smu7_set_watermarks_for_clocks_ranges()
5345 hwmgr->chip_id <= CHIP_VEGAM)) in smu7_set_watermarks_for_clocks_ranges()
5367 return smu7_copy_bytes_to_smc(hwmgr, in smu7_set_watermarks_for_clocks_ranges()
5374 static int smu7_notify_cac_buffer_info(struct pp_hwmgr *hwmgr, in smu7_notify_cac_buffer_info() argument
5381 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_notify_cac_buffer_info()
5383 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
5385 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
5389 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
5391 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
5395 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
5397 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
5401 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
5403 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
5407 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
5409 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
5415 static int smu7_get_max_high_clocks(struct pp_hwmgr *hwmgr, in smu7_get_max_high_clocks() argument
5418 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_max_high_clocks()
5434 static int smu7_get_thermal_temperature_range(struct pp_hwmgr *hwmgr, in smu7_get_thermal_temperature_range() argument
5437 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_thermal_temperature_range()
5439 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_thermal_temperature_range()
5443 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_get_thermal_temperature_range()
5446 else if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_get_thermal_temperature_range()
5455 static bool smu7_check_clk_voltage_valid(struct pp_hwmgr *hwmgr, in smu7_check_clk_voltage_valid() argument
5460 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_clk_voltage_valid()
5471 hwmgr->platform_descriptor.overdriveLimit.engineClock < clk) { in smu7_check_clk_voltage_valid()
5474 hwmgr->platform_descriptor.overdriveLimit.engineClock/100); in smu7_check_clk_voltage_valid()
5479 hwmgr->platform_descriptor.overdriveLimit.memoryClock < clk) { in smu7_check_clk_voltage_valid()
5482 hwmgr->platform_descriptor.overdriveLimit.memoryClock/100); in smu7_check_clk_voltage_valid()
5492 static int smu7_odn_edit_dpm_table(struct pp_hwmgr *hwmgr, in smu7_odn_edit_dpm_table() argument
5499 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_odn_edit_dpm_table()
5508 if (!hwmgr->od_enabled) { in smu7_odn_edit_dpm_table()
5527 smu7_odn_initial_default_setting(hwmgr); in smu7_odn_edit_dpm_table()
5530 smu7_check_dpm_table_updated(hwmgr); in smu7_odn_edit_dpm_table()
5545 if (smu7_check_clk_voltage_valid(hwmgr, type, input_clk, input_vol)) { in smu7_odn_edit_dpm_table()
5559 static int smu7_get_power_profile_mode(struct pp_hwmgr *hwmgr, char *buf) in smu7_get_power_profile_mode() argument
5561 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_power_profile_mode()
5586 if (i == hwmgr->power_profile_mode) { in smu7_get_power_profile_mode()
5619 static void smu7_patch_compute_profile_mode(struct pp_hwmgr *hwmgr, in smu7_patch_compute_profile_mode() argument
5622 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_compute_profile_mode()
5632 smu7_force_clock_level(hwmgr, PP_SCLK, 3 << (level-1)); in smu7_patch_compute_profile_mode()
5634 } else if (hwmgr->power_profile_mode == PP_SMC_POWER_PROFILE_COMPUTE) { in smu7_patch_compute_profile_mode()
5635 smu7_force_clock_level(hwmgr, PP_SCLK, data->dpm_level_enable_mask.sclk_dpm_enable_mask); in smu7_patch_compute_profile_mode()
5639 static int smu7_set_power_profile_mode(struct pp_hwmgr *hwmgr, long *input, uint32_t size) in smu7_set_power_profile_mode() argument
5641 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_power_profile_mode()
5672 if (!smum_update_dpm_settings(hwmgr, &tmp)) { in smu7_set_power_profile_mode()
5674 hwmgr->power_profile_mode = mode; in smu7_set_power_profile_mode()
5682 if (mode == hwmgr->power_profile_mode) in smu7_set_power_profile_mode()
5686 if (!smum_update_dpm_settings(hwmgr, &tmp)) { in smu7_set_power_profile_mode()
5699 smu7_patch_compute_profile_mode(hwmgr, mode); in smu7_set_power_profile_mode()
5700 hwmgr->power_profile_mode = mode; in smu7_set_power_profile_mode()
5710 static int smu7_get_performance_level(struct pp_hwmgr *hwmgr, const struct pp_hw_power_state *state, in smu7_get_performance_level() argument
5717 if (level == NULL || hwmgr == NULL || state == NULL) in smu7_get_performance_level()
5731 static int smu7_power_off_asic(struct pp_hwmgr *hwmgr) in smu7_power_off_asic() argument
5735 result = smu7_disable_dpm_tasks(hwmgr); in smu7_power_off_asic()
5825 int smu7_init_function_pointers(struct pp_hwmgr *hwmgr) in smu7_init_function_pointers() argument
5827 hwmgr->hwmgr_func = &smu7_hwmgr_funcs; in smu7_init_function_pointers()
5828 if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_init_function_pointers()
5829 hwmgr->pptable_func = &pptable_funcs; in smu7_init_function_pointers()
5830 else if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_init_function_pointers()
5831 hwmgr->pptable_func = &pptable_v1_0_funcs; in smu7_init_function_pointers()
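smu7_init_function_pointers() is a plain dispatch: the shared smu7_hwmgr_funcs vtable is installed unconditionally, and the powerplay-table parser vtable is chosen by pp_table_version. A self-contained toy model of that version-keyed vtable selection (structures and names below are illustrative only):

    #include <stdio.h>

    struct pptable_ops {
        const char *(*name)(void);
    };

    static const char *v0_name(void) { return "pptable v0 parser"; }
    static const char *v1_name(void) { return "pptable v1 parser"; }

    static const struct pptable_ops v0_ops = { .name = v0_name };
    static const struct pptable_ops v1_ops = { .name = v1_name };

    int main(void)
    {
        int pp_table_version = 1;              /* illustrative */
        const struct pptable_ops *ops =
            (pp_table_version == 0) ? &v0_ops : &v1_ops;

        printf("%s\n", ops->name());           /* parser picked by table version */
        return 0;
    }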