// SPDX-License-Identifier: MIT
//
// Copyright 2024 Advanced Micro Devices, Inc.

#include "dml2_dpmm_dcn4.h"
#include "dml2_internal_shared_types.h"
#include "dml_top_types.h"
#include "lib_float_math.h"

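/*
 * Convert a DRAM bandwidth requirement into the minimum UCLK that can
 * sustain it. Each memory clock tick moves
 * channel_count * channel_width_bytes * transactions_per_clock bytes, so
 * dividing kbytes/sec by bytes/tick yields kticks/sec, i.e. kHz.
 *
 * Illustrative (hypothetical) numbers: 16 channels x 2 bytes x 2
 * transactions/clk = 64 bytes per tick, so a demand of 6,400,000 kbps
 * requires 6,400,000 / 64 = 100,000 kHz (100 MHz) of UCLK.
 */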
static double dram_bw_kbps_to_uclk_khz(unsigned long long bandwidth_kbps, const struct dml2_dram_params *dram_config)
{
	double uclk_khz = 0;
	unsigned long uclk_mbytes_per_tick = 0;

	uclk_mbytes_per_tick = dram_config->channel_count * dram_config->channel_width_bytes * dram_config->transactions_per_clock;

	uclk_khz = (double)bandwidth_kbps / uclk_mbytes_per_tick;

	return uclk_khz;
}

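/*
 * Look up the clock floors implied by the chosen latency configuration.
 * When the stage3 optimization succeeded its clock index is used;
 * otherwise fall back to the stage1 (minimum viable) index. UCLK is
 * stored in the table as a pre-derate bandwidth, so convert it to kHz.
 */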
static void get_minimum_clocks_for_latency(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out,
	double *uclk,
	double *fclk,
	double *dcfclk)
{
	int min_clock_index_for_latency;

	if (in_out->display_cfg->stage3.success)
		min_clock_index_for_latency = in_out->display_cfg->stage3.min_clk_index_for_latency;
	else
		min_clock_index_for_latency = in_out->display_cfg->stage1.min_clk_index_for_latency;

	*dcfclk = in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].min_dcfclk_khz;
	*fclk = in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].min_fclk_khz;
	*uclk = dram_bw_kbps_to_uclk_khz(in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].pre_derate_dram_bw_kbps,
		&in_out->soc_bb->clk_table.dram_config);
}

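/* Ceiling of a non-negative double, without pulling in a math library ceil(). */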
static unsigned long dml_round_up(double a)
{
	if (a - (unsigned long)a > 0)
		return ((unsigned long)a) + 1;
	return (unsigned long)a;
}

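/*
 * Derive the minimum clocks needed while the display is actively fetching.
 * For each of UCLK/FCLK/DCFCLK the requirement is the larger of the
 * average and urgent bandwidth demands, each inflated by the matching
 * derate factor, and never below the latency-derived floor.
 *
 * Illustrative (hypothetical) numbers: an average demand of 80,000 kHz at
 * a 90% derate becomes 80,000 / 0.9 = 88,889 kHz before the max() checks.
 */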
static void calculate_system_active_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	double min_uclk_avg, min_uclk_urgent, min_uclk_bw;
	double min_fclk_avg, min_fclk_urgent, min_fclk_bw;
	double min_dcfclk_avg, min_dcfclk_urgent, min_dcfclk_bw;
	double min_uclk_latency, min_fclk_latency, min_dcfclk_latency;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	min_uclk_avg = dram_bw_kbps_to_uclk_khz(mode_support_result->global.active.average_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_avg = (double)min_uclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dram_derate_percent_pixel / 100);

	min_uclk_urgent = dram_bw_kbps_to_uclk_khz(mode_support_result->global.active.urgent_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	if (in_out->display_cfg->display_config.hostvm_enable)
		min_uclk_urgent = (double)min_uclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel_and_vm / 100);
	else
		min_uclk_urgent = (double)min_uclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel / 100);

	min_uclk_bw = min_uclk_urgent > min_uclk_avg ? min_uclk_urgent : min_uclk_avg;

	min_fclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.fclk_derate_percent / 100);

	min_fclk_urgent = (double)mode_support_result->global.active.urgent_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_urgent = (double)min_fclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.fclk_derate_percent / 100);

	min_fclk_bw = min_fclk_urgent > min_fclk_avg ? min_fclk_urgent : min_fclk_avg;

	min_dcfclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_average.dcfclk_derate_percent / 100);

	min_dcfclk_urgent = (double)mode_support_result->global.active.urgent_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_urgent = (double)min_dcfclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.system_active_urgent.dcfclk_derate_percent / 100);

	min_dcfclk_bw = min_dcfclk_urgent > min_dcfclk_avg ? min_dcfclk_urgent : min_dcfclk_avg;

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.active.uclk_khz = dml_round_up(min_uclk_bw > min_uclk_latency ? min_uclk_bw : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.active.fclk_khz = dml_round_up(min_fclk_bw > min_fclk_latency ? min_fclk_bw : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.active.dcfclk_khz = dml_round_up(min_dcfclk_bw > min_dcfclk_latency ? min_dcfclk_bw : min_dcfclk_latency);
}

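/*
 * Same derivation as the active case, but driven by the bandwidth the
 * subviewport (SVP) prefetch phase needs and the MALL-prefetch derates.
 */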
static void calculate_svp_prefetch_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	double min_uclk_avg, min_uclk_urgent, min_uclk_bw;
	double min_fclk_avg, min_fclk_urgent, min_fclk_bw;
	double min_dcfclk_avg, min_dcfclk_urgent, min_dcfclk_bw;
	double min_uclk_latency, min_fclk_latency, min_dcfclk_latency;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	min_uclk_avg = dram_bw_kbps_to_uclk_khz(mode_support_result->global.svp_prefetch.average_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_avg = (double)min_uclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.dram_derate_percent_pixel / 100);

	min_uclk_urgent = dram_bw_kbps_to_uclk_khz(mode_support_result->global.svp_prefetch.urgent_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_urgent = (double)min_uclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.dram_derate_percent_pixel / 100);

	min_uclk_bw = min_uclk_urgent > min_uclk_avg ? min_uclk_urgent : min_uclk_avg;

	min_fclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.fclk_derate_percent / 100);

	min_fclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_urgent = (double)min_fclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.fclk_derate_percent / 100);

	min_fclk_bw = min_fclk_urgent > min_fclk_avg ? min_fclk_urgent : min_fclk_avg;

	min_dcfclk_avg = (double)mode_support_result->global.svp_prefetch.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_average.dcfclk_derate_percent / 100);

	min_dcfclk_urgent = (double)mode_support_result->global.svp_prefetch.urgent_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_urgent = (double)min_dcfclk_urgent / ((double)in_out->soc_bb->qos_parameters.derate_table.dcn_mall_prefetch_urgent.dcfclk_derate_percent / 100);

	min_dcfclk_bw = min_dcfclk_urgent > min_dcfclk_avg ? min_dcfclk_urgent : min_dcfclk_avg;

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz = dml_round_up(min_uclk_bw > min_uclk_latency ? min_uclk_bw : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz = dml_round_up(min_fclk_bw > min_fclk_latency ? min_fclk_bw : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz = dml_round_up(min_dcfclk_bw > min_dcfclk_latency ? min_dcfclk_bw : min_dcfclk_latency);
}

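/*
 * Idle minimums only need to cover average bandwidth (no urgent traffic is
 * expected while idle), derated by the system-idle factors and again
 * floored by the latency-derived clocks.
 */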
static void calculate_idle_minimums(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	double min_uclk_avg;
	double min_fclk_avg;
	double min_dcfclk_avg;
	double min_uclk_latency, min_fclk_latency, min_dcfclk_latency;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	min_uclk_avg = dram_bw_kbps_to_uclk_khz(mode_support_result->global.active.average_bw_dram_kbps, &in_out->soc_bb->clk_table.dram_config);
	min_uclk_avg = (double)min_uclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.dram_derate_percent_pixel / 100);

	min_fclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->fabric_datapath_to_dcn_data_return_bytes;
	min_fclk_avg = (double)min_fclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.fclk_derate_percent / 100);

	min_dcfclk_avg = (double)mode_support_result->global.active.average_bw_sdp_kbps / in_out->soc_bb->return_bus_width_bytes;
	min_dcfclk_avg = (double)min_dcfclk_avg / ((double)in_out->soc_bb->qos_parameters.derate_table.system_idle_average.dcfclk_derate_percent / 100);

	get_minimum_clocks_for_latency(in_out, &min_uclk_latency, &min_fclk_latency, &min_dcfclk_latency);

	in_out->programming->min_clocks.dcn4x.idle.uclk_khz = dml_round_up(min_uclk_avg > min_uclk_latency ? min_uclk_avg : min_uclk_latency);
	in_out->programming->min_clocks.dcn4x.idle.fclk_khz = dml_round_up(min_fclk_avg > min_fclk_latency ? min_fclk_avg : min_fclk_latency);
	in_out->programming->min_clocks.dcn4x.idle.dcfclk_khz = dml_round_up(min_dcfclk_avg > min_dcfclk_latency ? min_dcfclk_avg : min_dcfclk_latency);
}

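/*
 * Round a clock up to a frequency the DFS can actually produce and report
 * the matching divider ID (DID). The divider is expressed in units of
 * 1/DFS_DIVIDER_RANGE_SCALE_FACTOR (0.25); the ranges below get coarser as
 * the divider grows, so the DID encoding is piecewise linear.
 *
 * Illustrative (hypothetical) numbers: with a 3,600,000 kHz VCO and a
 * 1,200,000 kHz request, divider = 4 * 3600/1200 = 12 (i.e. 3.00), which
 * falls in range 1: DID = 0x08 + (12 - 8) / 1 = 0x0c, and the rounded
 * clock is 3,600,000 * 4 / 12 = 1,200,000 kHz exactly.
 */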
static bool add_margin_and_round_to_dfs_granularity(double clock_khz, double margin, unsigned long vco_freq_khz, unsigned long *rounded_khz, uint32_t *divider_id)
{
	enum dentist_divider_range {
		DFS_DIVIDER_RANGE_1_START = 8, /* 2.00 */
		DFS_DIVIDER_RANGE_1_STEP = 1, /* 0.25 */
		DFS_DIVIDER_RANGE_2_START = 64, /* 16.00 */
		DFS_DIVIDER_RANGE_2_STEP = 2, /* 0.50 */
		DFS_DIVIDER_RANGE_3_START = 128, /* 32.00 */
		DFS_DIVIDER_RANGE_3_STEP = 4, /* 1.00 */
		DFS_DIVIDER_RANGE_4_START = 248, /* 62.00 */
		DFS_DIVIDER_RANGE_4_STEP = 264, /* 66.00 */
		DFS_DIVIDER_RANGE_SCALE_FACTOR = 4
	};

	enum DFS_base_divider_id {
		DFS_BASE_DID_1 = 0x08,
		DFS_BASE_DID_2 = 0x40,
		DFS_BASE_DID_3 = 0x60,
		DFS_BASE_DID_4 = 0x7e,
		DFS_MAX_DID = 0x7f
	};

	unsigned int divider;

	if (clock_khz < 1 || vco_freq_khz < 1 || clock_khz > vco_freq_khz)
		return false;

	clock_khz *= 1.0 + margin;

	divider = (unsigned int)((int)DFS_DIVIDER_RANGE_SCALE_FACTOR * (vco_freq_khz / clock_khz));

	/* we want to floor here to get a higher clock than required rather than lower */
	if (divider < DFS_DIVIDER_RANGE_2_START) {
		if (divider < DFS_DIVIDER_RANGE_1_START)
			*divider_id = DFS_BASE_DID_1;
		else
			*divider_id = DFS_BASE_DID_1 + ((divider - DFS_DIVIDER_RANGE_1_START) / DFS_DIVIDER_RANGE_1_STEP);
	} else if (divider < DFS_DIVIDER_RANGE_3_START) {
		*divider_id = DFS_BASE_DID_2 + ((divider - DFS_DIVIDER_RANGE_2_START) / DFS_DIVIDER_RANGE_2_STEP);
	} else if (divider < DFS_DIVIDER_RANGE_4_START) {
		*divider_id = DFS_BASE_DID_3 + ((divider - DFS_DIVIDER_RANGE_3_START) / DFS_DIVIDER_RANGE_3_STEP);
	} else {
		*divider_id = DFS_BASE_DID_4 + ((divider - DFS_DIVIDER_RANGE_4_START) / DFS_DIVIDER_RANGE_4_STEP);
		if (*divider_id > DFS_MAX_DID)
			*divider_id = DFS_MAX_DID;
	}

	*rounded_khz = vco_freq_khz * DFS_DIVIDER_RANGE_SCALE_FACTOR / divider;

	return true;
}

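/*
 * For SoCs without a DFS, all three clocks are produced from one PLL with
 * simple integer post-dividers. Pick the PLL as the largest requested
 * clock (rounded up to 1 MHz granularity, never below 600 MHz), then
 * divide down, capping each divider at 32; flooring the divider rounds
 * each output clock up rather than down.
 */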
static bool round_to_non_dfs_granularity(unsigned long dispclk_khz, unsigned long dpprefclk_khz, unsigned long dtbrefclk_khz,
	unsigned long *rounded_dispclk_khz, unsigned long *rounded_dpprefclk_khz, unsigned long *rounded_dtbrefclk_khz)
{
	unsigned long pll_frequency_khz;

	pll_frequency_khz = (unsigned long) math_max2(600000, math_ceil2(math_max3(dispclk_khz, dpprefclk_khz, dtbrefclk_khz), 1000));

	*rounded_dispclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dispclk_khz, 32);

	*rounded_dpprefclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dpprefclk_khz, 32);

	if (dtbrefclk_khz > 0) {
		*rounded_dtbrefclk_khz = pll_frequency_khz / (unsigned long) math_min2(pll_frequency_khz / dtbrefclk_khz, 32);
	} else {
		*rounded_dtbrefclk_khz = 0;
	}

	return true;
}

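/*
 * Round a minimum clock up to the next entry of a DPM table. Tables with
 * more than two entries are searched for the first level >= min_value;
 * two-entry tables describe a fine-grained (continuously tunable) clock,
 * so any value up to the table maximum is valid as-is. Returns false when
 * the requirement exceeds the table, i.e. the mode does not fit.
 */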
static bool round_up_and_copy_to_next_dpm(unsigned long min_value, unsigned long *rounded_value, const struct dml2_clk_table *clock_table)
{
	bool result = false;
	int index = 0;

	if (clock_table->num_clk_values > 2) {
		while (index < clock_table->num_clk_values && clock_table->clk_values_khz[index] < min_value)
			index++;

		if (index < clock_table->num_clk_values) {
			*rounded_value = clock_table->clk_values_khz[index];
			result = true;
		}
	} else if (clock_table->clk_values_khz[clock_table->num_clk_values - 1] >= min_value) {
		*rounded_value = min_value;
		result = true;
	}

	return result;
}

static bool round_up_to_next_dpm(unsigned long *clock_value, const struct dml2_clk_table *clock_table)
{
	return round_up_and_copy_to_next_dpm(*clock_value, clock_value, clock_table);
}

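/*
 * Fine-grained mapping: each of DCFCLK/FCLK/UCLK is rounded up to its own
 * DPM table independently for the active, SVP-prefetch and idle states.
 */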
static bool map_soc_min_clocks_to_dpm_fine_grained(struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
{
	bool result;

	result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.dcfclk_khz, &state_table->dcfclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.fclk_khz, &state_table->fclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.active.uclk_khz, &state_table->uclk);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.dcfclk_khz, &state_table->dcfclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.fclk_khz, &state_table->fclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.svp_prefetch.uclk_khz, &state_table->uclk);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.dcfclk_khz, &state_table->dcfclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.fclk_khz, &state_table->fclk);
	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.idle.uclk_khz, &state_table->uclk);

	return result;
}

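/*
 * Coarse-grained mapping: the SoC only supports locked combinations of
 * DCFCLK/FCLK/UCLK, so find the lowest state that satisfies all three
 * minimums at once (the tables are assumed to be sorted ascending and of
 * equal length; map_min_clocks_to_dpm checks the lengths before calling).
 */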
static bool map_soc_min_clocks_to_dpm_coarse_grained(struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
{
	bool result;
	int index;

	result = false;
	for (index = 0; index < state_table->uclk.num_clk_values; index++) {
		if (display_cfg->min_clocks.dcn4x.active.dcfclk_khz <= state_table->dcfclk.clk_values_khz[index] &&
			display_cfg->min_clocks.dcn4x.active.fclk_khz <= state_table->fclk.clk_values_khz[index] &&
			display_cfg->min_clocks.dcn4x.active.uclk_khz <= state_table->uclk.clk_values_khz[index]) {
			display_cfg->min_clocks.dcn4x.active.dcfclk_khz = state_table->dcfclk.clk_values_khz[index];
			display_cfg->min_clocks.dcn4x.active.fclk_khz = state_table->fclk.clk_values_khz[index];
			display_cfg->min_clocks.dcn4x.active.uclk_khz = state_table->uclk.clk_values_khz[index];
			result = true;
			break;
		}
	}

	if (result) {
		result = false;
		for (index = 0; index < state_table->uclk.num_clk_values; index++) {
			if (display_cfg->min_clocks.dcn4x.idle.dcfclk_khz <= state_table->dcfclk.clk_values_khz[index] &&
				display_cfg->min_clocks.dcn4x.idle.fclk_khz <= state_table->fclk.clk_values_khz[index] &&
				display_cfg->min_clocks.dcn4x.idle.uclk_khz <= state_table->uclk.clk_values_khz[index]) {
				display_cfg->min_clocks.dcn4x.idle.dcfclk_khz = state_table->dcfclk.clk_values_khz[index];
				display_cfg->min_clocks.dcn4x.idle.fclk_khz = state_table->fclk.clk_values_khz[index];
				display_cfg->min_clocks.dcn4x.idle.uclk_khz = state_table->uclk.clk_values_khz[index];
				result = true;
				break;
			}
		}
	}

	// SVP is not supported on any coarse-grained SoCs
	display_cfg->min_clocks.dcn4x.svp_prefetch.dcfclk_khz = 0;
	display_cfg->min_clocks.dcn4x.svp_prefetch.fclk_khz = 0;
	display_cfg->min_clocks.dcn4x.svp_prefetch.uclk_khz = 0;

	return result;
}

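/*
 * Top-level DPM mapping. A clock is treated as fine-grained when its table
 * has exactly two entries (min/max); coarse-grained SoCs must also have
 * identically sized DCFCLK/FCLK/UCLK tables so states can be matched by
 * index. All remaining display clocks are then rounded up individually.
 */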
static bool map_min_clocks_to_dpm(const struct dml2_core_mode_support_result *mode_support_result, struct dml2_display_cfg_programming *display_cfg, const struct dml2_soc_state_table *state_table)
{
	bool result = false;
	bool dcfclk_fine_grained = false, fclk_fine_grained = false, clock_state_count_identical = false;
	unsigned int i;

	if (!state_table || !display_cfg)
		return false;

	if (state_table->dcfclk.num_clk_values == 2)
		dcfclk_fine_grained = true;

	if (state_table->fclk.num_clk_values == 2)
		fclk_fine_grained = true;

	if (state_table->fclk.num_clk_values == state_table->dcfclk.num_clk_values &&
		state_table->fclk.num_clk_values == state_table->uclk.num_clk_values)
		clock_state_count_identical = true;

	if (dcfclk_fine_grained || fclk_fine_grained || !clock_state_count_identical)
		result = map_soc_min_clocks_to_dpm_fine_grained(display_cfg, state_table);
	else
		result = map_soc_min_clocks_to_dpm_coarse_grained(display_cfg, state_table);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dispclk_khz, &state_table->dispclk);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.deepsleep_dcfclk_khz, &state_table->dcfclk);

	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		if (result)
			result = round_up_to_next_dpm(&display_cfg->plane_programming[i].min_clocks.dcn4x.dppclk_khz, &state_table->dppclk);
	}

	for (i = 0; i < display_cfg->display_config.num_streams; i++) {
		if (result)
			result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].dscclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.dscclk_khz, &state_table->dscclk);
		if (result)
			result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].dtbclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.dtbclk_khz, &state_table->dtbclk);
		if (result)
			result = round_up_and_copy_to_next_dpm(mode_support_result->per_stream[i].phyclk_khz, &display_cfg->stream_programming[i].min_clocks.dcn4x.phyclk_khz, &state_table->phyclk);
	}

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dpprefclk_khz, &state_table->dppclk);

	if (result)
		result = round_up_to_next_dpm(&display_cfg->min_clocks.dcn4x.dtbrefclk_khz, &state_table->dtbclk);

	return result;
}

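/*
 * "Trivially synchronizable" means every masked stream can idle in vblank
 * at the same time without any active synchronization: all timings are
 * byte-identical and none of them is DRR (variable refresh).
 */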
static bool are_timings_trivially_synchronizable(struct dml2_display_cfg *display_config, int mask)
{
	unsigned char i;
	bool identical = true;
	bool contains_drr = false;
	unsigned char remap_array[DML2_MAX_PLANES];
	unsigned char remap_array_size = 0;

	// Create a remap array to enable simple iteration through only masked stream indices
	for (i = 0; i < display_config->num_streams; i++) {
		if (mask & (0x1 << i))
			remap_array[remap_array_size++] = i;
	}

	// 0 or 1 displays are always trivially synchronizable
	if (remap_array_size <= 1)
		return true;

	// Check that all display timings are identical
	for (i = 1; i < remap_array_size; i++) {
		if (memcmp(&display_config->stream_descriptors[remap_array[i - 1]].timing, &display_config->stream_descriptors[remap_array[i]].timing, sizeof(struct dml2_timing_cfg))) {
			identical = false;
			break;
		}
	}

	// Check if any display is DRR
	for (i = 0; i < remap_array_size; i++) {
		if (display_config->stream_descriptors[remap_array[i]].timing.drr_config.enabled) {
			contains_drr = true;
			break;
		}
	}

	// Trivial sync is possible only if all displays are identical and none are DRR
	return !contains_drr && identical;
}

static int find_smallest_idle_time_in_vblank_us(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int mask)
{
	unsigned char i;
	int min_idle_us = 0;
	unsigned char remap_array[DML2_MAX_PLANES];
	unsigned char remap_array_size = 0;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	// Create a remap array to enable simple iteration through only masked stream indices
	for (i = 0; i < in_out->programming->display_config.num_streams; i++) {
		if (mask & (0x1 << i))
			remap_array[remap_array_size++] = i;
	}

	if (remap_array_size == 0)
		return 0;

	min_idle_us = mode_support_result->cfg_support_info.stream_support_info[remap_array[0]].vblank_reserved_time_us;

	for (i = 1; i < remap_array_size; i++) {
		if (min_idle_us > mode_support_result->cfg_support_info.stream_support_info[remap_array[i]].vblank_reserved_time_us)
			min_idle_us = mode_support_result->cfg_support_info.stream_support_info[remap_array[i]].vblank_reserved_time_us;
	}

	return min_idle_us;
}

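/*
 * First-pass power management check: if the streams selected by mask 0xF
 * line up trivially, the shortest guaranteed vblank idle time is compared
 * against each blackout window to decide whether UCLK/FCLK p-state
 * changes fit entirely inside vblank.
 */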
static bool determine_power_management_features_with_vblank_only(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int min_idle_us;

	if (are_timings_trivially_synchronizable(&in_out->programming->display_config, 0xF)) {
		min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, 0xF);

		if (min_idle_us >= in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us)
			in_out->programming->uclk_pstate_supported = true;

		if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
			in_out->programming->fclk_pstate_supported = true;
	}

	return true;
}

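/*
 * Build a bitmask of planes whose vactive latency hiding is too small to
 * absorb the given blackout time; these are the planes that would need
 * vblank, FAMS or some other mechanism to allow a p-state switch.
 */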
static int get_displays_without_vactive_margin_mask(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int latency_hiding_requirement_us)
{
	unsigned int i;
	int displays_without_vactive_margin_mask = 0x0;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	for (i = 0; i < in_out->programming->display_config.num_planes; i++) {
		if (mode_support_result->cfg_support_info.plane_support_info[i].active_latency_hiding_us
			< latency_hiding_requirement_us)
			displays_without_vactive_margin_mask |= (0x1 << i);
	}

	return displays_without_vactive_margin_mask;
}

static int get_displays_with_fams_mask(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out, int latency_hiding_requirement_us)
{
	unsigned int i;
	int displays_with_fams_mask = 0x0;

	for (i = 0; i < in_out->programming->display_config.num_planes; i++) {
		if (in_out->programming->display_config.plane_descriptors[i].overrides.legacy_svp_config != dml2_svp_mode_override_auto)
			displays_with_fams_mask |= (0x1 << i);
	}

	return displays_with_fams_mask;
}

static bool determine_power_management_features_with_vactive_and_vblank(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int displays_without_vactive_margin_mask = 0x0;
	int min_idle_us = 0;

	if (in_out->programming->uclk_pstate_supported == false) {
		displays_without_vactive_margin_mask =
			get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));

		if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
			min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);

			if (min_idle_us >= in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us)
				in_out->programming->uclk_pstate_supported = true;
		}
	}

	if (in_out->programming->fclk_pstate_supported == false) {
		displays_without_vactive_margin_mask =
			get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.fclk_change_blackout_us));

		if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
			min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);

			if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
				in_out->programming->fclk_pstate_supported = true;
		}
	}

	return true;
}

static bool determine_power_management_features_with_fams(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int displays_without_vactive_margin_mask = 0x0;
	int displays_with_fams_mask = 0x0;

	displays_without_vactive_margin_mask =
		get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));

	displays_with_fams_mask =
		get_displays_with_fams_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.dram_clk_change_blackout_us));

	// UCLK p-state is supported if every display lacking vactive margin is covered by FAMS
	if ((displays_without_vactive_margin_mask & ~displays_with_fams_mask) == 0)
		in_out->programming->uclk_pstate_supported = true;

	return true;
}

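/*
 * When a p-state switch cannot be guaranteed, the corresponding minimum
 * clock is pinned to the top DPM level in every state so the hardware
 * never has to raise it mid-frame.
 */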
static void clamp_uclk_to_max(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	in_out->programming->min_clocks.dcn4x.active.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
	in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
	in_out->programming->min_clocks.dcn4x.idle.uclk_khz = in_out->soc_bb->clk_table.uclk.clk_values_khz[in_out->soc_bb->clk_table.uclk.num_clk_values - 1];
}

static void clamp_fclk_to_max(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	in_out->programming->min_clocks.dcn4x.active.fclk_khz = in_out->soc_bb->clk_table.fclk.clk_values_khz[in_out->soc_bb->clk_table.fclk.num_clk_values - 1];
	in_out->programming->min_clocks.dcn4x.idle.fclk_khz = in_out->soc_bb->clk_table.fclk.clk_values_khz[in_out->soc_bb->clk_table.fclk.num_clk_values - 1];
}

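/*
 * Core of the DPMM: compute the minimum clocks for each power state, fold
 * in SVP-prefetch constraints and downspread/ramping margins, quantize
 * everything to achievable frequencies (DFS or PLL), and finally snap the
 * results onto the SoC's DPM tables.
 */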
static bool map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	int i;
	bool result;
	double dispclk_khz;
	const struct dml2_core_mode_support_result *mode_support_result = &in_out->display_cfg->mode_support_result;

	calculate_system_active_minimums(in_out);
	calculate_svp_prefetch_minimums(in_out);
	calculate_idle_minimums(in_out);

	// In NV4, there's no support for FCLK or DCFCLK DPM change before SVP prefetch starts,
	// therefore active minimums must be boosted to prefetch minimums
	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz > in_out->programming->min_clocks.dcn4x.active.uclk_khz)
		in_out->programming->min_clocks.dcn4x.active.uclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.uclk_khz;

	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz > in_out->programming->min_clocks.dcn4x.active.fclk_khz)
		in_out->programming->min_clocks.dcn4x.active.fclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.fclk_khz;

	if (in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz > in_out->programming->min_clocks.dcn4x.active.dcfclk_khz)
		in_out->programming->min_clocks.dcn4x.active.dcfclk_khz = in_out->programming->min_clocks.dcn4x.svp_prefetch.dcfclk_khz;

	// Apply downspread and ramping margin to dispclk:
	dispclk_khz = mode_support_result->global.dispclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0) * (1.0 + in_out->ip->dispclk_ramp_margin_percent / 100.0);
	// the ramping margin should not make dispclk exceed the maximum dispclk speed:
	dispclk_khz = math_min2(dispclk_khz, in_out->min_clk_table->max_clocks_khz.dispclk);
	// but the required dispclk (with downspread alone) may still exceed the maximum dispclk speed:
	dispclk_khz = math_max2(dispclk_khz, mode_support_result->global.dispclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	// DPP ref is always set to the max of all DPP clocks
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		if (in_out->programming->min_clocks.dcn4x.dpprefclk_khz < mode_support_result->per_plane[i].dppclk_khz)
			in_out->programming->min_clocks.dcn4x.dpprefclk_khz = mode_support_result->per_plane[i].dppclk_khz;
	}
	in_out->programming->min_clocks.dcn4x.dpprefclk_khz = (unsigned long)(in_out->programming->min_clocks.dcn4x.dpprefclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	// DTB ref is always set to the max of all DTB clocks
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		if (in_out->programming->min_clocks.dcn4x.dtbrefclk_khz < mode_support_result->per_stream[i].dtbclk_khz)
			in_out->programming->min_clocks.dcn4x.dtbrefclk_khz = mode_support_result->per_stream[i].dtbclk_khz;
	}
	in_out->programming->min_clocks.dcn4x.dtbrefclk_khz = (unsigned long)(in_out->programming->min_clocks.dcn4x.dtbrefclk_khz * (1 + in_out->soc_bb->dcn_downspread_percent / 100.0));

	if (in_out->soc_bb->no_dfs) {
		round_to_non_dfs_granularity((unsigned long)dispclk_khz, in_out->programming->min_clocks.dcn4x.dpprefclk_khz, in_out->programming->min_clocks.dcn4x.dtbrefclk_khz,
			&in_out->programming->min_clocks.dcn4x.dispclk_khz, &in_out->programming->min_clocks.dcn4x.dpprefclk_khz, &in_out->programming->min_clocks.dcn4x.dtbrefclk_khz);
	} else {
		add_margin_and_round_to_dfs_granularity(dispclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dispclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dispclk_did);

		add_margin_and_round_to_dfs_granularity(in_out->programming->min_clocks.dcn4x.dpprefclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dpprefclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dpprefclk_did);

		add_margin_and_round_to_dfs_granularity(in_out->programming->min_clocks.dcn4x.dtbrefclk_khz, 0.0,
			(unsigned long)(in_out->soc_bb->dispclk_dppclk_vco_speed_mhz * 1000), &in_out->programming->min_clocks.dcn4x.dtbrefclk_khz, &in_out->programming->min_clocks.dcn4x.divider_ids.dtbrefclk_did);
	}

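	/*
	 * DPPCLK is generated from DPPREFCLK through a DTO with 255 phase
	 * steps, so each plane's request (plus downspread) is quantized up
	 * to the next multiple of dpprefclk/255.
	 */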
	for (i = 0; i < DML2_MAX_DCN_PIPES; i++) {
		in_out->programming->plane_programming[i].min_clocks.dcn4x.dppclk_khz = (unsigned long)(in_out->programming->min_clocks.dcn4x.dpprefclk_khz / 255.0
			* math_ceil2(in_out->display_cfg->mode_support_result.per_plane[i].dppclk_khz * (1.0 + in_out->soc_bb->dcn_downspread_percent / 100.0) * 255.0 / in_out->programming->min_clocks.dcn4x.dpprefclk_khz, 1.0));
	}

	in_out->programming->min_clocks.dcn4x.deepsleep_dcfclk_khz = mode_support_result->global.dcfclk_deepsleep_khz;
	in_out->programming->min_clocks.dcn4x.socclk_khz = mode_support_result->global.socclk_khz;

	result = map_min_clocks_to_dpm(mode_support_result, in_out->programming, &in_out->soc_bb->clk_table);

	// By default, no power management features are enabled
	in_out->programming->fclk_pstate_supported = false;
	in_out->programming->uclk_pstate_supported = false;

	return result;
}

bool dpmm_dcn3_map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	bool result;

	result = map_mode_to_soc_dpm(in_out);

	// Check if any features can be enabled by nominal vblank idle time
	determine_power_management_features_with_vblank_only(in_out);

	// Check if any features can be enabled in vactive/vblank
	determine_power_management_features_with_vactive_and_vblank(in_out);

	// Check if any features can be enabled via FAMS
	determine_power_management_features_with_fams(in_out);

	if (in_out->programming->uclk_pstate_supported == false)
		clamp_uclk_to_max(in_out);

	if (in_out->programming->fclk_pstate_supported == false)
		clamp_fclk_to_max(in_out);

	return result;
}

bool dpmm_dcn4_map_mode_to_soc_dpm(struct dml2_dpmm_map_mode_to_soc_dpm_params_in_out *in_out)
{
	bool result;
	int displays_without_vactive_margin_mask = 0x0;
	int min_idle_us = 0;

	result = map_mode_to_soc_dpm(in_out);

	if (in_out->display_cfg->stage3.success)
		in_out->programming->uclk_pstate_supported = true;

	displays_without_vactive_margin_mask =
		get_displays_without_vactive_margin_mask(in_out, (int)(in_out->soc_bb->power_management_parameters.fclk_change_blackout_us));

	if (displays_without_vactive_margin_mask == 0) {
		in_out->programming->fclk_pstate_supported = true;
	} else {
		if (are_timings_trivially_synchronizable(&in_out->programming->display_config, displays_without_vactive_margin_mask)) {
			min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, displays_without_vactive_margin_mask);

			if (min_idle_us >= in_out->soc_bb->power_management_parameters.fclk_change_blackout_us)
				in_out->programming->fclk_pstate_supported = true;
		}
	}

	if (in_out->programming->uclk_pstate_supported == false)
		clamp_uclk_to_max(in_out);

	if (in_out->programming->fclk_pstate_supported == false)
		clamp_fclk_to_max(in_out);

	min_idle_us = find_smallest_idle_time_in_vblank_us(in_out, 0xFF);
	if (in_out->soc_bb->power_management_parameters.stutter_enter_plus_exit_latency_us > 0 &&
		min_idle_us >= in_out->soc_bb->power_management_parameters.stutter_enter_plus_exit_latency_us)
		in_out->programming->stutter.supported_in_blank = true;
	else
		in_out->programming->stutter.supported_in_blank = false;

	// TODO: Fix me Sam
	if (in_out->soc_bb->power_management_parameters.z8_min_idle_time > 0 &&
		in_out->programming->informative.power_management.z8.stutter_period >= in_out->soc_bb->power_management_parameters.z8_min_idle_time)
		in_out->programming->z8_stutter.meets_eco = true;
	else
		in_out->programming->z8_stutter.meets_eco = false;

	if (in_out->soc_bb->power_management_parameters.z8_stutter_exit_latency_us > 0 &&
		min_idle_us >= in_out->soc_bb->power_management_parameters.z8_stutter_exit_latency_us)
		in_out->programming->z8_stutter.supported_in_blank = true;
	else
		in_out->programming->z8_stutter.supported_in_blank = false;

	return result;
}

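/*
 * Translate the microsecond watermarks computed by the core into DCHUBBUB
 * register values, which count DLG reference clock cycles:
 * cycles = time_us * refclk_mhz. For example, a 10 us watermark at a
 * (hypothetical) 500 MHz refclk programs 5000. Sets A and B are currently
 * programmed identically.
 */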
bool dpmm_dcn4_map_watermarks(struct dml2_dpmm_map_watermarks_params_in_out *in_out)
{
	const struct dml2_display_cfg *display_cfg = &in_out->display_cfg->display_config;
	const struct dml2_core_internal_display_mode_lib *mode_lib = &in_out->core->clean_me_up.mode_lib;
	struct dml2_dchub_global_register_set *dchubbub_regs = &in_out->programming->global_regs;

	double refclk_freq_in_mhz = (display_cfg->overrides.hw.dlg_ref_clk_mhz > 0) ? (double)display_cfg->overrides.hw.dlg_ref_clk_mhz : mode_lib->soc.dchub_refclk_mhz;

	/* set A */
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].fclk_pstate = (unsigned int)(mode_lib->mp.Watermark.FCLKChangeWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].sr_enter = (unsigned int)(mode_lib->mp.Watermark.StutterEnterPlusExitWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].sr_exit = (unsigned int)(mode_lib->mp.Watermark.StutterExitWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].temp_read_or_ppt = (unsigned int)(mode_lib->mp.Watermark.g6_temp_read_watermark_us * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].uclk_pstate = (unsigned int)(mode_lib->mp.Watermark.DRAMClockChangeWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].urgent = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].usr = (unsigned int)(mode_lib->mp.Watermark.USRRetrainingWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].refcyc_per_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].refcyc_per_meta_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_flip = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthImmediateFlip * 1000);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_nom = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidth * 1000);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_A].frac_urg_bw_mall = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthMALL * 1000);

	/* set B */
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].fclk_pstate = (unsigned int)(mode_lib->mp.Watermark.FCLKChangeWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].sr_enter = (unsigned int)(mode_lib->mp.Watermark.StutterEnterPlusExitWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].sr_exit = (unsigned int)(mode_lib->mp.Watermark.StutterExitWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].temp_read_or_ppt = (unsigned int)(mode_lib->mp.Watermark.g6_temp_read_watermark_us * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].uclk_pstate = (unsigned int)(mode_lib->mp.Watermark.DRAMClockChangeWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].urgent = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].usr = (unsigned int)(mode_lib->mp.Watermark.USRRetrainingWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].refcyc_per_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].refcyc_per_meta_trip_to_mem = (unsigned int)(mode_lib->mp.Watermark.UrgentWatermark * refclk_freq_in_mhz);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_flip = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthImmediateFlip * 1000);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_nom = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidth * 1000);
	dchubbub_regs->wm_regs[DML2_DCHUB_WATERMARK_SET_B].frac_urg_bw_mall = (unsigned int)(mode_lib->mp.FractionOfUrgentBandwidthMALL * 1000);

	dchubbub_regs->num_watermark_sets = 2;

	return true;
}