This source file includes following definitions.
- rn_update_clocks
- get_vco_frequency_from_reg
- rn_dump_clk_registers_internal
- rn_dump_clk_registers
- rn_get_clk_states
- rn_enable_pme_wa
- build_watermark_ranges
- clk_mgr_helper_populate_bw_params
- rn_clk_mgr_construct
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26 #include "dccg.h"
27 #include "clk_mgr_internal.h"
28
29
30 #include "dcn20/dcn20_clk_mgr.h"
31 #include "rn_clk_mgr.h"
32
33
34 #include "dce100/dce_clk_mgr.h"
35 #include "rn_clk_mgr_vbios_smu.h"
36 #include "reg_helper.h"
37 #include "core_types.h"
38 #include "dm_helpers.h"
39
40 #include "atomfirmware.h"
41 #include "clk/clk_10_0_2_offset.h"
42 #include "clk/clk_10_0_2_sh_mask.h"
43 #include "renoir_ip_offset.h"
44
45
46
47
48 #define LPDDR_MEM_RETRAIN_LATENCY 4.977
49
50
51
52 #define REG(reg_name) \
53 (CLK_BASE.instance[0].segment[mm ## reg_name ## _BASE_IDX] + mm ## reg_name)
54
/*
 * Program the DCN clocks for a new dc_state through the VBIOS/SMU
 * message interface.
 *
 * safe_to_lower: when false only clock increases are applied; decreases
 * are deferred until the caller indicates lowering is safe (see
 * should_set_clock()).  Every value actually sent is also cached in
 * clk_mgr_base->clks so subsequent calls can compare against it.
 */
void rn_update_clocks(struct clk_mgr *clk_mgr_base,
		struct dc_state *context,
		bool safe_to_lower)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
	struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
	struct dc *dc = clk_mgr_base->ctx->dc;
	int display_count;
	bool update_dppclk = false;
	bool update_dispclk = false;
	bool enter_display_off = false;
	bool dpp_clock_lowered = false;
	struct dmcu *dmcu = clk_mgr_base->ctx->dc->res_pool->dmcu;

	display_count = clk_mgr_helper_get_active_display_cnt(dc, context);

	if (display_count == 0)
		enter_display_off = true;

	/*
	 * Report the active display count to the SMU.  The gate means a
	 * non-zero count is sent on the !safe_to_lower pass and a zero
	 * count (display off) only once lowering is safe.
	 */
	if (enter_display_off == safe_to_lower) {
		rn_vbios_smu_set_display_count(clk_mgr, display_count);
	}

	/* phyclk: cache and forward when the change direction is permitted */
	if (should_set_clock(safe_to_lower, new_clocks->phyclk_khz, clk_mgr_base->clks.phyclk_khz)) {
		clk_mgr_base->clks.phyclk_khz = new_clocks->phyclk_khz;
		rn_vbios_smu_set_phyclk(clk_mgr, clk_mgr_base->clks.phyclk_khz);
	}

	/* dcfclk: sent as a hard minimum to the SMU */
	if (should_set_clock(safe_to_lower, new_clocks->dcfclk_khz, clk_mgr_base->clks.dcfclk_khz)) {
		clk_mgr_base->clks.dcfclk_khz = new_clocks->dcfclk_khz;
		rn_vbios_smu_set_hard_min_dcfclk(clk_mgr, clk_mgr_base->clks.dcfclk_khz);
	}

	/* deep-sleep dcfclk: sent as a minimum for the deep-sleep state */
	if (should_set_clock(safe_to_lower,
			new_clocks->dcfclk_deep_sleep_khz, clk_mgr_base->clks.dcfclk_deep_sleep_khz)) {
		clk_mgr_base->clks.dcfclk_deep_sleep_khz = new_clocks->dcfclk_deep_sleep_khz;
		rn_vbios_smu_set_min_deep_sleep_dcfclk(clk_mgr, clk_mgr_base->clks.dcfclk_deep_sleep_khz);
	}

	/* enforce a 100 MHz dppclk floor, except in diagnostics environments */
	if (!IS_DIAG_DC(dc->ctx->dce_environment)) {
		if (new_clocks->dppclk_khz < 100000)
			new_clocks->dppclk_khz = 100000;
	}

	if (should_set_clock(safe_to_lower, new_clocks->dppclk_khz, clk_mgr->base.clks.dppclk_khz)) {
		if (clk_mgr->base.clks.dppclk_khz > new_clocks->dppclk_khz)
			dpp_clock_lowered = true;
		clk_mgr_base->clks.dppclk_khz = new_clocks->dppclk_khz;
		update_dppclk = true;
	}

	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) {
		clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz;
		rn_vbios_smu_set_dispclk(clk_mgr, clk_mgr_base->clks.dispclk_khz);

		update_dispclk = true;
	}

	/*
	 * Ordering matters: when dppclk goes DOWN, reprogram the DPP DTO
	 * dividers first and lower the clock after; when it goes UP, raise
	 * the clock first and update the DTOs after.
	 */
	if (dpp_clock_lowered) {
		/* dtos before dppclk when lowering */
		dcn20_update_clocks_update_dpp_dto(clk_mgr, context);
		rn_vbios_smu_set_dppclk(clk_mgr, clk_mgr_base->clks.dppclk_khz);
	} else {
		/* dppclk before dtos when raising */
		if (update_dppclk || update_dispclk)
			rn_vbios_smu_set_dppclk(clk_mgr, clk_mgr_base->clks.dppclk_khz);
		if (update_dppclk)
			dcn20_update_clocks_update_dpp_dto(clk_mgr, context);
	}

	if (update_dispclk &&
			dmcu && dmcu->funcs->is_dmcu_initialized(dmcu)) {
		/*
		 * Keep the DMCU PSR wait loop in step with the new dispclk.
		 * dispclk_khz / 1000 converts to MHz; the additional /7 is
		 * presumably the DMCU wait-loop granularity — TODO confirm.
		 */
		dmcu->funcs->set_psr_wait_loop(dmcu,
				clk_mgr_base->clks.dispclk_khz / 1000 / 7);
	}
}
133
134
/*
 * Derive the DFS PLL (VCO) frequency, in kHz, from the integer and
 * fractional feedback-multiplier fields of CLK1_CLK_PLL_REQ:
 *   vco_khz = (FbMult_int + FbMult_frac/2^16) * dfs_ref_freq_khz
 */
static int get_vco_frequency_from_reg(struct clk_mgr_internal *clk_mgr)
{
	/* fixed31.32 accumulator for the feedback multiplier */
	struct fixed31_32 pll_req;
	unsigned int fbmult_frac_val = 0;
	unsigned int fbmult_int_val = 0;

	/* read the integer and fractional parts of the feedback multiplier */
	REG_GET(CLK1_CLK_PLL_REQ, FbMult_frac, &fbmult_frac_val);
	REG_GET(CLK1_CLK_PLL_REQ, FbMult_int, &fbmult_int_val);

	/* integer part goes into the integer half of the fixed31.32 value */
	pll_req = dc_fixpt_from_int(fbmult_int_val);

	/*
	 * Shift the fractional field into the upper bits of the 32-bit
	 * fractional half of the fixed-point value.
	 * NOTE(review): the << 16 assumes FbMult_frac is a 16-bit register
	 * field — confirm against the CLK register spec.
	 */
	pll_req.value |= fbmult_frac_val << 16;

	/* scale by the DFS reference frequency (kHz) */
	pll_req = dc_fixpt_mul_int(pll_req, clk_mgr->dfs_ref_freq_khz);

	/* truncate to an integer kHz result */
	return dc_fixpt_floor(pll_req);
}
165
/*
 * Snapshot the raw CLK1 clock registers into 'internal'.
 * Per the decode in rn_dump_clk_registers(): CLK3 = dcfclk (plus its
 * deep-sleep controls), CLK2 = dprefclk, CLK1 = dppclk, CLK0 = dispclk.
 */
static void rn_dump_clk_registers_internal(struct rn_clk_internal *internal, struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

	internal->CLK1_CLK3_CURRENT_CNT = REG_READ(CLK1_CLK3_CURRENT_CNT);
	internal->CLK1_CLK3_BYPASS_CNTL = REG_READ(CLK1_CLK3_BYPASS_CNTL);

	/* dcfclk deep-sleep divider and allow flag */
	internal->CLK1_CLK3_DS_CNTL = REG_READ(CLK1_CLK3_DS_CNTL);
	internal->CLK1_CLK3_ALLOW_DS = REG_READ(CLK1_CLK3_ALLOW_DS);

	internal->CLK1_CLK1_CURRENT_CNT = REG_READ(CLK1_CLK1_CURRENT_CNT);
	internal->CLK1_CLK1_BYPASS_CNTL = REG_READ(CLK1_CLK1_BYPASS_CNTL);

	internal->CLK1_CLK2_CURRENT_CNT = REG_READ(CLK1_CLK2_CURRENT_CNT);
	internal->CLK1_CLK2_BYPASS_CNTL = REG_READ(CLK1_CLK2_BYPASS_CNTL);

	internal->CLK1_CLK0_CURRENT_CNT = REG_READ(CLK1_CLK0_CURRENT_CNT);
	internal->CLK1_CLK0_BYPASS_CNTL = REG_READ(CLK1_CLK0_BYPASS_CNTL);
}
185
186
/*
 * Read the current clock registers, decode them into *regs_and_bypass,
 * and — when log_info->enabled is set — append a CSV-style dump (decoded
 * values, then a "SPLIT" marker, then the raw register values) to the
 * caller-supplied log buffer, advancing pBuf and the running char count.
 */
static void rn_dump_clk_registers(struct clk_state_registers_and_bypass *regs_and_bypass,
		struct clk_mgr *clk_mgr_base, struct clk_log_info *log_info)
{
	struct rn_clk_internal internal = {0};
	/* decode table for the 3-bit bypass-select field (values 0..4) */
	char *bypass_clks[5] = {"0x0 DFS", "0x1 REFCLK", "0x2 ERROR", "0x3 400 FCH", "0x4 600 FCH"};
	unsigned int chars_printed = 0;
	unsigned int remaining_buffer = log_info->bufSize;

	rn_dump_clk_registers_internal(&internal, clk_mgr_base);

	/*
	 * Raw counter values are scaled down by 10; presumably the registers
	 * report in units of 10 kHz — TODO confirm register encoding.
	 */
	regs_and_bypass->dcfclk = internal.CLK1_CLK3_CURRENT_CNT / 10;
	regs_and_bypass->dcf_deep_sleep_divider = internal.CLK1_CLK3_DS_CNTL / 10;
	regs_and_bypass->dcf_deep_sleep_allow = internal.CLK1_CLK3_ALLOW_DS;
	regs_and_bypass->dprefclk = internal.CLK1_CLK2_CURRENT_CNT / 10;
	regs_and_bypass->dispclk = internal.CLK1_CLK0_CURRENT_CNT / 10;
	regs_and_bypass->dppclk = internal.CLK1_CLK1_CURRENT_CNT / 10;

	/*
	 * Low 3 bits select the bypass source; clamp out-of-table values to
	 * 0 (DFS) so they can index bypass_clks[] safely.
	 * NOTE(review): if these struct fields are unsigned, the "< 0" half
	 * of each test can never be true — only "> 4" is effective.
	 */
	regs_and_bypass->dppclk_bypass = internal.CLK1_CLK1_BYPASS_CNTL & 0x0007;
	if (regs_and_bypass->dppclk_bypass < 0 || regs_and_bypass->dppclk_bypass > 4)
		regs_and_bypass->dppclk_bypass = 0;
	regs_and_bypass->dcfclk_bypass = internal.CLK1_CLK3_BYPASS_CNTL & 0x0007;
	if (regs_and_bypass->dcfclk_bypass < 0 || regs_and_bypass->dcfclk_bypass > 4)
		regs_and_bypass->dcfclk_bypass = 0;
	regs_and_bypass->dispclk_bypass = internal.CLK1_CLK0_BYPASS_CNTL & 0x0007;
	if (regs_and_bypass->dispclk_bypass < 0 || regs_and_bypass->dispclk_bypass > 4)
		regs_and_bypass->dispclk_bypass = 0;
	regs_and_bypass->dprefclk_bypass = internal.CLK1_CLK2_BYPASS_CNTL & 0x0007;
	if (regs_and_bypass->dprefclk_bypass < 0 || regs_and_bypass->dprefclk_bypass > 4)
		regs_and_bypass->dprefclk_bypass = 0;

	if (log_info->enabled) {
		/* section 1: decoded clock values, one CSV row per clock */
		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "clk_type,clk_value,deepsleep_cntl,deepsleep_allow,bypass\n");
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "dcfclk,%d,%d,%d,%s\n",
			regs_and_bypass->dcfclk,
			regs_and_bypass->dcf_deep_sleep_divider,
			regs_and_bypass->dcf_deep_sleep_allow,
			bypass_clks[(int) regs_and_bypass->dcfclk_bypass]);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "dprefclk,%d,N/A,N/A,%s\n",
			regs_and_bypass->dprefclk,
			bypass_clks[(int) regs_and_bypass->dprefclk_bypass]);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "dispclk,%d,N/A,N/A,%s\n",
			regs_and_bypass->dispclk,
			bypass_clks[(int) regs_and_bypass->dispclk_bypass]);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		/* "SPLIT" separates decoded values from the raw register dump */
		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "SPLIT\n");
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		/* section 2: raw register values */
		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "reg_name,value,clk_type\n");
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK3_CURRENT_CNT,%d,dcfclk\n",
			internal.CLK1_CLK3_CURRENT_CNT);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK3_DS_CNTL,%d,dcf_deep_sleep_divider\n",
			internal.CLK1_CLK3_DS_CNTL);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK3_ALLOW_DS,%d,dcf_deep_sleep_allow\n",
			internal.CLK1_CLK3_ALLOW_DS);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK2_CURRENT_CNT,%d,dprefclk\n",
			internal.CLK1_CLK2_CURRENT_CNT);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK0_CURRENT_CNT,%d,dispclk\n",
			internal.CLK1_CLK0_CURRENT_CNT);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK1_CURRENT_CNT,%d,dppclk\n",
			internal.CLK1_CLK1_CURRENT_CNT);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK3_BYPASS_CNTL,%d,dcfclk_bypass\n",
			internal.CLK1_CLK3_BYPASS_CNTL);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK2_BYPASS_CNTL,%d,dprefclk_bypass\n",
			internal.CLK1_CLK2_BYPASS_CNTL);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK0_BYPASS_CNTL,%d,dispclk_bypass\n",
			internal.CLK1_CLK0_BYPASS_CNTL);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;

		chars_printed = snprintf_count(log_info->pBuf, remaining_buffer, "CLK1_CLK1_BYPASS_CNTL,%d,dppclk_bypass\n",
			internal.CLK1_CLK1_BYPASS_CNTL);
		remaining_buffer -= chars_printed;
		*log_info->sum_chars_printed += chars_printed;
		log_info->pBuf += chars_printed;
	}
}
319
320
321 void rn_get_clk_states(struct clk_mgr *clk_mgr_base, struct clk_states *s)
322 {
323 struct clk_state_registers_and_bypass sb = { 0 };
324 struct clk_log_info log_info = { 0 };
325
326 rn_dump_clk_registers(&sb, clk_mgr_base, &log_info);
327
328 s->dprefclk_khz = sb.dprefclk;
329 }
330
/* Request the PME workaround from the SMU via the VBIOS message path. */
void rn_enable_pme_wa(struct clk_mgr *clk_mgr_base)
{
	rn_vbios_smu_enable_pme_wa(TO_CLK_MGR_INTERNAL(clk_mgr_base));
}
337
/*
 * Clock-manager vtable for Renoir (DCN 2.1).
 * Deliberately non-const: rn_clk_mgr_construct() replaces .update_clocks
 * with dcn2_update_clocks_fpga on FPGA (Maximus) environments.
 */
static struct clk_mgr_funcs dcn21_funcs = {
	.get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz,
	.update_clocks = rn_update_clocks,
	.init_clocks = dcn2_init_clocks,
	.enable_pme_wa = rn_enable_pme_wa,

};
345
/*
 * Default bandwidth parameters for Renoir.
 * Used as-is when no pp_smu interface is present; otherwise
 * clk_mgr_helper_populate_bw_params() overwrites the clock table (and
 * revalidates the watermark entries) from the SMU's DPM table — see
 * rn_clk_mgr_construct().
 */
struct clk_bw_params rn_bw_params = {
	.vram_type = Ddr4MemType,
	.num_channels = 1,
	.clk_table = {
		/* voltage left 0 in the defaults; real values come from the SMU */
		.entries = {
			{
				.voltage = 0,
				.dcfclk_mhz = 400,
				.fclk_mhz = 400,
				.memclk_mhz = 800,
				.socclk_mhz = 0,
			},
			{
				.voltage = 0,
				.dcfclk_mhz = 483,
				.fclk_mhz = 800,
				.memclk_mhz = 1600,
				.socclk_mhz = 0,
			},
			{
				.voltage = 0,
				.dcfclk_mhz = 602,
				.fclk_mhz = 1067,
				.memclk_mhz = 1067,
				.socclk_mhz = 0,
			},
			{
				.voltage = 0,
				.dcfclk_mhz = 738,
				.fclk_mhz = 1333,
				.memclk_mhz = 1600,
				.socclk_mhz = 0,
			},
		},

		.num_entries = 4,
	},

	/* all four watermark sets default to p-state-change type */
	.wm_table = {
		.entries = {
			{
				.wm_inst = WM_A,
				.wm_type = WM_TYPE_PSTATE_CHG,
				.pstate_latency_us = 23.84,
				.valid = true,
			},
			{
				.wm_inst = WM_B,
				.wm_type = WM_TYPE_PSTATE_CHG,
				.pstate_latency_us = 23.84,
				.valid = true,
			},
			{
				.wm_inst = WM_C,
				.wm_type = WM_TYPE_PSTATE_CHG,
				.pstate_latency_us = 23.84,
				.valid = true,
			},
			{
				.wm_inst = WM_D,
				.wm_type = WM_TYPE_PSTATE_CHG,
				.pstate_latency_us = 23.84,
				.valid = true,
			},
		},
	}
};
413
414 void build_watermark_ranges(struct clk_bw_params *bw_params, struct pp_smu_wm_range_sets *ranges)
415 {
416 int i, num_valid_sets;
417
418 num_valid_sets = 0;
419
420 for (i = 0; i < WM_SET_COUNT; i++) {
421
422 if (!bw_params->wm_table.entries[i].valid)
423 continue;
424
425 ranges->reader_wm_sets[num_valid_sets].wm_inst = bw_params->wm_table.entries[i].wm_inst;
426 ranges->reader_wm_sets[num_valid_sets].wm_type = bw_params->wm_table.entries[i].wm_type;;
427
428 ranges->reader_wm_sets[num_valid_sets].min_drain_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MIN;
429 ranges->reader_wm_sets[num_valid_sets].max_drain_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MAX;
430
431
432 if (ranges->reader_wm_sets[num_valid_sets].wm_type == WM_TYPE_PSTATE_CHG) {
433 if (i == 0)
434 ranges->reader_wm_sets[num_valid_sets].min_fill_clk_mhz = 0;
435 else {
436
437 ranges->reader_wm_sets[num_valid_sets].min_fill_clk_mhz = bw_params->clk_table.entries[i - 1].fclk_mhz + 1;
438 }
439 ranges->reader_wm_sets[num_valid_sets].max_fill_clk_mhz = bw_params->clk_table.entries[i].fclk_mhz;
440
441 } else {
442
443 ranges->reader_wm_sets[num_valid_sets].min_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MIN;
444 ranges->reader_wm_sets[num_valid_sets].max_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MAX;
445
446
447 ranges->reader_wm_sets[num_valid_sets - 1].max_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MAX;
448 }
449 num_valid_sets++;
450 }
451
452 ASSERT(num_valid_sets != 0);
453 ranges->num_reader_wm_sets = num_valid_sets;
454
455
456 ranges->reader_wm_sets[0].min_drain_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MIN;
457 ranges->reader_wm_sets[0].min_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MIN;
458 ranges->reader_wm_sets[ranges->num_reader_wm_sets - 1].max_drain_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MAX;
459 ranges->reader_wm_sets[ranges->num_reader_wm_sets - 1].max_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MAX;
460
461
462 ranges->num_writer_wm_sets = 1;
463 ranges->writer_wm_sets[0].wm_inst = WM_A;
464 ranges->writer_wm_sets[0].min_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MIN;
465 ranges->writer_wm_sets[0].max_fill_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MAX;
466 ranges->writer_wm_sets[0].min_drain_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MIN;
467 ranges->writer_wm_sets[0].max_drain_clk_mhz = PP_SMU_WM_SET_RANGE_CLK_UNCONSTRAINED_MAX;
468
469 }
470
/*
 * Overwrite bw_params with the DPM clock table read from the SMU.
 *
 * The clock table is copied level-for-level until the first fclk level
 * with Freq == 0; watermark entries are marked valid for levels that
 * have a non-zero fclk.  On LPDDR4, WM_D is repurposed as the memory
 * retraining watermark.
 */
void clk_mgr_helper_populate_bw_params(struct clk_bw_params *bw_params, struct dpm_clocks *clock_table, struct hw_asic_id *asic_id)
{
	int i;

	ASSERT(PP_SMU_NUM_FCLK_DPM_LEVELS <= MAX_NUM_DPM_LVL);

	for (i = 0; i < PP_SMU_NUM_FCLK_DPM_LEVELS; i++) {
		/* Freq == 0 marks the end of the populated DPM levels */
		if (clock_table->FClocks[i].Freq == 0)
			break;

		bw_params->clk_table.entries[i].dcfclk_mhz = clock_table->DcfClocks[i].Freq;
		bw_params->clk_table.entries[i].fclk_mhz = clock_table->FClocks[i].Freq;
		bw_params->clk_table.entries[i].memclk_mhz = clock_table->MemClocks[i].Freq;
		bw_params->clk_table.entries[i].socclk_mhz = clock_table->SocClocks[i].Freq;
		/* voltage is taken from the fclk level */
		bw_params->clk_table.entries[i].voltage = clock_table->FClocks[i].Vol;
	}
	bw_params->clk_table.num_entries = i;

	bw_params->vram_type = asic_id->vram_type;
	bw_params->num_channels = asic_id->vram_width / DDR4_DRAM_WIDTH;

	/*
	 * Revalidate watermark entries against the populated fclk levels.
	 * NOTE(review): indexes FClocks[] by watermark slot — assumes
	 * WM_SET_COUNT <= PP_SMU_NUM_FCLK_DPM_LEVELS; confirm.
	 */
	for (i = 0; i < WM_SET_COUNT; i++) {
		bw_params->wm_table.entries[i].wm_inst = i;

		if (clock_table->FClocks[i].Freq == 0) {
			bw_params->wm_table.entries[i].valid = false;
			continue;
		}

		bw_params->wm_table.entries[i].wm_type = WM_TYPE_PSTATE_CHG;
		bw_params->wm_table.entries[i].valid = true;
	}

	if (bw_params->vram_type == LpDdr4MemType) {
		/*
		 * LPDDR4 needs a dedicated memory-retraining watermark;
		 * repurpose WM_D for it with the retraining latency.
		 */
		bw_params->wm_table.entries[WM_D].pstate_latency_us = LPDDR_MEM_RETRAIN_LATENCY;
		bw_params->wm_table.entries[WM_D].wm_inst = WM_D;
		bw_params->wm_table.entries[WM_D].wm_type = WM_TYPE_RETRAINING;
		bw_params->wm_table.entries[WM_D].valid = true;
	}

}
515
/*
 * One-time construction of the Renoir clock manager.
 *
 * Initializes clk_mgr_internal state, reads the VCO and dprefclk values
 * from hardware (fixed defaults on FPGA), pulls the DPM clock table from
 * the SMU when pp_smu is available, and pushes the resulting watermark
 * ranges back to the SMU.
 */
void rn_clk_mgr_construct(
		struct dc_context *ctx,
		struct clk_mgr_internal *clk_mgr,
		struct pp_smu_funcs *pp_smu,
		struct dccg *dccg)
{
	struct dc_debug_options *debug = &ctx->dc->debug;
	struct dpm_clocks clock_table = { 0 };
	struct clk_state_registers_and_bypass s = { 0 };

	clk_mgr->base.ctx = ctx;
	clk_mgr->base.funcs = &dcn21_funcs;

	clk_mgr->pp_smu = pp_smu;

	clk_mgr->dccg = dccg;
	clk_mgr->dfs_bypass_disp_clk = 0;

	clk_mgr->dprefclk_ss_percentage = 0;
	clk_mgr->dprefclk_ss_divider = 1000;
	clk_mgr->ss_on_dprefclk = false;
	clk_mgr->dfs_ref_freq_khz = 48000;	/* 48 MHz DFS reference clock */

	clk_mgr->smu_ver = rn_vbios_smu_get_smu_version(clk_mgr);

	if (IS_FPGA_MAXIMUS_DC(ctx->dce_environment)) {
		/* FPGA: no real SMU/PLL — patch in the FPGA update path and
		 * use fixed clock values */
		dcn21_funcs.update_clocks = dcn2_update_clocks_fpga;
		clk_mgr->dentist_vco_freq_khz = 3600000;
		clk_mgr->base.dprefclk_khz = 600000;
	} else {
		struct clk_log_info log_info = {0};

		/* derive the VCO frequency from the PLL request register */
		clk_mgr->dentist_vco_freq_khz = get_vco_frequency_from_reg(clk_mgr);

		/* fall back to 3.6 GHz if the register readback yields nothing */
		if (clk_mgr->dentist_vco_freq_khz == 0)
			clk_mgr->dentist_vco_freq_khz = 3600000;

		/* read back dprefclk from hardware (logging disabled) */
		rn_dump_clk_registers(&s, &clk_mgr->base, &log_info);
		clk_mgr->base.dprefclk_khz = s.dprefclk;

		/*
		 * Force dprefclk to the expected 600 MHz.
		 * NOTE(review): ASSERT(1) is a no-op — likely intended as
		 * ASSERT(0) to flag an unexpected readback; confirm intent.
		 */
		if (clk_mgr->base.dprefclk_khz != 600000) {
			clk_mgr->base.dprefclk_khz = 600000;
			ASSERT(1);
		}

		/* NOTE(review): effectively dead — the block above already
		 * forces any non-600000 value, including 0 */
		if (clk_mgr->base.dprefclk_khz == 0)
			clk_mgr->base.dprefclk_khz = 600000;
	}

	dce_clock_read_ss_info(clk_mgr);

	/* start from the static defaults ... */
	clk_mgr->base.bw_params = &rn_bw_params;

	if (pp_smu) {
		/* ... and overwrite them with the SMU's DPM clock table */
		pp_smu->rn_funcs.get_dpm_clock_table(&pp_smu->rn_funcs.pp_smu, &clock_table);
		clk_mgr_helper_populate_bw_params(clk_mgr->base.bw_params, &clock_table, &ctx->asic_id);
	}

	/* tell the SMU which watermark set to use for which clock range,
	 * unless disabled for debug */
	if (!debug->disable_pplib_wm_range) {
		struct pp_smu_wm_range_sets ranges = {0};

		build_watermark_ranges(clk_mgr->base.bw_params, &ranges);

		if (pp_smu && pp_smu->rn_funcs.set_wm_ranges)
			pp_smu->rn_funcs.set_wm_ranges(&pp_smu->rn_funcs.pp_smu, &ranges);
	}

	/* allow the 48 MHz TMDP refclk to power down unless disabled for debug */
	if (!debug->disable_48mhz_pwrdwn)
		rn_vbios_smu_enable_48mhz_tmdp_refclk_pwrdwn(clk_mgr);
}
596