This source file includes the following definitions:
- dce_dmcu_init
- dce_dmcu_load_iram
- dce_get_dmcu_psr_state
- dce_dmcu_set_psr_enable
- dce_dmcu_setup_psr
- dce_is_dmcu_initialized
- dce_psr_wait_loop
- dce_get_psr_wait_loop
- dcn10_get_dmcu_version
- dcn10_dmcu_enable_fractional_pwm
- dcn10_dmcu_init
- dcn10_dmcu_load_iram
- dcn10_get_dmcu_psr_state
- dcn10_dmcu_set_psr_enable
- dcn10_dmcu_setup_psr
- dcn10_psr_wait_loop
- dcn10_get_psr_wait_loop
- dcn10_is_dmcu_initialized
- dcn20_lock_phy
- dcn20_unlock_phy
- dce_dmcu_construct
- dce_dmcu_create
- dcn10_dmcu_create
- dcn20_dmcu_create
- dce_dmcu_destroy
#include <linux/delay.h>
#include <linux/slab.h>

#include "core_types.h"
#include "link_encoder.h"
#include "dce_dmcu.h"
#include "dm_services.h"
#include "reg_helper.h"
#include "fixed31_32.h"
#include "dc.h"

#define TO_DCE_DMCU(dmcu)\
	container_of(dmcu, struct dce_dmcu, base)

#define REG(reg) \
	(dmcu_dce->regs->reg)

#undef FN
#define FN(reg_name, field_name) \
	dmcu_dce->dmcu_shift->field_name, dmcu_dce->dmcu_mask->field_name

#define CTX \
	dmcu_dce->base.ctx

#define PSR_ENABLE 0x20
#define PSR_EXIT 0x21
#define PSR_SET 0x23
#define PSR_SET_WAITLOOP 0x31
#define MCP_INIT_DMCU 0x88
#define MCP_INIT_IRAM 0x89
#define MCP_SYNC_PHY_LOCK 0x90
#define MCP_SYNC_PHY_UNLOCK 0x91
#define MCP_BL_SET_PWM_FRAC 0x6A
#define MASTER_COMM_CNTL_REG__MASTER_COMM_INTERRUPT_MASK 0x00000001L
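
/*
 * Note (summarized from the command sequences below in this file, not from
 * an external spec): every request to the DMCU microcontroller here follows
 * the same mailbox handshake:
 *   1. poll MASTER_COMM_CNTL_REG.MASTER_COMM_INTERRUPT until it reads 0
 *      (microcontroller ready for a new command);
 *   2. write any payload into MASTER_COMM_DATA_REG1..3;
 *   3. write the command opcode (one of the PSR_* or MCP_* defines above)
 *      into MASTER_COMM_CMD_REG.MASTER_COMM_CMD_REG_BYTE0;
 *   4. set MASTER_COMM_INTERRUPT to 1 to notify the microcontroller;
 *   5. optionally poll MASTER_COMM_INTERRUPT back to 0 to wait for
 *      completion.
 */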

static bool dce_dmcu_init(struct dmcu *dmcu)
{
	/* Nothing to initialize for DCE */
	return true;
}

bool dce_dmcu_load_iram(struct dmcu *dmcu,
		unsigned int start_offset,
		const char *src,
		unsigned int bytes)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int count = 0;

	/* Enable write access to IRAM */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_WR_ADDR_AUTO_INC, 1);

	REG_WAIT(DCI_MEM_PWR_STATUS, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	REG_WRITE(DMCU_IRAM_WR_CTRL, start_offset);

	for (count = 0; count < bytes; count++)
		REG_WRITE(DMCU_IRAM_WR_DATA, src[count]);

	/* Disable write access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_WR_ADDR_AUTO_INC, 0);

	return true;
}

static void dce_get_dmcu_psr_state(struct dmcu *dmcu, uint32_t *psr_state)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	uint32_t psr_state_offset = 0xf0;

	/* Enable host access to IRAM */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 1);

	REG_WAIT(DCI_MEM_PWR_STATUS, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Write read address to DMCU_IRAM_RD_CTRL */
	REG_WRITE(DMCU_IRAM_RD_CTRL, psr_state_offset);

	/* Read the PSR state from DMCU_IRAM_RD_DATA */
	*psr_state = REG_READ(DMCU_IRAM_RD_DATA);

	/* Disable host access to IRAM after finishing, to allow dynamic
	 * sleep state
	 */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 0);
}

static void dce_dmcu_set_psr_enable(struct dmcu *dmcu, bool enable, bool wait)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	unsigned int retryCount;
	uint32_t psr_state = 0;

	/* Wait until the DMCU is ready to accept a new command */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* Select the enable or exit command */
	if (enable)
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_ENABLE);
	else
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_EXIT);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
	if (wait == true) {
		/* Poll the PSR state until it reflects the requested transition */
		for (retryCount = 0; retryCount <= 100; retryCount++) {
			dce_get_dmcu_psr_state(dmcu, &psr_state);
			if (enable) {
				if (psr_state != 0)
					break;
			} else {
				if (psr_state == 0)
					break;
			}
			udelay(10);
		}
	}
}

static bool dce_dmcu_setup_psr(struct dmcu *dmcu,
		struct dc_link *link,
		struct psr_context *psr_context)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	union dce_dmcu_psr_config_data_reg1 masterCmdData1;
	union dce_dmcu_psr_config_data_reg2 masterCmdData2;
	union dce_dmcu_psr_config_data_reg3 masterCmdData3;

	link->link_enc->funcs->psr_program_dp_dphy_fast_training(link->link_enc,
			psr_context->psrExitLinkTrainingRequired);

	/* Disable all static screen interrupts to the microcontroller, then
	 * enable only the one for the controller driving the PSR display.
	 */
	REG_UPDATE_4(DMCU_INTERRUPT_TO_UC_EN_MASK,
			STATIC_SCREEN1_INT_TO_UC_EN, 0,
			STATIC_SCREEN2_INT_TO_UC_EN, 0,
			STATIC_SCREEN3_INT_TO_UC_EN, 0,
			STATIC_SCREEN4_INT_TO_UC_EN, 0);

	switch (psr_context->controllerId) {
	case 1:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	case 2:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN2_INT_TO_UC_EN, 1);
		break;
	case 3:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN3_INT_TO_UC_EN, 1);
		break;
	case 4:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN4_INT_TO_UC_EN, 1);
		break;
	case 5:
		/* No static screen interrupt to enable for this controller */
		break;
	case 6:
		/* No static screen interrupt to enable for this controller */
		break;
	default:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	}

	link->link_enc->funcs->psr_program_secondary_packet(link->link_enc,
			psr_context->sdpTransmitLineNumDeadline);

	/* Wait until the DMCU is ready to accept a new command */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* Pack the PSR host configuration into the data registers */
	masterCmdData1.u32All = 0;
	masterCmdData1.bits.timehyst_frames = psr_context->timehyst_frames;
	masterCmdData1.bits.hyst_lines = psr_context->hyst_lines;
	masterCmdData1.bits.rfb_update_auto_en =
			psr_context->rfb_update_auto_en;
	masterCmdData1.bits.dp_port_num = psr_context->transmitterId;
	masterCmdData1.bits.dcp_sel = psr_context->controllerId;
	masterCmdData1.bits.phy_type = psr_context->phyType;
	masterCmdData1.bits.frame_cap_ind =
			psr_context->psrFrameCaptureIndicationReq;
	masterCmdData1.bits.aux_chan = psr_context->channel;
	masterCmdData1.bits.aux_repeat = psr_context->aux_repeats;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1),
			masterCmdData1.u32All);

	masterCmdData2.u32All = 0;
	masterCmdData2.bits.dig_fe = psr_context->engineId;
	masterCmdData2.bits.dig_be = psr_context->transmitterId;
	masterCmdData2.bits.skip_wait_for_pll_lock =
			psr_context->skipPsrWaitForPllLock;
	masterCmdData2.bits.frame_delay = psr_context->frame_delay;
	masterCmdData2.bits.smu_phy_id = psr_context->smuPhyId;
	masterCmdData2.bits.num_of_controllers =
			psr_context->numberOfControllers;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG2),
			masterCmdData2.u32All);

	masterCmdData3.u32All = 0;
	masterCmdData3.bits.psr_level = psr_context->psr_level.u32all;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG3),
			masterCmdData3.u32All);

	/* Set the PSR_SET command */
	REG_UPDATE(MASTER_COMM_CMD_REG,
			MASTER_COMM_CMD_REG_BYTE0, PSR_SET);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	return true;
}

static bool dce_is_dmcu_initialized(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_uc_reset;

	/* Check whether the microcontroller is held in reset */
	REG_GET(DMCU_STATUS, UC_IN_RESET, &dmcu_uc_reset);

	/* If the microcontroller is in reset, it is not initialized */
	if (dmcu_uc_reset)
		return false;

	return true;
}

static void dce_psr_wait_loop(
	struct dmcu *dmcu,
	unsigned int wait_loop_number)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	union dce_dmcu_psr_config_data_wait_loop_reg1 masterCmdData1;

	if (dmcu->cached_wait_loop_number == wait_loop_number)
		return;

	/* DMCU is not running */
	if (!dce_is_dmcu_initialized(dmcu))
		return;

	/* Wait until the DMCU is ready to accept a new command */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	masterCmdData1.u32 = 0;
	masterCmdData1.bits.wait_loop = wait_loop_number;
	dmcu->cached_wait_loop_number = wait_loop_number;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);

	/* Set the PSR_SET_WAITLOOP command */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, PSR_SET_WAITLOOP);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
}

static void dce_get_psr_wait_loop(
		struct dmcu *dmcu, unsigned int *psr_wait_loop_number)
{
	*psr_wait_loop_number = dmcu->cached_wait_loop_number;
	return;
}

#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
static void dcn10_get_dmcu_version(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	uint32_t dmcu_version_offset = 0xf1;

	/* Enable host access to IRAM with auto-incrementing reads */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_RD_ADDR_AUTO_INC, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Read the firmware version fields starting at the version offset */
	REG_WRITE(DMCU_IRAM_RD_CTRL, dmcu_version_offset);
	dmcu->dmcu_version.interface_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.abm_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.psr_version = REG_READ(DMCU_IRAM_RD_DATA);
	dmcu->dmcu_version.build_version = ((REG_READ(DMCU_IRAM_RD_DATA) << 8) |
						REG_READ(DMCU_IRAM_RD_DATA));

	/* Disable host access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_RD_ADDR_AUTO_INC, 0);
}

static void dcn10_dmcu_enable_fractional_pwm(struct dmcu *dmcu,
		uint32_t fractional_pwm)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* Wait until the microcontroller is ready to process an interrupt */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	/* Set the fractional PWM enable/disable payload */
	REG_WRITE(MASTER_COMM_DATA_REG1, fractional_pwm);

	/* Set the command to enable or disable fractional PWM */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_BL_SET_PWM_FRAC);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Ensure the command has been executed before continuing */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);
}

static bool dcn10_dmcu_init(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	const struct dc_config *config = &dmcu->ctx->dc->config;
	bool status = false;

	/* DC_DMCU_SCRATCH reports the firmware state:
	 * DMCU_UNLOADED             - firmware not loaded
	 * DMCU_LOADED_UNINITIALIZED - firmware loaded but not yet initialized
	 * DMCU_RUNNING              - firmware already initialized
	 */
	dmcu->dmcu_state = REG_READ(DC_DMCU_SCRATCH);

	switch (dmcu->dmcu_state) {
	case DMCU_UNLOADED:
		status = false;
		break;
	case DMCU_LOADED_UNINITIALIZED:
		/* Wait until the microcontroller is ready to process an interrupt */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

		/* Set initial ramping boundary value */
		REG_WRITE(MASTER_COMM_DATA_REG1, 0xFFFF);

		/* Set backlight ramping stepsize */
		REG_WRITE(MASTER_COMM_DATA_REG2, abm_gain_stepsize);

		/* Set command to initialize the microcontroller */
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_INIT_DMCU);

		/* Notify the microcontroller of the new command */
		REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

		/* Ensure the command has been executed before continuing */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

		/* Re-read the state to confirm initialization succeeded */
		dmcu->dmcu_state = REG_READ(DC_DMCU_SCRATCH);

		if (dmcu->dmcu_state == DMCU_RUNNING) {
			/* Retrieve and cache the DMCU firmware version */
			dcn10_get_dmcu_version(dmcu);

			/* Tell the DMCU whether to use fractional PWM */
			dcn10_dmcu_enable_fractional_pwm(dmcu,
				(config->disable_fractional_pwm == false) ? 1 : 0);
			status = true;
		} else {
			status = false;
		}

		break;
	case DMCU_RUNNING:
		status = true;
		break;
	default:
		status = false;
		break;
	}

	return status;
}

static bool dcn10_dmcu_load_iram(struct dmcu *dmcu,
		unsigned int start_offset,
		const char *src,
		unsigned int bytes)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int count = 0;

	/* If the microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* Enable write access to IRAM */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 1,
			IRAM_WR_ADDR_AUTO_INC, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	REG_WRITE(DMCU_IRAM_WR_CTRL, start_offset);

	for (count = 0; count < bytes; count++)
		REG_WRITE(DMCU_IRAM_WR_DATA, src[count]);

	/* Disable write access to IRAM to allow dynamic sleep state */
	REG_UPDATE_2(DMCU_RAM_ACCESS_CTRL,
			IRAM_HOST_ACCESS_EN, 0,
			IRAM_WR_ADDR_AUTO_INC, 0);

	/* Wait until the microcontroller is ready to process an interrupt */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	/* Set command to signal that IRAM has been loaded */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
			MCP_INIT_IRAM);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Ensure the command has been executed before continuing */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 100, 800);

	return true;
}

static void dcn10_get_dmcu_psr_state(struct dmcu *dmcu, uint32_t *psr_state)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	uint32_t psr_state_offset = 0xf0;

	/* If the microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	/* Enable host access to IRAM */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 1);

	REG_WAIT(DMU_MEM_PWR_CNTL, DMCU_IRAM_MEM_PWR_STATE, 0, 2, 10);

	/* Write read address to DMCU_IRAM_RD_CTRL */
	REG_WRITE(DMCU_IRAM_RD_CTRL, psr_state_offset);

	/* Read the PSR state from DMCU_IRAM_RD_DATA */
	*psr_state = REG_READ(DMCU_IRAM_RD_DATA);

	/* Disable host access to IRAM after finishing, to allow dynamic
	 * sleep state
	 */
	REG_UPDATE(DMCU_RAM_ACCESS_CTRL, IRAM_HOST_ACCESS_EN, 0);
}

static void dcn10_dmcu_set_psr_enable(struct dmcu *dmcu, bool enable, bool wait)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	unsigned int retryCount;
	uint32_t psr_state = 0;

	/* If the microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	dcn10_get_dmcu_psr_state(dmcu, &psr_state);
	if (psr_state == 0 && !enable)
		return;

	/* Wait until the DMCU is ready to accept a new command */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* Select the enable or exit command */
	if (enable)
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_ENABLE);
	else
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0,
				PSR_EXIT);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* The loop below waits up to 1000 x 500us = 500 ms. Exiting PSR may
	 * need a frame or two to power back up, so poll the state rather
	 * than assuming the command took effect immediately.
	 */
	if (wait == true) {
		for (retryCount = 0; retryCount <= 1000; retryCount++) {
			dcn10_get_dmcu_psr_state(dmcu, &psr_state);
			if (enable) {
				if (psr_state != 0)
					break;
			} else {
				if (psr_state == 0)
					break;
			}
			udelay(500);
		}

		/* Assert if the maximum retry count was hit */
		if (retryCount >= 1000)
			ASSERT(0);
	}
}

static bool dcn10_dmcu_setup_psr(struct dmcu *dmcu,
		struct dc_link *link,
		struct psr_context *psr_context)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	unsigned int dmcu_max_retry_on_wait_reg_ready = 801;
	unsigned int dmcu_wait_reg_ready_interval = 100;

	union dce_dmcu_psr_config_data_reg1 masterCmdData1;
	union dce_dmcu_psr_config_data_reg2 masterCmdData2;
	union dce_dmcu_psr_config_data_reg3 masterCmdData3;

	/* If the microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	link->link_enc->funcs->psr_program_dp_dphy_fast_training(link->link_enc,
			psr_context->psrExitLinkTrainingRequired);

	/* Disable all static screen interrupts to the microcontroller, then
	 * enable only the one for the controller driving the PSR display.
	 */
	REG_UPDATE_4(DMCU_INTERRUPT_TO_UC_EN_MASK,
			STATIC_SCREEN1_INT_TO_UC_EN, 0,
			STATIC_SCREEN2_INT_TO_UC_EN, 0,
			STATIC_SCREEN3_INT_TO_UC_EN, 0,
			STATIC_SCREEN4_INT_TO_UC_EN, 0);

	switch (psr_context->controllerId) {
	case 1:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	case 2:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN2_INT_TO_UC_EN, 1);
		break;
	case 3:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN3_INT_TO_UC_EN, 1);
		break;
	case 4:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN4_INT_TO_UC_EN, 1);
		break;
	case 5:
		/* No static screen interrupt to enable for this controller */
		break;
	case 6:
		/* No static screen interrupt to enable for this controller */
		break;
	default:
		REG_UPDATE(DMCU_INTERRUPT_TO_UC_EN_MASK,
				STATIC_SCREEN1_INT_TO_UC_EN, 1);
		break;
	}

	link->link_enc->funcs->psr_program_secondary_packet(link->link_enc,
			psr_context->sdpTransmitLineNumDeadline);

	if (psr_context->allow_smu_optimizations)
		REG_UPDATE(SMU_INTERRUPT_CONTROL, DC_SMU_INT_ENABLE, 1);

	/* Wait until the DMCU is ready to accept a new command */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0,
			dmcu_wait_reg_ready_interval,
			dmcu_max_retry_on_wait_reg_ready);

	/* Pack the PSR host configuration into the data registers */
	masterCmdData1.u32All = 0;
	masterCmdData1.bits.timehyst_frames = psr_context->timehyst_frames;
	masterCmdData1.bits.hyst_lines = psr_context->hyst_lines;
	masterCmdData1.bits.rfb_update_auto_en =
			psr_context->rfb_update_auto_en;
	masterCmdData1.bits.dp_port_num = psr_context->transmitterId;
	masterCmdData1.bits.dcp_sel = psr_context->controllerId;
	masterCmdData1.bits.phy_type = psr_context->phyType;
	masterCmdData1.bits.frame_cap_ind =
			psr_context->psrFrameCaptureIndicationReq;
	masterCmdData1.bits.aux_chan = psr_context->channel;
	masterCmdData1.bits.aux_repeat = psr_context->aux_repeats;
	masterCmdData1.bits.allow_smu_optimizations = psr_context->allow_smu_optimizations;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1),
			masterCmdData1.u32All);

	masterCmdData2.u32All = 0;
	masterCmdData2.bits.dig_fe = psr_context->engineId;
	masterCmdData2.bits.dig_be = psr_context->transmitterId;
	masterCmdData2.bits.skip_wait_for_pll_lock =
			psr_context->skipPsrWaitForPllLock;
	masterCmdData2.bits.frame_delay = psr_context->frame_delay;
	masterCmdData2.bits.smu_phy_id = psr_context->smuPhyId;
	masterCmdData2.bits.num_of_controllers =
			psr_context->numberOfControllers;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG2),
			masterCmdData2.u32All);

	masterCmdData3.u32All = 0;
	masterCmdData3.bits.psr_level = psr_context->psr_level.u32all;
	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG3),
			masterCmdData3.u32All);

	/* Set the PSR_SET command */
	REG_UPDATE(MASTER_COMM_CMD_REG,
			MASTER_COMM_CMD_REG_BYTE0, PSR_SET);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Wait for the command to complete */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}

static void dcn10_psr_wait_loop(
	struct dmcu *dmcu,
	unsigned int wait_loop_number)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);
	union dce_dmcu_psr_config_data_wait_loop_reg1 masterCmdData1;

	/* If the microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return;

	if (wait_loop_number != 0) {
		/* Wait until the DMCU is ready to accept a new command */
		REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

		masterCmdData1.u32 = 0;
		masterCmdData1.bits.wait_loop = wait_loop_number;
		dmcu->cached_wait_loop_number = wait_loop_number;
		dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);

		/* Set the PSR_SET_WAITLOOP command */
		REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, PSR_SET_WAITLOOP);

		/* Notify the microcontroller of the new command */
		REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);
	}
}

static void dcn10_get_psr_wait_loop(
		struct dmcu *dmcu, unsigned int *psr_wait_loop_number)
{
	*psr_wait_loop_number = dmcu->cached_wait_loop_number;
	return;
}

static bool dcn10_is_dmcu_initialized(struct dmcu *dmcu)
{
	/* The microcontroller is initialized only when it is running */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;
	return true;
}

#endif

#if defined(CONFIG_DRM_AMD_DC_DCN2_0)

static bool dcn20_lock_phy(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* If the microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* Wait until the DMCU is ready to accept a new command */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	/* Set the PHY lock command */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, MCP_SYNC_PHY_LOCK);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Ensure the command has been executed before continuing */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}

static bool dcn20_unlock_phy(struct dmcu *dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(dmcu);

	/* If the microcontroller is not running, do nothing */
	if (dmcu->dmcu_state != DMCU_RUNNING)
		return false;

	/* Wait until the DMCU is ready to accept a new command */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	/* Set the PHY unlock command */
	REG_UPDATE(MASTER_COMM_CMD_REG, MASTER_COMM_CMD_REG_BYTE0, MCP_SYNC_PHY_UNLOCK);

	/* Notify the microcontroller of the new command */
	REG_UPDATE(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 1);

	/* Ensure the command has been executed before continuing */
	REG_WAIT(MASTER_COMM_CNTL_REG, MASTER_COMM_INTERRUPT, 0, 1, 10000);

	return true;
}

#endif

static const struct dmcu_funcs dce_funcs = {
	.dmcu_init = dce_dmcu_init,
	.load_iram = dce_dmcu_load_iram,
	.set_psr_enable = dce_dmcu_set_psr_enable,
	.setup_psr = dce_dmcu_setup_psr,
	.get_psr_state = dce_get_dmcu_psr_state,
	.set_psr_wait_loop = dce_psr_wait_loop,
	.get_psr_wait_loop = dce_get_psr_wait_loop,
	.is_dmcu_initialized = dce_is_dmcu_initialized
};

#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
static const struct dmcu_funcs dcn10_funcs = {
	.dmcu_init = dcn10_dmcu_init,
	.load_iram = dcn10_dmcu_load_iram,
	.set_psr_enable = dcn10_dmcu_set_psr_enable,
	.setup_psr = dcn10_dmcu_setup_psr,
	.get_psr_state = dcn10_get_dmcu_psr_state,
	.set_psr_wait_loop = dcn10_psr_wait_loop,
	.get_psr_wait_loop = dcn10_get_psr_wait_loop,
	.is_dmcu_initialized = dcn10_is_dmcu_initialized
};
#endif

#if defined(CONFIG_DRM_AMD_DC_DCN2_0)
static const struct dmcu_funcs dcn20_funcs = {
	.dmcu_init = dcn10_dmcu_init,
	.load_iram = dcn10_dmcu_load_iram,
	.set_psr_enable = dcn10_dmcu_set_psr_enable,
	.setup_psr = dcn10_dmcu_setup_psr,
	.get_psr_state = dcn10_get_dmcu_psr_state,
	.set_psr_wait_loop = dcn10_psr_wait_loop,
	.get_psr_wait_loop = dcn10_get_psr_wait_loop,
	.is_dmcu_initialized = dcn10_is_dmcu_initialized,
	.lock_phy = dcn20_lock_phy,
	.unlock_phy = dcn20_unlock_phy
};
#endif

static void dce_dmcu_construct(
	struct dce_dmcu *dmcu_dce,
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dmcu *base = &dmcu_dce->base;

	base->ctx = ctx;
	base->funcs = &dce_funcs;
	base->cached_wait_loop_number = 0;

	dmcu_dce->regs = regs;
	dmcu_dce->dmcu_shift = dmcu_shift;
	dmcu_dce->dmcu_mask = dmcu_mask;
}

struct dmcu *dce_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dce_funcs;

	return &dmcu_dce->base;
}

#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
struct dmcu *dcn10_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dcn10_funcs;

	return &dmcu_dce->base;
}
#endif

#if defined(CONFIG_DRM_AMD_DC_DCN2_0)
struct dmcu *dcn20_dmcu_create(
	struct dc_context *ctx,
	const struct dce_dmcu_registers *regs,
	const struct dce_dmcu_shift *dmcu_shift,
	const struct dce_dmcu_mask *dmcu_mask)
{
	struct dce_dmcu *dmcu_dce = kzalloc(sizeof(*dmcu_dce), GFP_KERNEL);

	if (dmcu_dce == NULL) {
		BREAK_TO_DEBUGGER();
		return NULL;
	}

	dce_dmcu_construct(
		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);

	dmcu_dce->base.funcs = &dcn20_funcs;

	return &dmcu_dce->base;
}
#endif

void dce_dmcu_destroy(struct dmcu **dmcu)
{
	struct dce_dmcu *dmcu_dce = TO_DCE_DMCU(*dmcu);

	/* Make sure PSR is disabled before tearing down the DMCU */
	if (dmcu_dce->base.dmcu_state == DMCU_RUNNING)
		dmcu_dce->base.funcs->set_psr_enable(*dmcu, false, true);

	kfree(dmcu_dce);
	*dmcu = NULL;
}