This source file includes the following definitions.
- avivo_crtc_load_lut
- dce4_crtc_load_lut
- dce5_crtc_load_lut
- legacy_crtc_load_lut
- radeon_crtc_load_lut
- radeon_crtc_gamma_set
- radeon_crtc_destroy
- radeon_unpin_work_func
- radeon_crtc_handle_vblank
- radeon_crtc_handle_flip
- radeon_flip_work_func
- radeon_crtc_page_flip_target
- radeon_crtc_set_config
- radeon_crtc_init
- radeon_print_display_setup
- radeon_setup_enc_conn
- avivo_reduce_ratio
- avivo_get_fb_ref_div
- radeon_compute_pll_avivo
- radeon_div
- radeon_compute_pll_legacy
- radeon_framebuffer_init
- radeon_user_framebuffer_create
- radeon_modeset_create_props
- radeon_update_display_priority
- radeon_afmt_init
- radeon_afmt_fini
- radeon_modeset_init
- radeon_modeset_fini
- is_hdtv_mode
- radeon_crtc_scaling_mode_fixup
- radeon_get_crtc_scanoutpos
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27 #include <linux/pm_runtime.h>
28 #include <linux/gcd.h>
29
30 #include <asm/div64.h>
31
32 #include <drm/drm_crtc_helper.h>
33 #include <drm/drm_device.h>
34 #include <drm/drm_drv.h>
35 #include <drm/drm_edid.h>
36 #include <drm/drm_fb_helper.h>
37 #include <drm/drm_fourcc.h>
38 #include <drm/drm_gem_framebuffer_helper.h>
39 #include <drm/drm_pci.h>
40 #include <drm/drm_plane_helper.h>
41 #include <drm/drm_probe_helper.h>
42 #include <drm/drm_vblank.h>
43 #include <drm/radeon_drm.h>
44
45 #include "atom.h"
46 #include "radeon.h"
47
/*
 * avivo_crtc_load_lut - upload the gamma LUT for an AVIVO display controller
 *
 * Programs the per-CRTC legacy LUT (LUTA) with the 256-entry gamma table
 * stored in crtc->gamma_store, then selects that LUT for the graphics plane.
 */
static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u16 *r, *g, *b;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

	/* black level at 0 for all three channels */
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	/* white level at full scale */
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	/* select this CRTC's LUT for read/write access */
	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
	/* gamma_store holds R, G and B tables back to back */
	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;
	for (i = 0; i < 256; i++) {
		/* pack the top 10 bits of each 16-bit channel into one
		 * 30-bit color word: R in 29:20, G in 19:10, B in 9:0 */
		WREG32(AVIVO_DC_LUT_30_COLOR,
		       ((*r++ & 0xffc0) << 14) |
		       ((*g++ & 0xffc0) << 4) |
		       (*b++ >> 6));
	}


	WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id, ~1);
}
85
/*
 * dce4_crtc_load_lut - upload the gamma LUT on DCE4 (Evergreen) parts
 *
 * Same scheme as the AVIVO variant, but with per-CRTC (crtc_offset based)
 * LUT access registers instead of a shared RW_SELECT mux.
 */
static void dce4_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u16 *r, *g, *b;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	/* black level at 0, white level at full scale */
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	/* gamma_store holds R, G and B tables back to back */
	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;
	for (i = 0; i < 256; i++) {
		/* 10 bits per component: R in 29:20, G in 19:10, B in 9:0 */
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       ((*r++ & 0xffc0) << 14) |
		       ((*g++ & 0xffc0) << 4) |
		       (*b++ >> 6));
	}
}
119
/*
 * dce5_crtc_load_lut - upload the gamma LUT on DCE5+ (NI/SI/CIK) parts
 *
 * In addition to loading the 256-entry legacy LUT, this bypasses the
 * input CSC, prescale, degamma, gamut remap and regamma stages so that
 * only the legacy LUT and the configured output CSC affect the pixels.
 */
static void dce5_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u16 *r, *g, *b;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);

	/* NOTE(review): short settle delay before reprogramming the color
	 * pipeline — exact hardware reason not documented here; confirm
	 * before removing. */
	msleep(10);

	/* bypass the input color pipeline stages ahead of the LUT */
	WREG32(NI_INPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS) |
		NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS)));
	WREG32(NI_PRESCALE_GRPH_CONTROL + radeon_crtc->crtc_offset,
	       NI_GRPH_PRESCALE_BYPASS);
	WREG32(NI_PRESCALE_OVL_CONTROL + radeon_crtc->crtc_offset,
	       NI_OVL_PRESCALE_BYPASS);
	WREG32(NI_INPUT_GAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT) |
		NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT)));

	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	/* black level at 0, white level at full scale */
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	/* gamma_store holds R, G and B tables back to back */
	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;
	for (i = 0; i < 256; i++) {
		/* 10 bits per component: R in 29:20, G in 19:10, B in 9:0 */
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       ((*r++ & 0xffc0) << 14) |
		       ((*g++ & 0xffc0) << 4) |
		       (*b++ >> 6));
	}

	/* bypass the output stages after the LUT (except the output CSC,
	 * which keeps the mode chosen for this CRTC) */
	WREG32(NI_DEGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS)));
	WREG32(NI_GAMUT_REMAP_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS) |
		NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS)));
	WREG32(NI_REGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS) |
		NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS)));
	WREG32(NI_OUTPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_OUTPUT_CSC_GRPH_MODE(radeon_crtc->output_csc) |
		NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS)));

	/* NOTE(review): undocumented per-CRTC register 0x6940 cleared here —
	 * purpose unknown from this file; confirm against register specs. */
	WREG32(0x6940 + radeon_crtc->crtc_offset, 0);
	if (ASIC_IS_DCE8(rdev)) {
		/* DCE8 only: enable alpha blending for the HW cursor */
		WREG32(CIK_ALPHA_CONTROL + radeon_crtc->crtc_offset,
		       CIK_CURSOR_ALPHA_BLND_ENA);
	}
}
191
/*
 * legacy_crtc_load_lut - upload the gamma LUT on pre-AVIVO ASICs
 *
 * Selects which CRTC's palette is reachable through the shared palette
 * index/data registers via DAC_CNTL2, then writes the 256 entries.
 */
static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u16 *r, *g, *b;
	int i;
	uint32_t dac2_cntl;

	/* route palette accesses to this CRTC's palette RAM */
	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
	if (radeon_crtc->crtc_id == 0)
		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
	else
		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
	WREG32(RADEON_DAC_CNTL2, dac2_cntl);

	WREG8(RADEON_PALETTE_INDEX, 0);
	/* gamma_store holds R, G and B tables back to back */
	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;
	for (i = 0; i < 256; i++) {
		/* 10 bits per component: R in 29:20, G in 19:10, B in 9:0 */
		WREG32(RADEON_PALETTE_30_DATA,
		       ((*r++ & 0xffc0) << 14) |
		       ((*g++ & 0xffc0) << 4) |
		       (*b++ >> 6));
	}
}
219
220 void radeon_crtc_load_lut(struct drm_crtc *crtc)
221 {
222 struct drm_device *dev = crtc->dev;
223 struct radeon_device *rdev = dev->dev_private;
224
225 if (!crtc->enabled)
226 return;
227
228 if (ASIC_IS_DCE5(rdev))
229 dce5_crtc_load_lut(crtc);
230 else if (ASIC_IS_DCE4(rdev))
231 dce4_crtc_load_lut(crtc);
232 else if (ASIC_IS_AVIVO(rdev))
233 avivo_crtc_load_lut(crtc);
234 else
235 legacy_crtc_load_lut(crtc);
236 }
237
/*
 * drm_crtc_funcs.gamma_set hook.  The red/green/blue arguments are
 * ignored because the DRM core has already copied the new table into
 * crtc->gamma_store, which is what radeon_crtc_load_lut() reads.
 * Always succeeds.
 */
static int radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
				 u16 *blue, uint32_t size,
				 struct drm_modeset_acquire_ctx *ctx)
{
	radeon_crtc_load_lut(crtc);

	return 0;
}
246
/*
 * drm_crtc_funcs.destroy hook: unregister the CRTC from the DRM core,
 * then tear down the per-CRTC flip workqueue and free the wrapper
 * allocated in radeon_crtc_init().  Order matters: the workqueue must
 * be destroyed before the structure that embeds its pointer is freed.
 */
static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	destroy_workqueue(radeon_crtc->flip_queue);
	kfree(radeon_crtc);
}
255
256
257
258
259
260
261
262
/*
 * Deferred cleanup after a completed page flip: unpin the buffer object
 * that was previously scanned out and drop the GEM reference taken in
 * radeon_crtc_page_flip_target().  Runs from the per-CRTC flip_queue.
 * Frees the radeon_flip_work, so the caller must not touch it afterwards.
 */
static void radeon_unpin_work_func(struct work_struct *__work)
{
	struct radeon_flip_work *work =
		container_of(__work, struct radeon_flip_work, unpin_work);
	int r;

	/* the BO must be reserved before it can be unpinned */
	r = radeon_bo_reserve(work->old_rbo, false);
	if (likely(r == 0)) {
		r = radeon_bo_unpin(work->old_rbo);
		if (unlikely(r != 0)) {
			DRM_ERROR("failed to unpin buffer after flip\n");
		}
		radeon_bo_unreserve(work->old_rbo);
	} else
		DRM_ERROR("failed to reserve buffer after flip\n");

	/* drop the reference taken when the flip was queued */
	drm_gem_object_put_unlocked(&work->old_rbo->tbo.base);
	kfree(work);
}
283
/*
 * radeon_crtc_handle_vblank - vblank-interrupt side of page flip completion
 *
 * Called from the vblank IRQ handler.  If a flip has been submitted on
 * this CRTC, decide whether the hardware has (or is certain to have)
 * latched the new base address; if so, complete the flip via
 * radeon_crtc_handle_flip().  flip_status is protected by
 * dev->event_lock.
 */
void radeon_crtc_handle_vblank(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	unsigned long flags;
	u32 update_pending;
	int vpos, hpos;

	/* can happen during initialization, before the CRTCs exist */
	if (radeon_crtc == NULL)
		return;

	/* With radeon_use_pflipirq == 2 on DCE4+ hardware, flip completion
	 * is handled exclusively by the dedicated pflip interrupt, so the
	 * vblank path must not complete flips itself. */
	if ((radeon_use_pflipirq == 2) && ASIC_IS_DCE4(rdev))
		return;

	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
		/* no flip submitted on this CRTC; nothing to complete */
		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
				 "RADEON_FLIP_SUBMITTED(%d)\n",
				 radeon_crtc->flip_status,
				 RADEON_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	/* ask the hardware whether the new surface base is still pending */
	update_pending = radeon_page_flip_pending(rdev, crtc_id);

	/* Even if the hardware still reports the flip as pending, treat it
	 * as done when the scanout position (distance to vblank start) shows
	 * we are inside the vblank area in which the flip must latch:
	 * vpos >= 0 && hpos < 0 means within the pre-vblank lead-in; on
	 * pre-AVIVO parts hpos >= 0 inside vblank also qualifies. */
	if (update_pending &&
	    (DRM_SCANOUTPOS_VALID &
	     radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id,
					GET_DISTANCE_TO_VBLANKSTART,
					&vpos, &hpos, NULL, NULL,
					&rdev->mode_info.crtcs[crtc_id]->base.hwmode)) &&
	    ((vpos >= 0 && hpos < 0) || (hpos >= 0 && !ASIC_IS_AVIVO(rdev)))) {
		/* flip is certain to complete in this vblank interval */
		update_pending = 0;
	}
	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
	if (!update_pending)
		radeon_crtc_handle_flip(rdev, crtc_id);
}
356
357
358
359
360
361
362
363
364
/*
 * radeon_crtc_handle_flip - finish a submitted page flip
 *
 * Called once the new surface address has been latched by the hardware.
 * Sends the pending vblank event (if any), drops the vblank and pflip
 * interrupt references taken when the flip was programmed, and schedules
 * the old buffer for unpinning on the flip workqueue.  flip_status and
 * flip_work are protected by dev->event_lock.
 */
void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	struct radeon_flip_work *work;
	unsigned long flags;

	/* can happen during initialization, before the CRTCs exist */
	if (radeon_crtc == NULL)
		return;

	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	work = radeon_crtc->flip_work;
	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
		/* nothing submitted (or already completed) — bail out */
		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
				 "RADEON_FLIP_SUBMITTED(%d)\n",
				 radeon_crtc->flip_status,
				 RADEON_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	/* flip is done, detach the work from the CRTC under the lock */
	radeon_crtc->flip_status = RADEON_FLIP_NONE;
	radeon_crtc->flip_work = NULL;

	/* deliver the userspace completion event, if one was requested */
	if (work->event)
		drm_crtc_send_vblank_event(&radeon_crtc->base, work->event);

	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);

	/* release the references taken in radeon_flip_work_func() and
	 * radeon_crtc_page_flip_target() */
	drm_crtc_vblank_put(&radeon_crtc->base);
	radeon_irq_kms_pflip_irq_put(rdev, work->crtc_id);
	queue_work(radeon_crtc->flip_queue, &work->unpin_work);
}
400
401
402
403
404
405
406
407
/*
 * radeon_flip_work_func - worker that actually programs a queued page flip
 *
 * Runs on the per-CRTC flip_queue.  Waits for the fence protecting the
 * new buffer, waits until the CRTC is outside the vertical blank (and,
 * on AVIVO+, until the requested target vblank count is reached), then
 * writes the new base address to the hardware and marks the flip as
 * SUBMITTED under dev->event_lock.
 */
static void radeon_flip_work_func(struct work_struct *__work)
{
	struct radeon_flip_work *work =
		container_of(__work, struct radeon_flip_work, flip_work);
	struct radeon_device *rdev = work->rdev;
	struct drm_device *dev = rdev->ddev;
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[work->crtc_id];

	struct drm_crtc *crtc = &radeon_crtc->base;
	unsigned long flags;
	int r;
	int vpos, hpos;

	down_read(&rdev->exclusive_lock);
	if (work->fence) {
		struct radeon_fence *fence;

		fence = to_radeon_fence(work->fence);
		if (fence && fence->rdev == rdev) {
			/* fence from this GPU: a -EDEADLK from the wait means
			 * the GPU is hung, so reset it (retrying on -EAGAIN)
			 * with the exclusive lock dropped */
			r = radeon_fence_wait(fence, false);
			if (r == -EDEADLK) {
				up_read(&rdev->exclusive_lock);
				do {
					r = radeon_gpu_reset(rdev);
				} while (r == -EAGAIN);
				down_read(&rdev->exclusive_lock);
			}
		} else
			/* foreign fence: plain dma-fence wait */
			r = dma_fence_wait(work->fence, false);

		if (r)
			DRM_ERROR("failed to wait on page flip fence (%d)!\n", r);

		/* continue with the flip even if the wait failed; the error
		 * has been reported and the fence is no longer needed */
		dma_fence_put(work->fence);
		work->fence = NULL;
	}

	/* Poll until the CRTC is out of the vblank period (so the flip
	 * doesn't race the current scanout) and, on AVIVO+ parts, until
	 * the requested target vblank count has been reached.  The counter
	 * comparison is done as a signed difference to tolerate wraparound. */
	while (radeon_crtc->enabled &&
	       (radeon_get_crtc_scanoutpos(dev, work->crtc_id, 0,
					   &vpos, &hpos, NULL, NULL,
					   &crtc->hwmode)
		& (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK)) ==
	       (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK) &&
	       (!ASIC_IS_AVIVO(rdev) ||
		((int) (work->target_vblank -
			dev->driver->get_vblank_counter(dev, work->crtc_id)) > 0)))
		usleep_range(1000, 2000);

	/* dev->event_lock also protects radeon_crtc->flip_status */
	spin_lock_irqsave(&crtc->dev->event_lock, flags);

	/* enable the pageflip interrupt for completion handling */
	radeon_irq_kms_pflip_irq_get(rdev, radeon_crtc->crtc_id);

	/* program the new scanout base (MMIO) */
	radeon_page_flip(rdev, radeon_crtc->crtc_id, work->base, work->async);

	radeon_crtc->flip_status = RADEON_FLIP_SUBMITTED;
	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
	up_read(&rdev->exclusive_lock);
}
479
/*
 * radeon_crtc_page_flip_target - drm_crtc_funcs.page_flip_target hook
 *
 * Queues an asynchronous page flip of @crtc to @fb, to be executed on or
 * after vblank count @target by radeon_flip_work_func().  Pins the new
 * framebuffer's BO in VRAM, records the fence to wait on, and marks the
 * CRTC flip state PENDING under dev->event_lock.
 *
 * Returns 0 on success, -ENOMEM on allocation failure, -EBUSY if a flip
 * is already outstanding on this CRTC, or a negative error from the
 * buffer reserve/pin steps.
 */
static int radeon_crtc_page_flip_target(struct drm_crtc *crtc,
					struct drm_framebuffer *fb,
					struct drm_pending_vblank_event *event,
					uint32_t page_flip_flags,
					uint32_t target,
					struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_gem_object *obj;
	struct radeon_flip_work *work;
	struct radeon_bo *new_rbo;
	uint32_t tiling_flags, pitch_pixels;
	uint64_t base;
	unsigned long flags;
	int r;

	work = kzalloc(sizeof *work, GFP_KERNEL);
	if (work == NULL)
		return -ENOMEM;

	INIT_WORK(&work->flip_work, radeon_flip_work_func);
	INIT_WORK(&work->unpin_work, radeon_unpin_work_func);

	work->rdev = rdev;
	work->crtc_id = radeon_crtc->crtc_id;
	work->event = event;
	work->async = (page_flip_flags & DRM_MODE_PAGE_FLIP_ASYNC) != 0;

	/* schedule unpin of the old buffer */
	obj = crtc->primary->fb->obj[0];

	/* take a reference to the old object; dropped by the unpin worker */
	drm_gem_object_get(obj);
	work->old_rbo = gem_to_radeon_bo(obj);

	obj = fb->obj[0];
	new_rbo = gem_to_radeon_bo(obj);

	DRM_DEBUG_DRIVER("flip-ioctl() cur_rbo = %p, new_rbo = %p\n",
			 work->old_rbo, new_rbo);

	/* pin the new buffer in VRAM so it can be scanned out */
	r = radeon_bo_reserve(new_rbo, false);
	if (unlikely(r != 0)) {
		DRM_ERROR("failed to reserve new rbo buffer before flip\n");
		goto cleanup;
	}
	/* pre-AVIVO chips need the scanout buffer below 128 MB */
	r = radeon_bo_pin_restricted(new_rbo, RADEON_GEM_DOMAIN_VRAM,
				     ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, &base);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(new_rbo);
		r = -EINVAL;
		DRM_ERROR("failed to pin new rbo buffer before flip\n");
		goto cleanup;
	}
	/* fence the worker must wait on before flipping to this buffer */
	work->fence = dma_fence_get(dma_resv_get_excl(new_rbo->tbo.base.resv));
	radeon_bo_get_tiling_flags(new_rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(new_rbo);

	if (!ASIC_IS_AVIVO(rdev)) {
		/* legacy CRTCs take an offset relative to the display base,
		 * adjusted for the CRTC's x/y position and tiling layout */
		base -= radeon_crtc->legacy_display_base_addr;
		pitch_pixels = fb->pitches[0] / fb->format->cpp[0];

		if (tiling_flags & RADEON_TILING_MACRO) {
			if (ASIC_IS_R300(rdev)) {
				base &= ~0x7ff;
			} else {
				/* macrotile address math for pre-R300 parts */
				int byteshift = fb->format->cpp[0] * 8 >> 4;
				int tile_addr = (((crtc->y >> 3) * pitch_pixels + crtc->x) >> (8 - byteshift)) << 11;
				base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
			}
		} else {
			/* linear layout: byte offset of the (x, y) origin */
			int offset = crtc->y * pitch_pixels + crtc->x;
			switch (fb->format->cpp[0] * 8) {
			case 8:
			default:
				offset *= 1;
				break;
			case 15:	/* 15 bpp stored in 2 bytes; falls through */
			case 16:
				offset *= 2;
				break;
			case 24:
				offset *= 3;
				break;
			case 32:
				offset *= 4;
				break;
			}
			base += offset;
		}
		/* legacy scanout base must be 8-byte aligned */
		base &= ~7;
	}
	work->base = base;
	/* convert the caller's absolute target to the driver's HW counter */
	work->target_vblank = target - (uint32_t)drm_crtc_vblank_count(crtc) +
		dev->driver->get_vblank_counter(dev, work->crtc_id);

	/* dev->event_lock protects flip_status/flip_work */
	spin_lock_irqsave(&crtc->dev->event_lock, flags);

	if (radeon_crtc->flip_status != RADEON_FLIP_NONE) {
		DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
		spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
		r = -EBUSY;
		goto pflip_cleanup;
	}
	radeon_crtc->flip_status = RADEON_FLIP_PENDING;
	radeon_crtc->flip_work = work;

	/* update crtc fb */
	crtc->primary->fb = fb;

	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);

	queue_work(radeon_crtc->flip_queue, &work->flip_work);
	return 0;

pflip_cleanup:
	/* unwind the pin of the new buffer */
	if (unlikely(radeon_bo_reserve(new_rbo, false) != 0)) {
		DRM_ERROR("failed to reserve new rbo in error path\n");
		goto cleanup;
	}
	if (unlikely(radeon_bo_unpin(new_rbo) != 0)) {
		DRM_ERROR("failed to unpin new rbo in error path\n");
	}
	radeon_bo_unreserve(new_rbo);

cleanup:
	/* drop the old-buffer reference and fence, free the work item */
	drm_gem_object_put_unlocked(&work->old_rbo->tbo.base);
	dma_fence_put(work->fence);
	kfree(work);
	return r;
}
617
/*
 * radeon_crtc_set_config - drm_crtc_funcs.set_config hook with runtime-PM
 *
 * Wraps drm_crtc_helper_set_config() with runtime power management
 * bookkeeping: the device holds one extra pm_runtime reference
 * (have_disp_power_ref) for as long as at least one CRTC is enabled,
 * so the GPU is not runtime-suspended while displays are active.
 */
static int
radeon_crtc_set_config(struct drm_mode_set *set,
		       struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_device *dev;
	struct radeon_device *rdev;
	struct drm_crtc *crtc;
	bool active = false;
	int ret;

	if (!set || !set->crtc)
		return -EINVAL;

	dev = set->crtc->dev;

	/* wake the device for the modeset */
	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0)
		return ret;

	ret = drm_crtc_helper_set_config(set, ctx);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head)
		if (crtc->enabled)
			active = true;

	pm_runtime_mark_last_busy(dev->dev);

	rdev = dev->dev_private;

	/* if we have active crtcs and no display power reference yet,
	 * keep the reference taken above (do not put it) */
	if (active && !rdev->have_disp_power_ref) {
		rdev->have_disp_power_ref = true;
		return ret;
	}

	/* if no crtc is active anymore, drop the display power reference */
	if (!active && rdev->have_disp_power_ref) {
		pm_runtime_put_autosuspend(dev->dev);
		rdev->have_disp_power_ref = false;
	}

	/* balance the pm_runtime_get_sync() taken on entry */
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}
663
/* CRTC callbacks shared by all radeon display generations */
static const struct drm_crtc_funcs radeon_crtc_funcs = {
	.cursor_set2 = radeon_crtc_cursor_set2,
	.cursor_move = radeon_crtc_cursor_move,
	.gamma_set = radeon_crtc_gamma_set,
	.set_config = radeon_crtc_set_config,
	.destroy = radeon_crtc_destroy,
	.page_flip_target = radeon_crtc_page_flip_target,
};
672
/*
 * radeon_crtc_init - allocate and register the CRTC with the given index
 *
 * Creates the radeon_crtc wrapper (with trailing space for connector
 * pointers), registers it with the DRM core, sets up the per-CRTC flip
 * workqueue and cursor limits, and hands off to the ATOM or legacy
 * backend for hardware-specific initialization.  Silently returns on
 * allocation failure.
 */
static void radeon_crtc_init(struct drm_device *dev, int index)
{
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc;
	int i;

	/* extra tail space for up to RADEONFB_CONN_LIMIT connector pointers */
	radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (radeon_crtc == NULL)
		return;

	drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
	radeon_crtc->crtc_id = index;
	radeon_crtc->flip_queue = alloc_workqueue("radeon-crtc", WQ_HIGHPRI, 0);
	rdev->mode_info.crtcs[index] = radeon_crtc;

	/* CIK (Bonaire+) supports a larger hardware cursor */
	if (rdev->family >= CHIP_BONAIRE) {
		radeon_crtc->max_cursor_width = CIK_CURSOR_WIDTH;
		radeon_crtc->max_cursor_height = CIK_CURSOR_HEIGHT;
	} else {
		radeon_crtc->max_cursor_width = CURSOR_WIDTH;
		radeon_crtc->max_cursor_height = CURSOR_HEIGHT;
	}
	dev->mode_config.cursor_width = radeon_crtc->max_cursor_width;
	dev->mode_config.cursor_height = radeon_crtc->max_cursor_height;

#if 0
	radeon_crtc->mode_set.crtc = &radeon_crtc->base;
	radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
	radeon_crtc->mode_set.num_connectors = 0;
#endif

	/* seed the legacy LUT with a linear ramp (8-bit index -> 10-bit) */
	for (i = 0; i < 256; i++) {
		radeon_crtc->lut_r[i] = i << 2;
		radeon_crtc->lut_g[i] = i << 2;
		radeon_crtc->lut_b[i] = i << 2;
	}

	if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
		radeon_atombios_init_crtc(dev, radeon_crtc);
	else
		radeon_legacy_init_crtc(dev, radeon_crtc);
}
717
/* Human-readable encoder names, indexed by radeon_encoder->encoder_id
 * (used only for the boot-time display setup printout below). */
static const char *encoder_names[38] = {
	"NONE",
	"INTERNAL_LVDS",
	"INTERNAL_TMDS1",
	"INTERNAL_TMDS2",
	"INTERNAL_DAC1",
	"INTERNAL_DAC2",
	"INTERNAL_SDVOA",
	"INTERNAL_SDVOB",
	"SI170B",
	"CH7303",
	"CH7301",
	"INTERNAL_DVO1",
	"EXTERNAL_SDVOA",
	"EXTERNAL_SDVOB",
	"TITFP513",
	"INTERNAL_LVTM1",
	"VT1623",
	"HDMI_SI1930",
	"HDMI_INTERNAL",
	"INTERNAL_KLDSCP_TMDS1",
	"INTERNAL_KLDSCP_DVO1",
	"INTERNAL_KLDSCP_DAC1",
	"INTERNAL_KLDSCP_DAC2",
	"SI178",
	"MVPU_FPGA",
	"INTERNAL_DDI",
	"VT1625",
	"HDMI_SI1932",
	"DP_AN9801",
	"DP_DP501",
	"INTERNAL_UNIPHY",
	"INTERNAL_KLDSCP_LVTMA",
	"INTERNAL_UNIPHY1",
	"INTERNAL_UNIPHY2",
	"NUTMEG",
	"TRAVIS",
	"INTERNAL_VCE",
	"INTERNAL_UNIPHY3",
};
758
/* Hot-plug-detect pin names, indexed by radeon_connector->hpd.hpd */
static const char *hpd_names[6] = {
	"HPD1",
	"HPD2",
	"HPD3",
	"HPD4",
	"HPD5",
	"HPD6",
};
767
/*
 * radeon_print_display_setup - log the detected connector/encoder topology
 *
 * Walks every connector, printing its name, HPD pin, DDC bus registers
 * (or a BIOS-bug warning when a digital connector has no DDC bus), any
 * DDC/clock routers, and every encoder that can drive it.  Pure logging;
 * no state is modified.
 */
static void radeon_print_display_setup(struct drm_device *dev)
{
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	struct drm_encoder *encoder;
	struct radeon_encoder *radeon_encoder;
	uint32_t devices;
	int i = 0;

	DRM_INFO("Radeon Display Connectors\n");
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		radeon_connector = to_radeon_connector(connector);
		DRM_INFO("Connector %d:\n", i);
		DRM_INFO("  %s\n", connector->name);
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			DRM_INFO("  %s\n", hpd_names[radeon_connector->hpd.hpd]);
		if (radeon_connector->ddc_bus) {
			DRM_INFO("  DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
				 radeon_connector->ddc_bus->rec.mask_clk_reg,
				 radeon_connector->ddc_bus->rec.mask_data_reg,
				 radeon_connector->ddc_bus->rec.a_clk_reg,
				 radeon_connector->ddc_bus->rec.a_data_reg,
				 radeon_connector->ddc_bus->rec.en_clk_reg,
				 radeon_connector->ddc_bus->rec.en_data_reg,
				 radeon_connector->ddc_bus->rec.y_clk_reg,
				 radeon_connector->ddc_bus->rec.y_data_reg);
			if (radeon_connector->router.ddc_valid)
				DRM_INFO("  DDC Router 0x%x/0x%x\n",
					 radeon_connector->router.ddc_mux_control_pin,
					 radeon_connector->router.ddc_mux_state);
			if (radeon_connector->router.cd_valid)
				DRM_INFO("  Clock/Data Router 0x%x/0x%x\n",
					 radeon_connector->router.cd_mux_control_pin,
					 radeon_connector->router.cd_mux_state);
		} else {
			/* a display connector with no DDC bus is almost
			 * certainly a BIOS table bug */
			if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
				DRM_INFO("  DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
		}
		DRM_INFO("  Encoders:\n");
		list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
			radeon_encoder = to_radeon_encoder(encoder);
			/* only encoders that share a device with this connector */
			devices = radeon_encoder->devices & radeon_connector->devices;
			if (devices) {
				if (devices & ATOM_DEVICE_CRT1_SUPPORT)
					DRM_INFO("    CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_CRT2_SUPPORT)
					DRM_INFO("    CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_LCD1_SUPPORT)
					DRM_INFO("    LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP1_SUPPORT)
					DRM_INFO("    DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP2_SUPPORT)
					DRM_INFO("    DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP3_SUPPORT)
					DRM_INFO("    DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP4_SUPPORT)
					DRM_INFO("    DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP5_SUPPORT)
					DRM_INFO("    DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_DFP6_SUPPORT)
					DRM_INFO("    DFP6: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_TV1_SUPPORT)
					DRM_INFO("    TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
				if (devices & ATOM_DEVICE_CV_SUPPORT)
					DRM_INFO("    CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
			}
		}
		i++;
	}
}
843
844 static bool radeon_setup_enc_conn(struct drm_device *dev)
845 {
846 struct radeon_device *rdev = dev->dev_private;
847 bool ret = false;
848
849 if (rdev->bios) {
850 if (rdev->is_atom_bios) {
851 ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
852 if (ret == false)
853 ret = radeon_get_atom_connector_info_from_object_table(dev);
854 } else {
855 ret = radeon_get_legacy_connector_info_from_bios(dev);
856 if (ret == false)
857 ret = radeon_get_legacy_connector_info_from_table(dev);
858 }
859 } else {
860 if (!ASIC_IS_AVIVO(rdev))
861 ret = radeon_get_legacy_connector_info_from_table(dev);
862 }
863 if (ret) {
864 radeon_setup_encoder_clones(dev);
865 radeon_print_display_setup(dev);
866 }
867
868 return ret;
869 }
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
/*
 * avivo_reduce_ratio - normalize a nominator/denominator pair
 *
 * Reduces the fraction *nom / *den to lowest terms (Euclid's algorithm,
 * inlined here), then scales both numbers up by the smallest integer
 * factor that makes the nominator at least @nom_min, and again so the
 * denominator is at least @den_min.  The value of the fraction is
 * unchanged throughout.
 */
static void avivo_reduce_ratio(unsigned *nom, unsigned *den,
			       unsigned nom_min, unsigned den_min)
{
	unsigned a = *nom;
	unsigned b = *den;
	unsigned mult;

	/* greatest common divisor via Euclid's algorithm */
	while (b != 0) {
		unsigned rem = a % b;
		a = b;
		b = rem;
	}
	*nom /= a;
	*den /= a;

	/* scale up so the nominator meets its minimum */
	if (*nom < nom_min) {
		mult = (nom_min + *nom - 1) / *nom;	/* ceil division */
		*nom *= mult;
		*den *= mult;
	}

	/* scale up so the denominator meets its minimum */
	if (*den < den_min) {
		mult = (den_min + *den - 1) / *den;	/* ceil division */
		*nom *= mult;
		*den *= mult;
	}
}
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
/*
 * avivo_get_fb_ref_div - pick feedback/reference dividers for a post divider
 *
 * Given the target ratio nom/den and a post divider, chooses a reference
 * divider (capped so that reference * post divider stays at most 100 and
 * within @ref_div_max) and the feedback divider that best approximates
 * the ratio (rounded to nearest).  If the feedback divider would exceed
 * @fb_div_max, both dividers are scaled down proportionally instead.
 */
static void avivo_get_fb_ref_div(unsigned nom, unsigned den, unsigned post_div,
				 unsigned fb_div_max, unsigned ref_div_max,
				 unsigned *fb_div, unsigned *ref_div)
{
	unsigned cap = 100 / post_div;
	unsigned rd;

	/* limit reference * post divider to a maximum of 100 */
	if (cap > ref_div_max)
		cap = ref_div_max;
	if (cap < 1)
		cap = 1;

	/* clamp den/post_div into [1, cap] to get the reference divider */
	rd = den / post_div;
	if (rd < 1)
		rd = 1;
	if (rd > cap)
		rd = cap;
	*ref_div = rd;

	/* round-to-nearest division for the matching feedback divider */
	*fb_div = (nom * rd * post_div + den / 2) / den;

	/* keep the feedback divider within its maximum */
	if (*fb_div > fb_div_max) {
		*ref_div = (*ref_div * fb_div_max) / (*fb_div);
		*fb_div = fb_div_max;
	}
}
941
942
943
944
945
946
947
948
949
950
951
952
953
954
/*
 * radeon_compute_pll_avivo - compute PLL dividers for AVIVO+ ASICs
 *
 * @pll: PLL parameters and constraints
 * @freq: requested frequency (interpreted in 10ths of the output unit
 *        when RADEON_PLL_USE_FRAC_FB_DIV is set — note the /10 below and
 *        the *10 in the debug print)
 * @dot_clock_p: resulting achieved clock
 * @fb_div_p / @frac_fb_div_p: integer and fractional feedback divider
 * @ref_div_p: reference divider
 * @post_div_p: post divider
 *
 * Searches the legal post-divider range for the combination of
 * feedback/reference dividers that best approximates the requested
 * frequency, subject to the VCO, divider-range and flag constraints
 * carried in @pll.
 */
void radeon_compute_pll_avivo(struct radeon_pll *pll,
			      u32 freq,
			      u32 *dot_clock_p,
			      u32 *fb_div_p,
			      u32 *frac_fb_div_p,
			      u32 *ref_div_p,
			      u32 *post_div_p)
{
	/* with a fractional fb divider the math is done in 10ths */
	unsigned target_clock = pll->flags & RADEON_PLL_USE_FRAC_FB_DIV ?
		freq : freq / 10;

	unsigned fb_div_min, fb_div_max, fb_div;
	unsigned post_div_min, post_div_max, post_div;
	unsigned ref_div_min, ref_div_max, ref_div;
	unsigned post_div_best, diff_best;
	unsigned nom, den;

	/* determine the allowed feedback divider range */
	fb_div_min = pll->min_feedback_div;
	fb_div_max = pll->max_feedback_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		fb_div_min *= 10;
		fb_div_max *= 10;
	}

	/* determine the allowed reference divider range */
	if (pll->flags & RADEON_PLL_USE_REF_DIV)
		ref_div_min = pll->reference_div;
	else
		ref_div_min = pll->min_ref_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV &&
	    pll->flags & RADEON_PLL_USE_REF_DIV)
		ref_div_max = pll->reference_div;
	else if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
		/* NOTE(review): cap of 7 presumably works around problems on
		 * specific boards — confirm before changing */
		ref_div_max = min(pll->max_ref_div, 7u);
	else
		ref_div_max = pll->max_ref_div;

	/* determine the allowed post divider range */
	if (pll->flags & RADEON_PLL_USE_POST_DIV) {
		post_div_min = pll->post_div;
		post_div_max = pll->post_div;
	} else {
		unsigned vco_min, vco_max;

		if (pll->flags & RADEON_PLL_IS_LCD) {
			vco_min = pll->lcd_pll_out_min;
			vco_max = pll->lcd_pll_out_max;
		} else {
			vco_min = pll->pll_out_min;
			vco_max = pll->pll_out_max;
		}

		if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
			vco_min *= 10;
			vco_max *= 10;
		}

		/* smallest/largest post divider keeping the VCO in range */
		post_div_min = vco_min / target_clock;
		if ((target_clock * post_div_min) < vco_min)
			++post_div_min;
		if (post_div_min < pll->min_post_div)
			post_div_min = pll->min_post_div;

		post_div_max = vco_max / target_clock;
		if ((target_clock * post_div_max) > vco_max)
			--post_div_max;
		if (post_div_max > pll->max_post_div)
			post_div_max = pll->max_post_div;
	}

	/* represent the searched ratio as a fraction of the reference */
	nom = target_clock;
	den = pll->reference_freq;

	/* reduce the fraction while honoring the divider minimums */
	avivo_reduce_ratio(&nom, &den, fb_div_min, post_div_min);

	/* search the post divider that minimizes the frequency error;
	 * on ties, PREFER_MINM_OVER_MAXP keeps the smaller post divider */
	if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
		post_div_best = post_div_min;
	else
		post_div_best = post_div_max;
	diff_best = ~0;

	for (post_div = post_div_min; post_div <= post_div_max; ++post_div) {
		unsigned diff;
		avivo_get_fb_ref_div(nom, den, post_div, fb_div_max,
				     ref_div_max, &fb_div, &ref_div);
		diff = abs(target_clock - (pll->reference_freq * fb_div) /
			(ref_div * post_div));

		if (diff < diff_best || (diff == diff_best &&
		    !(pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP))) {

			post_div_best = post_div;
			diff_best = diff;
		}
	}
	post_div = post_div_best;

	/* recompute the dividers for the chosen post divider */
	avivo_get_fb_ref_div(nom, den, post_div, fb_div_max, ref_div_max,
			     &fb_div, &ref_div);

	/* reduce again; this also enforces the reference divider minimum */
	avivo_reduce_ratio(&fb_div, &ref_div, fb_div_min, ref_div_min);

	/* small fractional feedback dividers cause jitter: raise the
	 * feedback divider floor when a fractional part is present */
	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV && (fb_div % 10)) {
		fb_div_min = max(fb_div_min, (9 - (fb_div % 10)) * 20 + 50);
		if (fb_div < fb_div_min) {
			unsigned tmp = DIV_ROUND_UP(fb_div_min, fb_div);
			fb_div *= tmp;
			ref_div *= tmp;
		}
	}

	/* split the feedback divider into integer and fractional parts */
	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		*fb_div_p = fb_div / 10;
		*frac_fb_div_p = fb_div % 10;
	} else {
		*fb_div_p = fb_div;
		*frac_fb_div_p = 0;
	}

	*dot_clock_p = ((pll->reference_freq * *fb_div_p * 10) +
			(pll->reference_freq * *frac_fb_div_p)) /
		       (ref_div * post_div * 10);
	*ref_div_p = ref_div;
	*post_div_p = post_div;

	DRM_DEBUG_KMS("%d - %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
		      freq, *dot_clock_p * 10, *fb_div_p, *frac_fb_div_p,
		      ref_div, post_div);
}
1096
1097
1098 static inline uint32_t radeon_div(uint64_t n, uint32_t d)
1099 {
1100 uint64_t mod;
1101
1102 n += d / 2;
1103
1104 mod = do_div(n, d);
1105 return n;
1106 }
1107
/*
 * radeon_compute_pll_legacy - compute PLL dividers for pre-AVIVO display PLLs.
 * @pll: PLL constraints (divider ranges, reference clock, flags)
 * @freq: requested pixel clock (scaled by 1000 below; units appear to be
 *        10 kHz on entry — NOTE(review): confirm against callers)
 * @dot_clock_p: returns the actually achievable clock
 * @fb_div_p / @frac_fb_div_p: returns integer / fractional feedback divider
 * @ref_div_p: returns reference divider
 * @post_div_p: returns post divider
 *
 * Exhaustively walks post dividers and reference dividers, and for each
 * pair binary-searches the (fractional) feedback divider, keeping the
 * combination with the smallest frequency error (with a VCO-proximity
 * tie-break around pll->best_vco).
 */
void radeon_compute_pll_legacy(struct radeon_pll *pll,
			       uint64_t freq,
			       uint32_t *dot_clock_p,
			       uint32_t *fb_div_p,
			       uint32_t *frac_fb_div_p,
			       uint32_t *ref_div_p,
			       uint32_t *post_div_p)
{
	uint32_t min_ref_div = pll->min_ref_div;
	uint32_t max_ref_div = pll->max_ref_div;
	uint32_t min_post_div = pll->min_post_div;
	uint32_t max_post_div = pll->max_post_div;
	uint32_t min_fractional_feed_div = 0;
	uint32_t max_fractional_feed_div = 0;
	uint32_t best_vco = pll->best_vco;
	uint32_t best_post_div = 1;
	uint32_t best_ref_div = 1;
	uint32_t best_feedback_div = 1;
	uint32_t best_frac_feedback_div = 0;
	/* -1 (0xffffffff) is the "no exact match recorded yet" sentinel. */
	uint32_t best_freq = -1;
	uint32_t best_error = 0xffffffff;
	uint32_t best_vco_diff = 1;
	uint32_t post_div;
	u32 pll_out_min, pll_out_max;

	DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
	freq = freq * 1000;

	/* LCD panels may have tighter dedicated VCO output limits. */
	if (pll->flags & RADEON_PLL_IS_LCD) {
		pll_out_min = pll->lcd_pll_out_min;
		pll_out_max = pll->lcd_pll_out_max;
	} else {
		pll_out_min = pll->pll_out_min;
		pll_out_max = pll->pll_out_max;
	}

	/* Clamp the minimum VCO output — presumably a stability limit for
	 * these older PLLs; TODO confirm origin of the 64800 value. */
	if (pll_out_min > 64800)
		pll_out_min = 64800;

	if (pll->flags & RADEON_PLL_USE_REF_DIV)
		min_ref_div = max_ref_div = pll->reference_div;
	else {
		/* Binary-search a ref_div window whose PLL input frequency
		 * falls inside [pll_in_min, pll_in_max]. */
		while (min_ref_div < max_ref_div-1) {
			uint32_t mid = (min_ref_div + max_ref_div) / 2;
			uint32_t pll_in = pll->reference_freq / mid;
			if (pll_in < pll->pll_in_min)
				max_ref_div = mid;
			else if (pll_in > pll->pll_in_max)
				min_ref_div = mid;
			else
				break;
		}
	}

	if (pll->flags & RADEON_PLL_USE_POST_DIV)
		min_post_div = max_post_div = pll->post_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		min_fractional_feed_div = pll->min_frac_feedback_div;
		max_fractional_feed_div = pll->max_frac_feedback_div;
	}

	/* Walk post dividers from largest to smallest. */
	for (post_div = max_post_div; post_div >= min_post_div; --post_div) {
		uint32_t ref_div;

		if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
			continue;

		/* Legacy PLLs cannot generate these post divider values. */
		if (pll->flags & RADEON_PLL_LEGACY) {
			if ((post_div == 5) ||
			    (post_div == 7) ||
			    (post_div == 9) ||
			    (post_div == 10) ||
			    (post_div == 11) ||
			    (post_div == 13) ||
			    (post_div == 14) ||
			    (post_div == 15))
				continue;
		}

		for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
			uint32_t feedback_div, current_freq = 0, error, vco_diff;
			uint32_t pll_in = pll->reference_freq / ref_div;
			uint32_t min_feed_div = pll->min_feedback_div;
			uint32_t max_feed_div = pll->max_feedback_div + 1;

			if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
				continue;

			/* Binary-search the feedback divider toward freq. */
			while (min_feed_div < max_feed_div) {
				uint32_t vco;
				uint32_t min_frac_feed_div = min_fractional_feed_div;
				uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
				uint32_t frac_feedback_div;
				uint64_t tmp;

				feedback_div = (min_feed_div + max_feed_div) / 2;

				/* VCO = ref_freq * fb_div / ref_div (rounded). */
				tmp = (uint64_t)pll->reference_freq * feedback_div;
				vco = radeon_div(tmp, ref_div);

				if (vco < pll_out_min) {
					min_feed_div = feedback_div + 1;
					continue;
				} else if (vco > pll_out_max) {
					max_feed_div = feedback_div;
					continue;
				}

				/* Inner binary search over the fractional part
				 * (degenerates to one iteration with frac=0 when
				 * fractional dividers are disabled). */
				while (min_frac_feed_div < max_frac_feed_div) {
					frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
					tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
					tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
					current_freq = radeon_div(tmp, ref_div * post_div);

					if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
						/* Never pick a clock above the request. */
						if (freq < current_freq)
							error = 0xffffffff;
						else
							error = freq - current_freq;
					} else
						error = abs(current_freq - freq);
					vco_diff = abs(vco - best_vco);

					/* Accept if clearly better, or comparable
					 * error but VCO closer to the preferred one. */
					if ((best_vco == 0 && error < best_error) ||
					    (best_vco != 0 &&
					     ((best_error > 100 && error < best_error - 100) ||
					      (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
						best_post_div = post_div;
						best_ref_div = ref_div;
						best_feedback_div = feedback_div;
						best_frac_feedback_div = frac_feedback_div;
						best_freq = current_freq;
						best_error = error;
						best_vco_diff = vco_diff;
					} else if (current_freq == freq) {
						/* Exact hit: keep the first one, or a later
						 * one matching the divider-preference flags. */
						if (best_freq == -1) {
							best_post_div = post_div;
							best_ref_div = ref_div;
							best_feedback_div = feedback_div;
							best_frac_feedback_div = frac_feedback_div;
							best_freq = current_freq;
							best_error = error;
							best_vco_diff = vco_diff;
						} else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
							best_post_div = post_div;
							best_ref_div = ref_div;
							best_feedback_div = feedback_div;
							best_frac_feedback_div = frac_feedback_div;
							best_freq = current_freq;
							best_error = error;
							best_vco_diff = vco_diff;
						}
					}
					/* Steer the fractional search. */
					if (current_freq < freq)
						min_frac_feed_div = frac_feedback_div + 1;
					else
						max_frac_feed_div = frac_feedback_div;
				}
				/* Steer the integer feedback search. */
				if (current_freq < freq)
					min_feed_div = feedback_div + 1;
				else
					max_feed_div = feedback_div;
			}
		}
	}

	*dot_clock_p = best_freq / 10000;
	*fb_div_p = best_feedback_div;
	*frac_fb_div_p = best_frac_feedback_div;
	*ref_div_p = best_ref_div;
	*post_div_p = best_post_div;
	DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
		      (long long)freq,
		      best_freq / 1000, best_feedback_div, best_frac_feedback_div,
		      best_ref_div, best_post_div);

}
1292
/* Framebuffer vfuncs: teardown and userspace handle creation are fully
 * delegated to the generic GEM framebuffer helpers. */
static const struct drm_framebuffer_funcs radeon_fb_funcs = {
	.destroy = drm_gem_fb_destroy,
	.create_handle = drm_gem_fb_create_handle,
};
1297
1298 int
1299 radeon_framebuffer_init(struct drm_device *dev,
1300 struct drm_framebuffer *fb,
1301 const struct drm_mode_fb_cmd2 *mode_cmd,
1302 struct drm_gem_object *obj)
1303 {
1304 int ret;
1305 fb->obj[0] = obj;
1306 drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd);
1307 ret = drm_framebuffer_init(dev, fb, &radeon_fb_funcs);
1308 if (ret) {
1309 fb->obj[0] = NULL;
1310 return ret;
1311 }
1312 return 0;
1313 }
1314
1315 static struct drm_framebuffer *
1316 radeon_user_framebuffer_create(struct drm_device *dev,
1317 struct drm_file *file_priv,
1318 const struct drm_mode_fb_cmd2 *mode_cmd)
1319 {
1320 struct drm_gem_object *obj;
1321 struct drm_framebuffer *fb;
1322 int ret;
1323
1324 obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
1325 if (obj == NULL) {
1326 dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
1327 "can't create framebuffer\n", mode_cmd->handles[0]);
1328 return ERR_PTR(-ENOENT);
1329 }
1330
1331
1332 if (obj->import_attach) {
1333 DRM_DEBUG_KMS("Cannot create framebuffer from imported dma_buf\n");
1334 return ERR_PTR(-EINVAL);
1335 }
1336
1337 fb = kzalloc(sizeof(*fb), GFP_KERNEL);
1338 if (fb == NULL) {
1339 drm_gem_object_put_unlocked(obj);
1340 return ERR_PTR(-ENOMEM);
1341 }
1342
1343 ret = radeon_framebuffer_init(dev, fb, mode_cmd, obj);
1344 if (ret) {
1345 kfree(fb);
1346 drm_gem_object_put_unlocked(obj);
1347 return ERR_PTR(ret);
1348 }
1349
1350 return fb;
1351 }
1352
/* Mode-config vfuncs: framebuffer creation is driver-specific, output
 * poll change notification goes through the fbdev helper. */
static const struct drm_mode_config_funcs radeon_mode_funcs = {
	.fb_create = radeon_user_framebuffer_create,
	.output_poll_changed = drm_fb_helper_output_poll_changed,
};
1357
/* Value/name tables for the driver-private KMS properties created in
 * radeon_modeset_create_props(). */

/* TMDS PLL configuration source (pre-AVIVO only). */
static const struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{	{ 0, "driver" },
	{ 1, "bios" },
};

/* Selectable TV output standards. */
static const struct drm_prop_enum_list radeon_tv_std_enum_list[] =
{	{ TV_STD_NTSC, "ntsc" },
	{ TV_STD_PAL, "pal" },
	{ TV_STD_PAL_M, "pal-m" },
	{ TV_STD_PAL_60, "pal-60" },
	{ TV_STD_NTSC_J, "ntsc-j" },
	{ TV_STD_SCART_PAL, "scart-pal" },
	{ TV_STD_PAL_CN, "pal-cn" },
	{ TV_STD_SECAM, "secam" },
};

/* Underscan handling for HDMI-style displays. */
static const struct drm_prop_enum_list radeon_underscan_enum_list[] =
{	{ UNDERSCAN_OFF, "off" },
	{ UNDERSCAN_ON, "on" },
	{ UNDERSCAN_AUTO, "auto" },
};

/* HDMI/DP audio enablement. */
static const struct drm_prop_enum_list radeon_audio_enum_list[] =
{	{ RADEON_AUDIO_DISABLE, "off" },
	{ RADEON_AUDIO_ENABLE, "on" },
	{ RADEON_AUDIO_AUTO, "auto" },
};

/* Output dithering control. */
static const struct drm_prop_enum_list radeon_dither_enum_list[] =
{	{ RADEON_FMT_DITHER_DISABLE, "off" },
	{ RADEON_FMT_DITHER_ENABLE, "on" },
};

/* Output color-space conversion selection. */
static const struct drm_prop_enum_list radeon_output_csc_enum_list[] =
{	{ RADEON_OUTPUT_CSC_BYPASS, "bypass" },
	{ RADEON_OUTPUT_CSC_TVRGB, "tvrgb" },
	{ RADEON_OUTPUT_CSC_YCBCR601, "ycbcr601" },
	{ RADEON_OUTPUT_CSC_YCBCR709, "ycbcr709" },
};
1398
1399 static int radeon_modeset_create_props(struct radeon_device *rdev)
1400 {
1401 int sz;
1402
1403 if (rdev->is_atom_bios) {
1404 rdev->mode_info.coherent_mode_property =
1405 drm_property_create_range(rdev->ddev, 0 , "coherent", 0, 1);
1406 if (!rdev->mode_info.coherent_mode_property)
1407 return -ENOMEM;
1408 }
1409
1410 if (!ASIC_IS_AVIVO(rdev)) {
1411 sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
1412 rdev->mode_info.tmds_pll_property =
1413 drm_property_create_enum(rdev->ddev, 0,
1414 "tmds_pll",
1415 radeon_tmds_pll_enum_list, sz);
1416 }
1417
1418 rdev->mode_info.load_detect_property =
1419 drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1);
1420 if (!rdev->mode_info.load_detect_property)
1421 return -ENOMEM;
1422
1423 drm_mode_create_scaling_mode_property(rdev->ddev);
1424
1425 sz = ARRAY_SIZE(radeon_tv_std_enum_list);
1426 rdev->mode_info.tv_std_property =
1427 drm_property_create_enum(rdev->ddev, 0,
1428 "tv standard",
1429 radeon_tv_std_enum_list, sz);
1430
1431 sz = ARRAY_SIZE(radeon_underscan_enum_list);
1432 rdev->mode_info.underscan_property =
1433 drm_property_create_enum(rdev->ddev, 0,
1434 "underscan",
1435 radeon_underscan_enum_list, sz);
1436
1437 rdev->mode_info.underscan_hborder_property =
1438 drm_property_create_range(rdev->ddev, 0,
1439 "underscan hborder", 0, 128);
1440 if (!rdev->mode_info.underscan_hborder_property)
1441 return -ENOMEM;
1442
1443 rdev->mode_info.underscan_vborder_property =
1444 drm_property_create_range(rdev->ddev, 0,
1445 "underscan vborder", 0, 128);
1446 if (!rdev->mode_info.underscan_vborder_property)
1447 return -ENOMEM;
1448
1449 sz = ARRAY_SIZE(radeon_audio_enum_list);
1450 rdev->mode_info.audio_property =
1451 drm_property_create_enum(rdev->ddev, 0,
1452 "audio",
1453 radeon_audio_enum_list, sz);
1454
1455 sz = ARRAY_SIZE(radeon_dither_enum_list);
1456 rdev->mode_info.dither_property =
1457 drm_property_create_enum(rdev->ddev, 0,
1458 "dither",
1459 radeon_dither_enum_list, sz);
1460
1461 sz = ARRAY_SIZE(radeon_output_csc_enum_list);
1462 rdev->mode_info.output_csc_property =
1463 drm_property_create_enum(rdev->ddev, 0,
1464 "output_csc",
1465 radeon_output_csc_enum_list, sz);
1466
1467 return 0;
1468 }
1469
1470 void radeon_update_display_priority(struct radeon_device *rdev)
1471 {
1472
1473 if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
1474
1475
1476
1477
1478
1479
1480 if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
1481 !(rdev->flags & RADEON_IS_IGP))
1482 rdev->disp_priority = 2;
1483 else
1484 rdev->disp_priority = 0;
1485 } else
1486 rdev->disp_priority = radeon_disp_priority;
1487
1488 }
1489
1490
1491
1492
/*
 * radeon_afmt_init - allocate the AFMT (audio/HDMI format) blocks for
 * this asic generation and record their register offsets.
 * @rdev: radeon device
 *
 * Allocation failures are tolerated: a slot simply stays NULL and, per
 * the kfree() in radeon_afmt_fini(), NULL slots are handled on teardown.
 */
static void radeon_afmt_init(struct radeon_device *rdev)
{
	int i;

	for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++)
		rdev->mode_info.afmt[i] = NULL;

	if (ASIC_IS_NODCE(rdev)) {
		/* No display/audio hardware: nothing to allocate. */
	} else if (ASIC_IS_DCE4(rdev)) {
		/* DCE4+ AFMT blocks live at the CRTC register offsets; the
		 * 7th entry is an extra DCE8 block (offset expressed relative
		 * to the register base, hence the subtraction). */
		static uint32_t eg_offsets[] = {
			EVERGREEN_CRTC0_REGISTER_OFFSET,
			EVERGREEN_CRTC1_REGISTER_OFFSET,
			EVERGREEN_CRTC2_REGISTER_OFFSET,
			EVERGREEN_CRTC3_REGISTER_OFFSET,
			EVERGREEN_CRTC4_REGISTER_OFFSET,
			EVERGREEN_CRTC5_REGISTER_OFFSET,
			0x13830 - 0x7030,
		};
		int num_afmt;

		/* Number of AFMT blocks varies per DCE generation. */
		if (ASIC_IS_DCE8(rdev))
			num_afmt = 7;
		else if (ASIC_IS_DCE6(rdev))
			num_afmt = 6;
		else if (ASIC_IS_DCE5(rdev))
			num_afmt = 6;
		else if (ASIC_IS_DCE41(rdev))
			num_afmt = 2;
		else
			num_afmt = 6;

		BUG_ON(num_afmt > ARRAY_SIZE(eg_offsets));
		for (i = 0; i < num_afmt; i++) {
			rdev->mode_info.afmt[i] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
			if (rdev->mode_info.afmt[i]) {
				rdev->mode_info.afmt[i]->offset = eg_offsets[i];
				rdev->mode_info.afmt[i]->id = i;
			}
		}
	} else if (ASIC_IS_DCE3(rdev)) {
		/* DCE3: two HDMI blocks at fixed offsets. */
		rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
		if (rdev->mode_info.afmt[0]) {
			rdev->mode_info.afmt[0]->offset = DCE3_HDMI_OFFSET0;
			rdev->mode_info.afmt[0]->id = 0;
		}
		rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
		if (rdev->mode_info.afmt[1]) {
			rdev->mode_info.afmt[1]->offset = DCE3_HDMI_OFFSET1;
			rdev->mode_info.afmt[1]->id = 1;
		}
	} else if (ASIC_IS_DCE2(rdev)) {
		/* DCE2: one HDMI block always present... */
		rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
		if (rdev->mode_info.afmt[0]) {
			rdev->mode_info.afmt[0]->offset = DCE2_HDMI_OFFSET0;
			rdev->mode_info.afmt[0]->id = 0;
		}

		/* ...plus a second one on R600 and newer. */
		if (rdev->family >= CHIP_R600) {
			rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
			if (rdev->mode_info.afmt[1]) {
				rdev->mode_info.afmt[1]->offset = DCE2_HDMI_OFFSET1;
				rdev->mode_info.afmt[1]->id = 1;
			}
		}
	}
}
1566
1567 static void radeon_afmt_fini(struct radeon_device *rdev)
1568 {
1569 int i;
1570
1571 for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++) {
1572 kfree(rdev->mode_info.afmt[i]);
1573 rdev->mode_info.afmt[i] = NULL;
1574 }
1575 }
1576
/*
 * radeon_modeset_init - bring up KMS: mode config, properties, i2c,
 * CRTCs, encoders/connectors, hpd, audio and fbdev.
 * @rdev: radeon device
 *
 * Returns 0 on success or a negative error from property creation.
 */
int radeon_modeset_init(struct radeon_device *rdev)
{
	int i;
	int ret;

	drm_mode_config_init(rdev->ddev);
	rdev->mode_info.mode_config_initialized = true;

	rdev->ddev->mode_config.funcs = &radeon_mode_funcs;

	/* Async flips need the pflip irq path, only present on R600+. */
	if (radeon_use_pflipirq == 2 && rdev->family >= CHIP_R600)
		rdev->ddev->mode_config.async_page_flip = true;

	/* Maximum surface dimensions grow with the display engine. */
	if (ASIC_IS_DCE5(rdev)) {
		rdev->ddev->mode_config.max_width = 16384;
		rdev->ddev->mode_config.max_height = 16384;
	} else if (ASIC_IS_AVIVO(rdev)) {
		rdev->ddev->mode_config.max_width = 8192;
		rdev->ddev->mode_config.max_height = 8192;
	} else {
		rdev->ddev->mode_config.max_width = 4096;
		rdev->ddev->mode_config.max_height = 4096;
	}

	rdev->ddev->mode_config.preferred_depth = 24;
	rdev->ddev->mode_config.prefer_shadow = 1;

	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;

	ret = radeon_modeset_create_props(rdev);
	if (ret) {
		return ret;
	}

	/* i2c buses must exist before connectors probe EDID over them. */
	radeon_i2c_init(rdev);

	/* Combios-only quirk handling for boards with hardcoded EDIDs. */
	if (!rdev->is_atom_bios) {
		radeon_combios_check_hardcoded_edid(rdev);
	}

	/* Allocate one drm crtc per hardware crtc. */
	for (i = 0; i < rdev->num_crtc; i++) {
		radeon_crtc_init(rdev->ddev, i);
	}

	/* NOTE(review): radeon_setup_enc_conn() appears to return a bool
	 * (true = success), so the failure path here returns 0 — presumably
	 * intentional best-effort behavior, but worth confirming. */
	ret = radeon_setup_enc_conn(rdev->ddev);
	if (!ret) {
		return ret;
	}

	/* Atombios asics get extra encoder/PLL initialization. */
	if (rdev->is_atom_bios) {
		radeon_atom_encoder_init(rdev);
		radeon_atom_disp_eng_pll_init(rdev);
	}

	/* Hotplug detection after connectors exist. */
	radeon_hpd_init(rdev);

	/* Audio blocks. */
	radeon_afmt_init(rdev);

	radeon_fbdev_init(rdev);
	drm_kms_helper_poll_init(rdev->ddev);

	/* NOTE(review): the radeon_pm_late_init() result is deliberately
	 * ignored — power management failure does not fail modeset init. */
	ret = radeon_pm_late_init(rdev);

	return 0;
}
1651
/*
 * radeon_modeset_fini - tear down KMS state set up by radeon_modeset_init().
 * @rdev: radeon device
 *
 * Teardown order mirrors init in reverse: stop polling and hpd first,
 * disable all outputs, then free fbdev, audio and the mode config.
 * Safe to call even if init never ran (guarded by the flag).
 */
void radeon_modeset_fini(struct radeon_device *rdev)
{
	if (rdev->mode_info.mode_config_initialized) {
		drm_kms_helper_poll_fini(rdev->ddev);
		radeon_hpd_fini(rdev);
		drm_helper_force_disable_all(rdev->ddev);
		radeon_fbdev_fini(rdev);
		radeon_afmt_fini(rdev);
		drm_mode_config_cleanup(rdev->ddev);
		rdev->mode_info.mode_config_initialized = false;
	}

	/* kfree(NULL) is fine if no hardcoded EDID was ever allocated. */
	kfree(rdev->mode_info.bios_hardcoded_edid);

	/* i2c buses are freed last; connectors referencing them are gone. */
	radeon_i2c_fini(rdev);
}
1669
1670 static bool is_hdtv_mode(const struct drm_display_mode *mode)
1671 {
1672
1673 if ((mode->vdisplay == 480 && mode->hdisplay == 720) ||
1674 (mode->vdisplay == 576) ||
1675 (mode->vdisplay == 720) ||
1676 (mode->vdisplay == 1080))
1677 return true;
1678 else
1679 return false;
1680 }
1681
/*
 * radeon_crtc_scaling_mode_fixup - work out the RMX scaling type, scale
 * factors and underscan borders for a CRTC from its attached encoders.
 * @crtc: the CRTC being configured
 * @mode: requested user mode
 * @adjusted_mode: mode after fixup (not modified here)
 *
 * Returns true on success, false when encoders on the same CRTC disagree
 * about scaling (which the hardware cannot express).
 */
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
				    const struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_encoder *radeon_encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	bool first = true;
	u32 src_v = 1, dst_v = 1;
	u32 src_h = 1, dst_h = 1;

	radeon_crtc->h_border = 0;
	radeon_crtc->v_border = 0;

	/* Walk every encoder driven by this CRTC. */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc != crtc)
			continue;
		radeon_encoder = to_radeon_encoder(encoder);
		connector = radeon_get_connector_for_encoder(encoder);
		radeon_connector = to_radeon_connector(connector);

		if (first) {
			/* First encoder decides the CRTC scaling: scale only
			 * when the requested mode is smaller than native. */
			if (radeon_encoder->rmx_type == RMX_OFF)
				radeon_crtc->rmx_type = RMX_OFF;
			else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
				 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
				radeon_crtc->rmx_type = radeon_encoder->rmx_type;
			else
				radeon_crtc->rmx_type = RMX_OFF;

			memcpy(&radeon_crtc->native_mode,
			       &radeon_encoder->native_mode,
			       sizeof(struct drm_display_mode));
			src_v = crtc->mode.vdisplay;
			dst_v = radeon_crtc->native_mode.vdisplay;
			src_h = crtc->mode.hdisplay;
			dst_h = radeon_crtc->native_mode.hdisplay;

			/* Underscan (AVIVO+ only, progressive modes): either
			 * explicitly on, or auto-on for HDMI HDTV modes. */
			if (ASIC_IS_AVIVO(rdev) &&
			    (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
			    ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
			     ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
			      drm_detect_hdmi_monitor(radeon_connector_edid(connector)) &&
			      is_hdtv_mode(mode)))) {
				/* Default border: ~3% of the dimension + 16. */
				if (radeon_encoder->underscan_hborder != 0)
					radeon_crtc->h_border = radeon_encoder->underscan_hborder;
				else
					radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
				if (radeon_encoder->underscan_vborder != 0)
					radeon_crtc->v_border = radeon_encoder->underscan_vborder;
				else
					radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
				radeon_crtc->rmx_type = RMX_FULL;
				src_v = crtc->mode.vdisplay;
				dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
				src_h = crtc->mode.hdisplay;
				dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
			}
			first = false;
		} else {
			/* Subsequent encoders must agree with the first; the
			 * CRTC has a single scaler configuration. */
			if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
				DRM_ERROR("Scaling not consistent across encoder.\n");
				return false;
			}
		}
	}
	/* Fixed-point src/dst ratios for the scaler; 1.0 when not scaling. */
	if (radeon_crtc->rmx_type != RMX_OFF) {
		fixed20_12 a, b;
		a.full = dfixed_const(src_v);
		b.full = dfixed_const(dst_v);
		radeon_crtc->vsc.full = dfixed_div(a, b);
		a.full = dfixed_const(src_h);
		b.full = dfixed_const(dst_h);
		radeon_crtc->hsc.full = dfixed_div(a, b);
	} else {
		radeon_crtc->vsc.full = dfixed_const(1);
		radeon_crtc->hsc.full = dfixed_const(1);
	}
	return true;
}
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812 int radeon_get_crtc_scanoutpos(struct drm_device *dev, unsigned int pipe,
1813 unsigned int flags, int *vpos, int *hpos,
1814 ktime_t *stime, ktime_t *etime,
1815 const struct drm_display_mode *mode)
1816 {
1817 u32 stat_crtc = 0, vbl = 0, position = 0;
1818 int vbl_start, vbl_end, vtotal, ret = 0;
1819 bool in_vbl = true;
1820
1821 struct radeon_device *rdev = dev->dev_private;
1822
1823
1824
1825
1826 if (stime)
1827 *stime = ktime_get();
1828
1829 if (ASIC_IS_DCE4(rdev)) {
1830 if (pipe == 0) {
1831 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1832 EVERGREEN_CRTC0_REGISTER_OFFSET);
1833 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1834 EVERGREEN_CRTC0_REGISTER_OFFSET);
1835 ret |= DRM_SCANOUTPOS_VALID;
1836 }
1837 if (pipe == 1) {
1838 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1839 EVERGREEN_CRTC1_REGISTER_OFFSET);
1840 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1841 EVERGREEN_CRTC1_REGISTER_OFFSET);
1842 ret |= DRM_SCANOUTPOS_VALID;
1843 }
1844 if (pipe == 2) {
1845 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1846 EVERGREEN_CRTC2_REGISTER_OFFSET);
1847 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1848 EVERGREEN_CRTC2_REGISTER_OFFSET);
1849 ret |= DRM_SCANOUTPOS_VALID;
1850 }
1851 if (pipe == 3) {
1852 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1853 EVERGREEN_CRTC3_REGISTER_OFFSET);
1854 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1855 EVERGREEN_CRTC3_REGISTER_OFFSET);
1856 ret |= DRM_SCANOUTPOS_VALID;
1857 }
1858 if (pipe == 4) {
1859 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1860 EVERGREEN_CRTC4_REGISTER_OFFSET);
1861 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1862 EVERGREEN_CRTC4_REGISTER_OFFSET);
1863 ret |= DRM_SCANOUTPOS_VALID;
1864 }
1865 if (pipe == 5) {
1866 vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1867 EVERGREEN_CRTC5_REGISTER_OFFSET);
1868 position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1869 EVERGREEN_CRTC5_REGISTER_OFFSET);
1870 ret |= DRM_SCANOUTPOS_VALID;
1871 }
1872 } else if (ASIC_IS_AVIVO(rdev)) {
1873 if (pipe == 0) {
1874 vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END);
1875 position = RREG32(AVIVO_D1CRTC_STATUS_POSITION);
1876 ret |= DRM_SCANOUTPOS_VALID;
1877 }
1878 if (pipe == 1) {
1879 vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END);
1880 position = RREG32(AVIVO_D2CRTC_STATUS_POSITION);
1881 ret |= DRM_SCANOUTPOS_VALID;
1882 }
1883 } else {
1884
1885 if (pipe == 0) {
1886
1887
1888
1889 vbl = (RREG32(RADEON_CRTC_V_TOTAL_DISP) &
1890 RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
1891
1892 position = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
1893 stat_crtc = RREG32(RADEON_CRTC_STATUS);
1894 if (!(stat_crtc & 1))
1895 in_vbl = false;
1896
1897 ret |= DRM_SCANOUTPOS_VALID;
1898 }
1899 if (pipe == 1) {
1900 vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) &
1901 RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
1902 position = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
1903 stat_crtc = RREG32(RADEON_CRTC2_STATUS);
1904 if (!(stat_crtc & 1))
1905 in_vbl = false;
1906
1907 ret |= DRM_SCANOUTPOS_VALID;
1908 }
1909 }
1910
1911
1912 if (etime)
1913 *etime = ktime_get();
1914
1915
1916
1917
1918 *vpos = position & 0x1fff;
1919 *hpos = (position >> 16) & 0x1fff;
1920
1921
1922 if (vbl > 0) {
1923
1924 ret |= DRM_SCANOUTPOS_ACCURATE;
1925 vbl_start = vbl & 0x1fff;
1926 vbl_end = (vbl >> 16) & 0x1fff;
1927 }
1928 else {
1929
1930 vbl_start = mode->crtc_vdisplay;
1931 vbl_end = 0;
1932 }
1933
1934
1935 if (flags & GET_DISTANCE_TO_VBLANKSTART) {
1936
1937 *hpos = *vpos - vbl_start;
1938 }
1939
1940
1941
1942
1943
1944
1945
1946
1947
1948
1949
1950 if (!(flags & USE_REAL_VBLANKSTART))
1951 vbl_start -= rdev->mode_info.crtcs[pipe]->lb_vblank_lead_lines;
1952
1953
1954 if ((*vpos < vbl_start) && (*vpos >= vbl_end))
1955 in_vbl = false;
1956
1957
1958 if (in_vbl)
1959 ret |= DRM_SCANOUTPOS_IN_VBLANK;
1960
1961
1962 if (flags & GET_DISTANCE_TO_VBLANKSTART) {
1963
1964 *vpos -= vbl_start;
1965 return ret;
1966 }
1967
1968
1969
1970
1971
1972
1973
1974
1975 if (in_vbl && (*vpos >= vbl_start)) {
1976 vtotal = mode->crtc_vtotal;
1977 *vpos = *vpos - vtotal;
1978 }
1979
1980
1981 *vpos = *vpos - vbl_end;
1982
1983 return ret;
1984 }