1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24 
25 #include <linux/dma-mapping.h>
26 
27 #include <drm/drmP.h>
28 #include <drm/drm_crtc_helper.h>
29 #include <drm/drm_plane_helper.h>
30 #include <drm/drm_dp_helper.h>
31 
32 #include <nvif/class.h>
33 
34 #include "nouveau_drm.h"
35 #include "nouveau_dma.h"
36 #include "nouveau_gem.h"
37 #include "nouveau_connector.h"
38 #include "nouveau_encoder.h"
39 #include "nouveau_crtc.h"
40 #include "nouveau_fence.h"
41 #include "nv50_display.h"
42 
/* Number of DMA-mode EVO channels: 1 master (core) + 4 flip (base)
 * + 4 overlay, per the channel index layout below.
 */
#define EVO_DMA_NR 9

/* Channel indices: master/core first, then per-head flip (base),
 * overlay, overlay-immediate and cursor channels.
 */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
56 
57 /******************************************************************************
58  * EVO channel
59  *****************************************************************************/
60 
/* State common to every EVO channel: the nvif object backing the
 * channel and the device it was created on.
 */
struct nv50_chan {
	struct nvif_object user;
	struct nvif_device *device;
};
65 
66 static int
nv50_chan_create(struct nvif_device * device,struct nvif_object * disp,const s32 * oclass,u8 head,void * data,u32 size,struct nv50_chan * chan)67 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
68 		 const s32 *oclass, u8 head, void *data, u32 size,
69 		 struct nv50_chan *chan)
70 {
71 	struct nvif_sclass *sclass;
72 	int ret, i, n;
73 
74 	chan->device = device;
75 
76 	ret = n = nvif_object_sclass_get(disp, &sclass);
77 	if (ret < 0)
78 		return ret;
79 
80 	while (oclass[0]) {
81 		for (i = 0; i < n; i++) {
82 			if (sclass[i].oclass == oclass[0]) {
83 				ret = nvif_object_init(disp, 0, oclass[0],
84 						       data, size, &chan->user);
85 				if (ret == 0)
86 					nvif_object_map(&chan->user);
87 				nvif_object_sclass_put(&sclass);
88 				return ret;
89 			}
90 		}
91 		oclass++;
92 	}
93 
94 	nvif_object_sclass_put(&sclass);
95 	return -ENOSYS;
96 }
97 
/* Destroy the nvif object backing an EVO channel. */
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}
103 
104 /******************************************************************************
105  * PIO EVO channel
106  *****************************************************************************/
107 
/* A PIO-mode EVO channel: driven via registers, no push buffer. */
struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

/* Thin wrapper: PIO channels carry no state beyond the base channel. */
static int
nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_pioc *pioc)
{
	return nv50_chan_create(device, disp, oclass, head, data, size,
				&pioc->base);
}
126 
127 /******************************************************************************
128  * Cursor Immediate
129  *****************************************************************************/
130 
/* Cursor-immediate PIO channel for one head. */
struct nv50_curs {
	struct nv50_pioc base;
};

static int
nv50_curs_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_curs *curs)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	/* candidate classes, newest hardware first; list order is the
	 * preference order used by nv50_chan_create()
	 */
	static const s32 oclass[] = {
		GK104_DISP_CURSOR,
		GF110_DISP_CURSOR,
		GT214_DISP_CURSOR,
		G82_DISP_CURSOR,
		NV50_DISP_CURSOR,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&curs->base);
}
154 
155 /******************************************************************************
156  * Overlay Immediate
157  *****************************************************************************/
158 
/* Overlay-immediate PIO channel for one head. */
struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, struct nv50_oimm *oimm)
{
	struct nv50_disp_cursor_v0 args = {
		.head = head,
	};
	/* candidate classes, newest hardware first */
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY,
		GF110_DISP_OVERLAY,
		GT214_DISP_OVERLAY,
		G82_DISP_OVERLAY,
		NV50_DISP_OVERLAY,
		0
	};

	return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}
182 
183 /******************************************************************************
184  * DMA EVO channel
185  *****************************************************************************/
186 
/* A DMA-mode EVO channel: methods are written into a one-page push
 * buffer and submitted by bumping the channel's PUT register.
 */
struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* DMA address of the push buffer page */
	u32 *ptr;		/* CPU mapping of the push buffer page */

	struct nvif_object sync;	/* DMA object over the shared sync bo */
	struct nvif_object vram;	/* DMA object covering all of VRAM */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};
200 
201 static void
nv50_dmac_destroy(struct nv50_dmac * dmac,struct nvif_object * disp)202 nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
203 {
204 	struct nvif_device *device = dmac->base.device;
205 
206 	nvif_object_fini(&dmac->vram);
207 	nvif_object_fini(&dmac->sync);
208 
209 	nv50_chan_destroy(&dmac->base);
210 
211 	if (dmac->ptr) {
212 		struct device *dev = nvxx_device(device)->dev;
213 		dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
214 	}
215 }
216 
217 static int
nv50_dmac_create(struct nvif_device * device,struct nvif_object * disp,const s32 * oclass,u8 head,void * data,u32 size,u64 syncbuf,struct nv50_dmac * dmac)218 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
219 		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
220 		 struct nv50_dmac *dmac)
221 {
222 	struct nv50_disp_core_channel_dma_v0 *args = data;
223 	struct nvif_object pushbuf;
224 	int ret;
225 
226 	mutex_init(&dmac->lock);
227 
228 	dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
229 				       &dmac->handle, GFP_KERNEL);
230 	if (!dmac->ptr)
231 		return -ENOMEM;
232 
233 	ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
234 			       &(struct nv_dma_v0) {
235 					.target = NV_DMA_V0_TARGET_PCI_US,
236 					.access = NV_DMA_V0_ACCESS_RD,
237 					.start = dmac->handle + 0x0000,
238 					.limit = dmac->handle + 0x0fff,
239 			       }, sizeof(struct nv_dma_v0), &pushbuf);
240 	if (ret)
241 		return ret;
242 
243 	args->pushbuf = nvif_handle(&pushbuf);
244 
245 	ret = nv50_chan_create(device, disp, oclass, head, data, size,
246 			       &dmac->base);
247 	nvif_object_fini(&pushbuf);
248 	if (ret)
249 		return ret;
250 
251 	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
252 			       &(struct nv_dma_v0) {
253 					.target = NV_DMA_V0_TARGET_VRAM,
254 					.access = NV_DMA_V0_ACCESS_RDWR,
255 					.start = syncbuf + 0x0000,
256 					.limit = syncbuf + 0x0fff,
257 			       }, sizeof(struct nv_dma_v0),
258 			       &dmac->sync);
259 	if (ret)
260 		return ret;
261 
262 	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
263 			       &(struct nv_dma_v0) {
264 					.target = NV_DMA_V0_TARGET_VRAM,
265 					.access = NV_DMA_V0_ACCESS_RDWR,
266 					.start = 0,
267 					.limit = device->info.ram_user - 1,
268 			       }, sizeof(struct nv_dma_v0),
269 			       &dmac->vram);
270 	if (ret)
271 		return ret;
272 
273 	return ret;
274 }
275 
276 /******************************************************************************
277  * Core
278  *****************************************************************************/
279 
/* The core (master) EVO channel. */
struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
		 u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	/* candidate classes, newest hardware first */
	static const s32 oclass[] = {
		GM204_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
				syncbuf, &core->base);
}
308 
309 /******************************************************************************
310  * Base
311  *****************************************************************************/
312 
/* Base (flip) channel for one head, plus the page-flip semaphore
 * state: offset within the sync bo and the next expected value.
 */
struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;
	u32 data;
};

static int
nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	/* candidate classes, newest hardware first */
	static const s32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}
341 
342 /******************************************************************************
343  * Overlay
344  *****************************************************************************/
345 
/* Overlay DMA channel for one head. */
struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
		 int head, u64 syncbuf, struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	/* candidate classes, newest hardware first */
	static const s32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}
371 
/* Per-CRTC state: the generic nouveau CRTC plus this head's private
 * EVO channels and a reference to the currently-displayed buffer.
 */
struct nv50_head {
	struct nouveau_crtc base;
	struct nouveau_bo *image;
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

/* Accessors from a drm_crtc to the head and its channels. */
#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_curs(c) (&nv50_head(c)->curs)
#define nv50_sync(c) (&nv50_head(c)->sync)
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass
388 
/* Per-framebuffer DMA objects: one for the core channel and one for
 * each head's base channel.
 */
struct nv50_fbdma {
	struct list_head head;
	struct nvif_object core;
	struct nvif_object base[4];
};

/* Top-level display state. */
struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;	/* core (master) EVO channel */

	struct list_head fbdma;	/* list of nv50_fbdma */

	struct nouveau_bo *sync;	/* shared sync/semaphore buffer */
};
403 
/* Fetch the nv50 display state hung off the generic display struct. */
static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

/* Shorthand for the device's core (master) channel. */
#define nv50_mast(d) (&nv50_disp(d)->mast)
411 
/* Return the CRTC the encoder is currently attached to (may be NULL). */
static struct drm_crtc *
nv50_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}
417 
418 /******************************************************************************
419  * EVO channel helpers
420  *****************************************************************************/
421 static u32 *
evo_wait(void * evoc,int nr)422 evo_wait(void *evoc, int nr)
423 {
424 	struct nv50_dmac *dmac = evoc;
425 	struct nvif_device *device = dmac->base.device;
426 	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
427 
428 	mutex_lock(&dmac->lock);
429 	if (put + nr >= (PAGE_SIZE / 4) - 8) {
430 		dmac->ptr[put] = 0x20000000;
431 
432 		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
433 		if (nvif_msec(device, 2000,
434 			if (!nvif_rd32(&dmac->base.user, 0x0004))
435 				break;
436 		) < 0) {
437 			mutex_unlock(&dmac->lock);
438 			printk(KERN_ERR "nouveau: evo channel stalled\n");
439 			return NULL;
440 		}
441 
442 		put = 0;
443 	}
444 
445 	return dmac->ptr + put;
446 }
447 
/* Submit everything written since the matching evo_wait() by updating
 * the channel's PUT register (byte offset), then drop the pushbuf lock
 * taken by evo_wait().
 */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
455 
/* Emit a method header / data word into the space reserved by
 * evo_wait().  The "#if 1" variants are the production form; flipping
 * to the "#else" variants logs every method and dword for debugging.
 */
#if 1
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)
#else
#define evo_mthd(p,m,s) do {                                                   \
	const u32 _m = (m), _s = (s);                                          \
	printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);                     \
	*((p)++) = ((_s << 18) | _m);                                          \
} while(0)
#define evo_data(p,d) do {                                                     \
	const u32 _d = (d);                                                    \
	printk(KERN_ERR "\t%08x\n", _d);                                       \
	*((p)++) = _d;                                                         \
} while(0)
#endif
471 
472 static bool
evo_sync_wait(void * data)473 evo_sync_wait(void *data)
474 {
475 	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
476 		return true;
477 	usleep_range(1, 2);
478 	return false;
479 }
480 
/* Ask the core channel to write its notifier and wait (up to 2s) for
 * it, guaranteeing all previously-submitted core methods have been
 * processed.  Returns 0 on success, -EBUSY on timeout or if no
 * pushbuf space could be reserved.
 */
static int
evo_sync(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		/* clear the notifier slot, then request it be rewritten */
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nvif_msec(device, 2000,
			if (evo_sync_wait(disp->sync))
				break;
		) >= 0)
			return 0;
	}

	return -EBUSY;
}
505 
506 /******************************************************************************
507  * Page flipping channel
508  *****************************************************************************/
/* Return the buffer object used for page-flip semaphores.  All CRTCs
 * share the display's single sync bo, so @crtc is unused here.
 */
struct nouveau_bo *
nv50_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nv50_disp(dev)->sync;
}

/* Context handed to nv50_display_flip_wait(). */
struct nv50_display_flip {
	struct nv50_disp *disp;
	struct nv50_sync *chan;
};
519 
520 static bool
nv50_display_flip_wait(void * data)521 nv50_display_flip_wait(void *data)
522 {
523 	struct nv50_display_flip *flip = data;
524 	if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
525 					      flip->chan->data)
526 		return true;
527 	usleep_range(1, 2);
528 	return false;
529 }
530 
/* Cancel any pending page-flip on @crtc's base channel and wait (up to
 * 2s) for the channel's semaphore to reach its expected value.
 */
void
nv50_display_flip_stop(struct drm_crtc *crtc)
{
	struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
	struct nv50_display_flip flip = {
		.disp = nv50_disp(crtc->dev),
		.chan = nv50_sync(crtc),
	};
	u32 *push;

	push = evo_wait(flip.chan, 8);
	if (push) {
		/* zero the notifier/semaphore/image methods, then update */
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, flip.chan);
	}

	nvif_msec(device, 2000,
		if (nv50_display_flip_wait(&flip))
			break;
	);
}
559 
/* Queue a page flip to @fb on @crtc.  When @chan is non-NULL the flip
 * is synchronised against rendering via a semaphore in the shared sync
 * bo: the channel releases sync->data+1 at the "other" slot then waits
 * for the display to acquire, and the flip methods release the new
 * value once the flip completes.  With @chan NULL the display is
 * synced directly via evo_sync().
 *
 * Returns 0 on success, -EINVAL if @fb's size differs from the current
 * fb, -EBUSY if no pushbuf space is available, or a ring-space errno.
 */
int
nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_sync *sync = nv50_sync(crtc);
	u32 *push;
	int ret;

	if (crtc->primary->fb->width != fb->width ||
	    crtc->primary->fb->height != fb->height)
		return -EINVAL;

	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;	/* no vblank wait */
	if (chan == NULL)
		evo_sync(crtc->dev);

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	/* emit the semaphore release/acquire pair appropriate for the
	 * channel's class generation
	 */
	if (chan && chan->user.oclass < G82_CHANNEL_GPFIFO) {
		ret = RING_SPACE(chan, 8);
		if (ret)
			return ret;

		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
		OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
		OUT_RING  (chan, sync->addr ^ 0x10);
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
		OUT_RING  (chan, sync->data + 1);
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
		OUT_RING  (chan, sync->addr);
		OUT_RING  (chan, sync->data);
	} else
	if (chan && chan->user.oclass < FERMI_CHANNEL_GPFIFO) {
		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
		ret = RING_SPACE(chan, 12);
		if (ret)
			return ret;

		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
		OUT_RING  (chan, chan->vram.handle);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
		OUT_RING  (chan, sync->data + 1);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr));
		OUT_RING  (chan, lower_32_bits(addr));
		OUT_RING  (chan, sync->data);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
	} else
	if (chan) {
		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;

		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
		OUT_RING  (chan, sync->data + 1);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr));
		OUT_RING  (chan, lower_32_bits(addr));
		OUT_RING  (chan, sync->data);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
	}

	if (chan) {
		/* flip to the other semaphore slot for the next flip */
		sync->addr ^= 0x10;
		sync->data++;
		FIRE_RING (chan);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	evo_mthd(push, 0x0088, 4);
	evo_data(push, sync->addr);
	evo_data(push, sync->data++);
	evo_data(push, sync->data);
	evo_data(push, sync->base.sync.handle);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_handle);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	/* surface description method base differs by class generation */
	if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) {
		evo_mthd(push, 0x0800, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	} else {
		evo_mthd(push, 0x0400, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	}
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	/* keep a reference to the buffer now on screen */
	nouveau_bo_ref(nv_fb->nvbo, &head->image);
	return 0;
}
688 
689 /******************************************************************************
690  * CRTC
691  *****************************************************************************/
/* Program the dithering mode for @nv_crtc, resolving AUTO mode/depth
 * from the connector's display info; when @update is set, also trigger
 * an immediate core-channel update.  Always returns 0.
 */
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		/* dither only when the fb has more depth than the sink */
		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(mast, 4);
	if (push) {
		/* method offset differs per display class generation */
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
			evo_data(push, mode);
		} else
		if (nv50_vers(mast) < GK104_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		} else {
			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
739 
/* Program the scaler for @nv_crtc: compute the output size from the
 * connector's scaling mode, underscan borders and CENTER/ASPECT
 * policy, then write it to the core channel.  When @update is set,
 * a flip is restarted to apply the change.  Always returns 0.
 */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		mode = nv_connector->scaling_mode;
		if (nv_connector->scaling_full) /* non-EDID LVDS/eDP mode */
			mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 19.13 fixed point */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* no border given: default to ~6% + 32 pixels */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		/* method offsets differ per display class generation */
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->primary->fb,
					       NULL, 1);
		}
	}

	return 0;
}
847 
848 static int
nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc * nv_crtc,u32 usec)849 nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc *nv_crtc, u32 usec)
850 {
851 	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
852 	u32 *push;
853 
854 	push = evo_wait(mast, 8);
855 	if (!push)
856 		return -ENOMEM;
857 
858 	evo_mthd(push, 0x0828 + (nv_crtc->index * 0x400), 1);
859 	evo_data(push, usec);
860 	evo_kick(push, mast);
861 	return 0;
862 }
863 
/* Program colour vibrance and vibrant hue for @nv_crtc, scaling the
 * -100..100 user range into the hardware's 12-bit fields; when
 * @update is set, also trigger a core-channel update.  Always
 * returns 0.
 */
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	/* round-half-away-from-zero when scaling to 0..4094 */
	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		/* method offset differs per display class generation */
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
894 
/* Point @nv_crtc's core-channel image at @fb with pan offset (@x, @y);
 * when @update is set, also trigger an immediate update.  Always
 * returns 0.
 */
static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		/* method layout differs per display class generation */
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			/* pre-G82 classes have no per-fb dma handle method */
			if (nv50_vers(mast) > NV50_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_handle);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_handle);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	/* remember the dma handle for later comparisons */
	nv_crtc->fb.handle = nvfb->r_handle;
	return 0;
}
940 
/* Enable the hardware cursor on @nv_crtc, pointing it at the cursor
 * buffer object, and mark it visible.  Class generations differ in
 * method offsets and whether a dma handle must be supplied.
 */
static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
		}
		evo_kick(push, mast);
	}
	nv_crtc->cursor.visible = true;
}
969 
/* Disable the hardware cursor on this head.  Mirror of
 * nv50_crtc_cursor_show(): same per-generation method layout, but writes
 * the cursor-disable control word and clears the ctxdma handle where one
 * was set.
 */
static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			/* 0x05000000: cursor disable control word (hw-specific) */
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			/* drop the cursor ctxdma set by cursor_show() */
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
	nv_crtc->cursor.visible = false;
}
995 
996 static void
nv50_crtc_cursor_show_hide(struct nouveau_crtc * nv_crtc,bool show,bool update)997 nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
998 {
999 	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
1000 
1001 	if (show && nv_crtc->cursor.nvbo && nv_crtc->base.enabled)
1002 		nv50_crtc_cursor_show(nv_crtc);
1003 	else
1004 		nv50_crtc_cursor_hide(nv_crtc);
1005 
1006 	if (update) {
1007 		u32 *push = evo_wait(mast, 2);
1008 		if (push) {
1009 			evo_mthd(push, 0x0080, 1);
1010 			evo_data(push, 0x00000000);
1011 			evo_kick(push, mast);
1012 		}
1013 	}
1014 }
1015 
/* CRTC-level DPMS hook: intentionally a no-op here; power handling is
 * presumably done per-encoder (see nv50_dac_dpms/nv50_sor_dpms) —
 * NOTE(review): confirm against the helper framework's expectations.
 */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
1020 
/* drm_crtc_helper "prepare" hook: quiesce the head before a modeset.
 * Stops any pending page flip, then clears the head's framebuffer handle
 * and blanks it via the core channel (method numbers/values are
 * hardware-specific per display generation), and finally hides the
 * cursor without kicking an update — the whole change latches with the
 * rest of the modeset.
 */
static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 6);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) <  GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			/* G82+: also clear the head's ctxdma handle */
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			/* GF110+: 0x300 stride, different blank control value */
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}
1059 
/* drm_crtc_helper "commit" hook: re-enable the head after a modeset.
 * Restores the framebuffer handle and un-blanks the head (pointing the
 * hardware at the gamma LUT bo), then shows the cursor with an immediate
 * update and re-arms the page-flip machinery.  Inverse of
 * nv50_crtc_prepare().
 */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			/* 0xc0000000 + LUT offset: enable head with gamma LUT */
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			/* G82+: restore the VRAM ctxdma cleared in prepare() */
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
}
1104 
/* drm_crtc_helper "mode_fixup" hook: populate the crtc_* timing fields
 * of the adjusted mode, halving vertical timings for interlaced modes.
 * Never rejects a mode.
 */
static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}
1112 
1113 static int
nv50_crtc_swap_fbs(struct drm_crtc * crtc,struct drm_framebuffer * old_fb)1114 nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1115 {
1116 	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1117 	struct nv50_head *head = nv50_head(crtc);
1118 	int ret;
1119 
1120 	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM, true);
1121 	if (ret == 0) {
1122 		if (head->image)
1123 			nouveau_bo_unpin(head->image);
1124 		nouveau_bo_ref(nvfb->nvbo, &head->image);
1125 	}
1126 
1127 	return ret;
1128 }
1129 
/* drm_crtc_helper "mode_set" hook: derive raw CRTC timing parameters
 * from the adjusted mode and program them into the core channel, then
 * apply dither/scale/colour settings and point the head at the new
 * framebuffer.  @umode is the user-requested mode, @mode the adjusted
 * one actually programmed.  Returns 0 on success or a negative error
 * from the framebuffer swap.
 */
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	/* scale factors: interlace halves vertical timings, doublescan doubles */
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1, vblankus = 0;
	u32 *push;
	int ret;

	/* Horizontal timings, expressed as offsets from the sync start. */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* Vertical timings, adjusted for interlace/doublescan. */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	/* XXX: Safe underestimate, even "0" works */
	/* vblank duration in microseconds, used for the DMI vblank method */
	vblankus = (vactive - mode->vdisplay - 2) * hactive;
	vblankus *= 1000;
	vblankus /= mode->clock;

	/* Second field timings for interlaced modes. */
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/* pre-GF110: clock + interlace flag, then timing block */
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			/* GF110+: timing block first, pixel clock in Hz */
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	/* NOTE(review): nv_connector is looked up but not used below;
	 * presumably leftover from an earlier version — confirm before
	 * removing. */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);

	/* G94 only accepts this after setting scale */
	if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA)
		nv50_crtc_set_raster_vblank_dmi(nv_crtc, vblankus);

	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
	return 0;
}
1226 
1227 static int
nv50_crtc_mode_set_base(struct drm_crtc * crtc,int x,int y,struct drm_framebuffer * old_fb)1228 nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1229 			struct drm_framebuffer *old_fb)
1230 {
1231 	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1232 	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1233 	int ret;
1234 
1235 	if (!crtc->primary->fb) {
1236 		NV_DEBUG(drm, "No FB bound\n");
1237 		return 0;
1238 	}
1239 
1240 	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1241 	if (ret)
1242 		return ret;
1243 
1244 	nv50_display_flip_stop(crtc);
1245 	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
1246 	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1247 	return 0;
1248 }
1249 
/* Atomic (e.g. kgdb/panic-path) variant of mode_set_base: program the
 * given fb immediately without pinning/swapping bos.  @state is unused
 * here.  Always returns 0.
 */
static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
1260 
1261 static void
nv50_crtc_lut_load(struct drm_crtc * crtc)1262 nv50_crtc_lut_load(struct drm_crtc *crtc)
1263 {
1264 	struct nv50_disp *disp = nv50_disp(crtc->dev);
1265 	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1266 	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1267 	int i;
1268 
1269 	for (i = 0; i < 256; i++) {
1270 		u16 r = nv_crtc->lut.r[i] >> 2;
1271 		u16 g = nv_crtc->lut.g[i] >> 2;
1272 		u16 b = nv_crtc->lut.b[i] >> 2;
1273 
1274 		if (disp->disp->oclass < GF110_DISP) {
1275 			writew(r + 0x0000, lut + (i * 0x08) + 0);
1276 			writew(g + 0x0000, lut + (i * 0x08) + 2);
1277 			writew(b + 0x0000, lut + (i * 0x08) + 4);
1278 		} else {
1279 			writew(r + 0x6000, lut + (i * 0x20) + 0);
1280 			writew(g + 0x6000, lut + (i * 0x20) + 2);
1281 			writew(b + 0x6000, lut + (i * 0x20) + 4);
1282 		}
1283 	}
1284 }
1285 
/* drm_crtc_helper "disable" hook: wait for the display engine to go
 * idle, then release the pin and reference held on the head's current
 * scanout bo (taken in nv50_crtc_swap_fbs()).
 */
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	evo_sync(crtc->dev);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}
1295 
/* drm_crtc "cursor_set" hook: bind (handle != 0) or unbind (handle == 0)
 * a 64x64 cursor bo to this CRTC.  On success the new bo is pinned in
 * VRAM and replaces the previous one (which is unpinned); the cursor is
 * then shown/hidden as appropriate with an immediate update.
 *
 * Returns 0 on success, -EINVAL for unsupported dimensions, -ENOENT for
 * an unknown handle, or a negative error from nouveau_bo_pin().
 */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem = NULL;
	struct nouveau_bo *nvbo = NULL;
	int ret = 0;

	if (handle) {
		/* hardware cursor is fixed at 64x64 */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true);
	}

	/* On success (or unbind), swap the tracked cursor bo; the old one
	 * loses its pin.  nvbo is NULL on unbind, which clears the ref. */
	if (ret == 0) {
		if (nv_crtc->cursor.nvbo)
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		nouveau_bo_ref(nvbo, &nv_crtc->cursor.nvbo);
	}
	/* drop the lookup reference; nouveau_bo_ref holds its own */
	drm_gem_object_unreference_unlocked(gem);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	return ret;
}
1328 
/* drm_crtc "cursor_move" hook: reposition the cursor via the dedicated
 * per-head cursor (PIOC) channel and record the position so it can be
 * restored after suspend/resume.  Always returns 0.
 */
static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	/* 0x0084: set position (y:16|x); 0x0080: update/latch */
	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nvif_wr32(&chan->user, 0x0080, 0x00000000);

	nv_crtc->cursor_saved_x = x;
	nv_crtc->cursor_saved_y = y;
	return 0;
}
1342 
1343 static void
nv50_crtc_gamma_set(struct drm_crtc * crtc,u16 * r,u16 * g,u16 * b,uint32_t start,uint32_t size)1344 nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1345 		    uint32_t start, uint32_t size)
1346 {
1347 	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1348 	u32 end = min_t(u32, start + size, 256);
1349 	u32 i;
1350 
1351 	for (i = start; i < end; i++) {
1352 		nv_crtc->lut.r[i] = r[i];
1353 		nv_crtc->lut.g[i] = g[i];
1354 		nv_crtc->lut.b[i] = b[i];
1355 	}
1356 
1357 	nv50_crtc_lut_load(crtc);
1358 }
1359 
/* Restore cursor position and visibility, e.g. after a resume; wired up
 * as the base cursor.set_pos callback in nv50_crtc_create().
 */
static void
nv50_crtc_cursor_restore(struct nouveau_crtc *nv_crtc, int x, int y)
{
	nv50_crtc_cursor_move(&nv_crtc->base, x, y);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
}
1367 
/* drm_crtc "destroy" hook: tear down all per-head resources — the
 * per-head fbdma objects, the four EVO channels (overlay, overlay
 * immediate, base/sync, cursor), any leftover pinned bos, and the gamma
 * LUT bo — then free the containing nv50_head.  Also used as the error
 * path of nv50_crtc_create().
 */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_fbdma *fbdma;

	/* drop this head's slot in every framebuffer ctxdma */
	list_for_each_entry(fbdma, &disp->fbdma, head) {
		nvif_object_fini(&fbdma->base[nv_crtc->index]);
	}

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);
	nv50_dmac_destroy(&head->sync.base, disp->disp);
	nv50_pioc_destroy(&head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 *     disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	/*XXX: ditto */
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);

	/* nouveau_bo_unmap/ref tolerate a NULL bo (partial-create path) */
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
1405 
/* CRTC helper vtable: modeset sequencing callbacks used by the legacy
 * drm_crtc_helper framework (dpms is a no-op on this hardware). */
static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
	.disable = nv50_crtc_disable,
};
1417 
/* CRTC core vtable: cursor/gamma/flip entry points; set_config and
 * page_flip are the driver-wide nouveau implementations. */
static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = nouveau_crtc_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
1426 
/* Allocate and register CRTC @index: sets up the nv50_head wrapper, an
 * identity gamma LUT (pinned+mapped in VRAM), and the head's four EVO
 * channels (cursor, base/sync, overlay-immediate, overlay).  On any
 * failure the partially constructed CRTC is torn down via
 * nv50_crtc_destroy().  Returns 0 or a negative error.
 */
static int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvif_device *device = &drm->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	head->base.set_dither = nv50_crtc_set_dither;
	head->base.set_scale = nv50_crtc_set_scale;
	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
	head->base.color_vibrance = 50;
	head->base.vibrant_hue = 0;
	head->base.cursor.set_pos = nv50_crtc_cursor_restore;
	/* identity gamma ramp: 8-bit index scaled to the 16-bit range */
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	/* NOTE(review): drm_crtc_init()'s return value is ignored here;
	 * it can fail — confirm whether that is acceptable. */
	crtc = &head->base.base;
	drm_crtc_init(dev, crtc, &nv50_crtc_func);
	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* gamma LUT backing store: 8KiB VRAM bo, pinned and CPU-mapped */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate cursor resources */
	ret = nv50_curs_create(device, disp->disp, index, &head->curs);
	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nv50_base_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->sync);
	if (ret)
		goto out;

	head->sync.addr = EVO_FLIP_SEM0(index);
	head->sync.data = 0x00000000;

	/* allocate overlay resources */
	ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_crtc_destroy(crtc);
	return ret;
}
1504 
1505 /******************************************************************************
1506  * Encoder helpers
1507  *****************************************************************************/
1508 static bool
nv50_encoder_mode_fixup(struct drm_encoder * encoder,const struct drm_display_mode * mode,struct drm_display_mode * adjusted_mode)1509 nv50_encoder_mode_fixup(struct drm_encoder *encoder,
1510 			const struct drm_display_mode *mode,
1511 			struct drm_display_mode *adjusted_mode)
1512 {
1513 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1514 	struct nouveau_connector *nv_connector;
1515 
1516 	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1517 	if (nv_connector && nv_connector->native_mode) {
1518 		nv_connector->scaling_full = false;
1519 		if (nv_connector->scaling_mode == DRM_MODE_SCALE_NONE) {
1520 			switch (nv_connector->type) {
1521 			case DCB_CONNECTOR_LVDS:
1522 			case DCB_CONNECTOR_LVDS_SPWG:
1523 			case DCB_CONNECTOR_eDP:
1524 				/* force use of scaler for non-edid modes */
1525 				if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
1526 					return true;
1527 				nv_connector->scaling_full = true;
1528 				break;
1529 			default:
1530 				return true;
1531 			}
1532 		}
1533 
1534 		drm_mode_copy(adjusted_mode, nv_connector->native_mode);
1535 	}
1536 
1537 	return true;
1538 }
1539 
1540 /******************************************************************************
1541  * DAC
1542  *****************************************************************************/
/* DAC DPMS: program sync-signal power via the firmware method interface.
 * DRM DPMS levels map to sync activity: STANDBY drops hsync, SUSPEND
 * drops vsync, OFF drops both; the DAC itself stays powered
 * (.pwr.state/.pwr.data are always 1 here).
 */
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data  = 1,
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
1566 
/* Encoder "commit" hook for DACs: nothing to do — the output is fully
 * programmed in nv50_dac_mode_set().
 */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}
1571 
/* Encoder "mode_set" hook for DACs: power the DAC on, then attach it to
 * its CRTC via the core channel, encoding sync polarity (and, on GF110+,
 * interlace) into the per-generation control words.  Records the CRTC in
 * nv_encoder->crtc for later disconnect.
 */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/* sync-polarity bits for negative h/v sync */
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			/* per-OR method: owner-CRTC mask + sync control */
			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			/* GF110+: per-head sync/"magic" words, then OR owner */
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
1620 
/* Detach the DAC from its CRTC: blanks the head (via the prepare hook)
 * and clears the OR's owner mask in the core channel.  Used as both the
 * encoder "prepare" and "disable" callbacks.  No-op if not attached.
 */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
1647 
/* Load-detect whether an analog monitor is attached to this DAC: asks
 * firmware to perform a load test using the VBIOS-provided test value
 * (falling back to 340 when the VBIOS supplies none) and reports
 * connected iff the method succeeds and load was sensed.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}
1674 
/* Encoder "destroy" hook: unregister from DRM and free the
 * nouveau_encoder allocated in nv50_dac_create().
 */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1681 
/* DAC encoder helper vtable; "prepare" and "disable" both detach the
 * output via nv50_dac_disconnect(). */
static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_encoder_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};
1692 
/* DAC encoder core vtable: only destruction is needed here. */
static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
1696 
/* Create a DAC encoder for DCB entry @dcbe and attach it to @connector.
 * Resolves the output-resource index from the DCB "or" mask and looks up
 * the associated DDC/i2c bus (may be absent).  Returns 0 or -ENOMEM.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* dcbe->or is a bitmask; ffs()-1 gives the OR index */
	nv_encoder->or = ffs(dcbe->or) - 1;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
1726 
1727 /******************************************************************************
1728  * Audio
1729  *****************************************************************************/
/* Enable HDA audio on the SOR driving @encoder: builds an ELD from the
 * connector's EDID and passes it to firmware.  Bails out silently if the
 * monitor doesn't advertise audio support.  Only the valid portion of
 * the ELD buffer is sent.
 */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		/* encode the driving head into the hash mask */
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}
1761 
/* Disable HDA audio on the SOR: sends the HDA_ELD method with an empty
 * payload (no ELD data) for the encoder/head pair.
 */
static void
nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		/* encode the driving head into the hash mask */
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
1780 
1781 /******************************************************************************
1782  * HDMI
1783  *****************************************************************************/
/* Enable HDMI infoframe/audio support for the encoder's head: computes
 * the max audio-packet budget from horizontal blanking (minus the HDCP
 * rekey window and an 18-pixel margin, matching the binary driver and
 * tegra), powers HDMI on via firmware, then configures audio.  Bails
 * out silently for non-HDMI monitors.
 */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	/* audio packets fit in hblank minus rekey window, in 32px units */
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_mode_set(encoder, mode);
}
1817 
/* Disable HDMI on the encoder/head pair: sends HDMI_PWR with
 * .pwr.state left at zero (power off).
 */
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}
1836 
1837 /******************************************************************************
1838  * SOR
1839  *****************************************************************************/
/* DPMS handling for SOR encoders.  SOR power is controlled via the NVIF
 * SOR_PWR method; DP outputs additionally use SOR_DP_PWR to control the
 * link itself.
 */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_pwr_v0 pwr;
	} link = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;

	nv_encoder->last_dpms = mode;

	/* If another TMDS encoder shares this output resource (OR) and is
	 * still powered on, leave the shared SOR power state alone.
	 */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		/* Keep the SOR itself powered; the DP link power method
		 * controls the actual output state.
		 */
		args.pwr.state = 1;
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
		nvif_mthd(disp->disp, 0, &link, sizeof(link));
	} else {
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
	}
}
1892 
1893 static void
nv50_sor_ctrl(struct nouveau_encoder * nv_encoder,u32 mask,u32 data)1894 nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
1895 {
1896 	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
1897 	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
1898 	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
1899 		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
1900 			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
1901 			evo_data(push, (nv_encoder->ctrl = temp));
1902 		} else {
1903 			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
1904 			evo_data(push, (nv_encoder->ctrl = temp));
1905 		}
1906 		evo_kick(push, mast);
1907 	}
1908 }
1909 
1910 static void
nv50_sor_disconnect(struct drm_encoder * encoder)1911 nv50_sor_disconnect(struct drm_encoder *encoder)
1912 {
1913 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1914 	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1915 
1916 	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1917 	nv_encoder->crtc = NULL;
1918 
1919 	if (nv_crtc) {
1920 		nv50_crtc_prepare(&nv_crtc->base);
1921 		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
1922 		nv50_audio_disconnect(encoder, nv_crtc);
1923 		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
1924 	}
1925 }
1926 
/* No work to do at commit time; all SOR programming happens in
 * nv50_sor_mode_set() and nv50_sor_dpms().
 */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}
1931 
/* Program the SOR for a new mode: choose the output protocol and pixel
 * depth per DCB output type (TMDS/LVDS/DP), run any LVDS init script,
 * power the encoder up, and finally write the control word binding the
 * SOR to its head.
 */
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mask, ctrl;
	u8 owner = 1 << nv_crtc->index;	/* head ownership bit */
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		/* proto 0x1/0x2: single-link on link A/B; 0x5: dual-link
		 * for clocks at or above 165MHz.
		 */
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				proto = 0x1;
			else
				proto = 0x5;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* Build the LVDS script selector: bit 8 = dual-link,
		 * bit 9 = 24bpp panel.
		 */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG EDID byte 121 encodes link count */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		/* datarate = pixel clock * bits-per-pixel / 8 */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		/* proto 0x8/0x9: DP on link A/B */
		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;
		nv50_audio_mode_set(encoder, mode);
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);

	if (nv50_vers(mast) >= GF110_DISP) {
		/* GF110+ moves sync polarity/depth into a per-head method;
		 * the control word below carries only the protocol.
		 */
		u32 *push = evo_wait(mast, 3);
		if (push) {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_kick(push, mast);
		}

		ctrl = proto << 8;
		mask = 0x00000f00;
	} else {
		/* Pre-GF110: depth, protocol and sync polarity all live in
		 * the SOR control word.
		 */
		ctrl = (depth << 16) | (proto << 8);
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl |= 0x00001000;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl |= 0x00002000;
		mask = 0x000f3f00;
	}

	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
}
2064 
/* Free an SOR encoder allocated by nv50_sor_create(). */
static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	/* encoder is the first member of nouveau_encoder, so this frees
	 * the whole containing object allocated in nv50_sor_create().
	 */
	kfree(encoder);
}
2071 
/* Helper callbacks for SOR encoders (legacy/non-atomic modeset path). */
static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_encoder_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

/* Core encoder ops; only destruction is needed. */
static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
2085 
/* Create a DRM encoder for an on-chip SOR (TMDS/LVDS/DP) DCB entry and
 * attach it to @connector.  Returns 0 on success or -ENOMEM.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	/* Map the DCB output type onto a DRM encoder type; DP is exposed
	 * as TMDS to userspace.
	 */
	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1; /* dcbe->or is a bitmask */
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	/* DP outputs communicate over an AUX channel; all others use a
	 * plain i2c bus.  A failed lookup leaves ->i2c NULL.
	 */
	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			nv_encoder->i2c = &aux->i2c;
			nv_encoder->aux = aux;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
2134 
2135 /******************************************************************************
2136  * PIOR
2137  *****************************************************************************/
2138 
2139 static void
nv50_pior_dpms(struct drm_encoder * encoder,int mode)2140 nv50_pior_dpms(struct drm_encoder *encoder, int mode)
2141 {
2142 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2143 	struct nv50_disp *disp = nv50_disp(encoder->dev);
2144 	struct {
2145 		struct nv50_disp_mthd_v1 base;
2146 		struct nv50_disp_pior_pwr_v0 pwr;
2147 	} args = {
2148 		.base.version = 1,
2149 		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
2150 		.base.hasht  = nv_encoder->dcb->hasht,
2151 		.base.hashm  = nv_encoder->dcb->hashm,
2152 		.pwr.state = mode == DRM_MODE_DPMS_ON,
2153 		.pwr.type = nv_encoder->dcb->type,
2154 	};
2155 
2156 	nvif_mthd(disp->disp, 0, &args, sizeof(args));
2157 }
2158 
2159 static bool
nv50_pior_mode_fixup(struct drm_encoder * encoder,const struct drm_display_mode * mode,struct drm_display_mode * adjusted_mode)2160 nv50_pior_mode_fixup(struct drm_encoder *encoder,
2161 		     const struct drm_display_mode *mode,
2162 		     struct drm_display_mode *adjusted_mode)
2163 {
2164 	if (!nv50_encoder_mode_fixup(encoder, mode, adjusted_mode))
2165 		return false;
2166 	adjusted_mode->clock *= 2;
2167 	return true;
2168 }
2169 
/* No work to do at commit time; programming happens in mode_set/dpms. */
static void
nv50_pior_commit(struct drm_encoder *encoder)
{
}
2174 
2175 static void
nv50_pior_mode_set(struct drm_encoder * encoder,struct drm_display_mode * mode,struct drm_display_mode * adjusted_mode)2176 nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2177 		   struct drm_display_mode *adjusted_mode)
2178 {
2179 	struct nv50_mast *mast = nv50_mast(encoder->dev);
2180 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2181 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2182 	struct nouveau_connector *nv_connector;
2183 	u8 owner = 1 << nv_crtc->index;
2184 	u8 proto, depth;
2185 	u32 *push;
2186 
2187 	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2188 	switch (nv_connector->base.display_info.bpc) {
2189 	case 10: depth = 0x6; break;
2190 	case  8: depth = 0x5; break;
2191 	case  6: depth = 0x2; break;
2192 	default: depth = 0x0; break;
2193 	}
2194 
2195 	switch (nv_encoder->dcb->type) {
2196 	case DCB_OUTPUT_TMDS:
2197 	case DCB_OUTPUT_DP:
2198 		proto = 0x0;
2199 		break;
2200 	default:
2201 		BUG_ON(1);
2202 		break;
2203 	}
2204 
2205 	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
2206 
2207 	push = evo_wait(mast, 8);
2208 	if (push) {
2209 		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2210 			u32 ctrl = (depth << 16) | (proto << 8) | owner;
2211 			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2212 				ctrl |= 0x00001000;
2213 			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2214 				ctrl |= 0x00002000;
2215 			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
2216 			evo_data(push, ctrl);
2217 		}
2218 
2219 		evo_kick(push, mast);
2220 	}
2221 
2222 	nv_encoder->crtc = encoder->crtc;
2223 }
2224 
2225 static void
nv50_pior_disconnect(struct drm_encoder * encoder)2226 nv50_pior_disconnect(struct drm_encoder *encoder)
2227 {
2228 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2229 	struct nv50_mast *mast = nv50_mast(encoder->dev);
2230 	const int or = nv_encoder->or;
2231 	u32 *push;
2232 
2233 	if (nv_encoder->crtc) {
2234 		nv50_crtc_prepare(nv_encoder->crtc);
2235 
2236 		push = evo_wait(mast, 4);
2237 		if (push) {
2238 			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2239 				evo_mthd(push, 0x0700 + (or * 0x040), 1);
2240 				evo_data(push, 0x00000000);
2241 			}
2242 			evo_kick(push, mast);
2243 		}
2244 	}
2245 
2246 	nv_encoder->crtc = NULL;
2247 }
2248 
/* Free a PIOR encoder allocated by nv50_pior_create(). */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	/* Frees the containing nouveau_encoder (encoder is its base). */
	kfree(encoder);
}
2255 
/* Helper callbacks for PIOR (external) encoders. */
static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
	.dpms = nv50_pior_dpms,
	.mode_fixup = nv50_pior_mode_fixup,
	.prepare = nv50_pior_disconnect,
	.commit = nv50_pior_commit,
	.mode_set = nv50_pior_mode_set,
	.disable = nv50_pior_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

/* Core encoder ops; only destruction is needed. */
static const struct drm_encoder_funcs nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
2269 
/* Create a DRM encoder for an off-chip (PIOR) DCB entry and attach it to
 * @connector.  Returns 0 on success, -ENODEV for unsupported output
 * types, or -ENOMEM.
 */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	/* Resolve the external-device DDC channel: i2c bus for TMDS, AUX
	 * for DP.  NOTE(review): a failed lookup leaves ddc NULL and we
	 * still create the encoder — presumably detection just fails
	 * later; confirm intentional.
	 */
	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc  = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc  = aux ? &aux->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1; /* dcbe->or is a bitmask */
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type);
	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
2314 
2315 /******************************************************************************
2316  * Framebuffer
2317  *****************************************************************************/
2318 
/* Destroy a framebuffer DMA object set: per-head objects, the core
 * channel's object, then unlink and free the tracking structure.
 * Safe to call on a partially-initialised fbdma (nvif_object_fini()
 * tolerates objects that were never initialised).
 */
static void
nv50_fbdma_fini(struct nv50_fbdma *fbdma)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
		nvif_object_fini(&fbdma->base[i]);
	nvif_object_fini(&fbdma->core);
	list_del(&fbdma->head);
	kfree(fbdma);
}
2329 
2330 static int
nv50_fbdma_init(struct drm_device * dev,u32 name,u64 offset,u64 length,u8 kind)2331 nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
2332 {
2333 	struct nouveau_drm *drm = nouveau_drm(dev);
2334 	struct nv50_disp *disp = nv50_disp(dev);
2335 	struct nv50_mast *mast = nv50_mast(dev);
2336 	struct __attribute__ ((packed)) {
2337 		struct nv_dma_v0 base;
2338 		union {
2339 			struct nv50_dma_v0 nv50;
2340 			struct gf100_dma_v0 gf100;
2341 			struct gf119_dma_v0 gf119;
2342 		};
2343 	} args = {};
2344 	struct nv50_fbdma *fbdma;
2345 	struct drm_crtc *crtc;
2346 	u32 size = sizeof(args.base);
2347 	int ret;
2348 
2349 	list_for_each_entry(fbdma, &disp->fbdma, head) {
2350 		if (fbdma->core.handle == name)
2351 			return 0;
2352 	}
2353 
2354 	fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
2355 	if (!fbdma)
2356 		return -ENOMEM;
2357 	list_add(&fbdma->head, &disp->fbdma);
2358 
2359 	args.base.target = NV_DMA_V0_TARGET_VRAM;
2360 	args.base.access = NV_DMA_V0_ACCESS_RDWR;
2361 	args.base.start = offset;
2362 	args.base.limit = offset + length - 1;
2363 
2364 	if (drm->device.info.chipset < 0x80) {
2365 		args.nv50.part = NV50_DMA_V0_PART_256;
2366 		size += sizeof(args.nv50);
2367 	} else
2368 	if (drm->device.info.chipset < 0xc0) {
2369 		args.nv50.part = NV50_DMA_V0_PART_256;
2370 		args.nv50.kind = kind;
2371 		size += sizeof(args.nv50);
2372 	} else
2373 	if (drm->device.info.chipset < 0xd0) {
2374 		args.gf100.kind = kind;
2375 		size += sizeof(args.gf100);
2376 	} else {
2377 		args.gf119.page = GF119_DMA_V0_PAGE_LP;
2378 		args.gf119.kind = kind;
2379 		size += sizeof(args.gf119);
2380 	}
2381 
2382 	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2383 		struct nv50_head *head = nv50_head(crtc);
2384 		int ret = nvif_object_init(&head->sync.base.base.user, name,
2385 					   NV_DMA_IN_MEMORY, &args, size,
2386 					   &fbdma->base[head->base.index]);
2387 		if (ret) {
2388 			nv50_fbdma_fini(fbdma);
2389 			return ret;
2390 		}
2391 	}
2392 
2393 	ret = nvif_object_init(&mast->base.base.user, name, NV_DMA_IN_MEMORY,
2394 			       &args, size, &fbdma->core);
2395 	if (ret) {
2396 		nv50_fbdma_fini(fbdma);
2397 		return ret;
2398 	}
2399 
2400 	return 0;
2401 }
2402 
/* Framebuffer destructor hook; the shared DMA objects are reference-
 * less and torn down at display destroy time, so nothing to do here.
 */
static void
nv50_fb_dtor(struct drm_framebuffer *fb)
{
}
2407 
/* Framebuffer constructor hook: precompute the hardware surface format,
 * pitch and DMA handle for @fb, and ensure a matching VRAM DMA object
 * exists.  Returns 0 on success or -EINVAL for unsupported depths.
 */
static int
nv50_fb_ctor(struct drm_framebuffer *fb)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nouveau_drm *drm = nouveau_drm(fb->dev);
	struct nouveau_bo *nvbo = nv_fb->nvbo;
	struct nv50_disp *disp = nv50_disp(fb->dev);
	u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
	u8 tile = nvbo->tile_mode;

	/* Fermi+ encodes the tile mode differently */
	if (drm->device.info.chipset >= 0xc0)
		tile >>= 4; /* yep.. */

	/* Hardware surface format per framebuffer depth. */
	switch (fb->depth) {
	case  8: nv_fb->r_format = 0x1e00; break;
	case 15: nv_fb->r_format = 0xe900; break;
	case 16: nv_fb->r_format = 0xe800; break;
	case 24:
	case 32: nv_fb->r_format = 0xcf00; break;
	case 30: nv_fb->r_format = 0xd100; break;
	default:
		 NV_ERROR(drm, "unknown depth %d\n", fb->depth);
		 return -EINVAL;
	}

	/* Pitch encoding: tiled surfaces store pitch/4 with the tile mode
	 * in the low nibble; linear surfaces set a "pitch is linear" flag,
	 * whose bit position differs on GF110+ display classes.
	 */
	if (disp->disp->oclass < G82_DISP) {
		nv_fb->r_pitch   = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					    (fb->pitches[0] | 0x00100000);
		nv_fb->r_format |= kind << 16;
	} else
	if (disp->disp->oclass < GF110_DISP) {
		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					   (fb->pitches[0] | 0x00100000);
	} else {
		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
					   (fb->pitches[0] | 0x01000000);
	}
	/* Handle is derived from the memory kind so framebuffers of the
	 * same kind share one DMA object.
	 */
	nv_fb->r_handle = 0xffff0000 | kind;

	return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0,
			       drm->device.info.ram_user, kind);
}
2450 
2451 /******************************************************************************
2452  * Init
2453  *****************************************************************************/
2454 
/* Display shutdown hook; no teardown is required for this display
 * implementation.
 */
void
nv50_display_fini(struct drm_device *dev)
{
}
2459 
/* (Re)initialise display state: reload each CRTC's LUT, restore the
 * per-head flip-completion values in the shared sync buffer, and point
 * the core channel's notifier context at our sync object.
 * Returns 0 on success or -EBUSY if the core channel has no space.
 */
int
nv50_display_init(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_crtc *crtc;
	u32 *push;

	push = evo_wait(nv50_mast(dev), 32);
	if (!push)
		return -EBUSY;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nv50_sync *sync = nv50_sync(crtc);

		nv50_crtc_lut_load(crtc);
		/* restore the last flip-completion value for this head */
		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
	}

	/* 0x0088: core channel notifier context DMA */
	evo_mthd(push, 0x0088, 1);
	evo_data(push, nv50_mast(dev)->base.sync.handle);
	evo_kick(push, nv50_mast(dev));
	return 0;
}
2483 
/* Tear down everything created by nv50_display_create(): framebuffer
 * DMA objects, the core channel, the shared sync buffer, and the disp
 * structure itself.  Also used as the error-unwind path of create.
 */
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_fbdma *fbdma, *fbtmp;

	list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
		nv50_fbdma_fini(fbdma);
	}

	nv50_dmac_destroy(&disp->mast.base, disp->disp);

	/* NOTE(review): unmap is called before the NULL check — presumably
	 * nouveau_bo_unmap() tolerates a NULL bo; confirm.
	 */
	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
2504 
/* Create the nv50 display: allocate the disp structure, a small shared
 * sync/notifier buffer, the core EVO channel, one CRTC per hardware
 * head, and encoder/connector objects from the VBIOS DCB table.
 * On any failure, nv50_display_destroy() unwinds whatever was created.
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	INIT_LIST_HEAD(&disp->fbdma);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
	nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
	disp->disp = &nouveau_display(dev)->disp;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
			      &disp->mast);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		/* on-chip outputs use SORs/DACs; off-chip ones use PIORs */
		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		/* a bad DCB entry isn't fatal; warn and carry on */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				     dcbe->location, dcbe->type,
				     ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}
2611