1/* 2 * Copyright 2011 Red Hat Inc. 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice shall be included in 12 * all copies or substantial portions of the Software. 13 * 14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 20 * OTHER DEALINGS IN THE SOFTWARE. 
21 * 22 * Authors: Ben Skeggs 23 */ 24 25#include <linux/dma-mapping.h> 26 27#include <drm/drmP.h> 28#include <drm/drm_crtc_helper.h> 29#include <drm/drm_plane_helper.h> 30#include <drm/drm_dp_helper.h> 31 32#include <nvif/class.h> 33 34#include "nouveau_drm.h" 35#include "nouveau_dma.h" 36#include "nouveau_gem.h" 37#include "nouveau_connector.h" 38#include "nouveau_encoder.h" 39#include "nouveau_crtc.h" 40#include "nouveau_fence.h" 41#include "nv50_display.h" 42 43#define EVO_DMA_NR 9 44 45#define EVO_MASTER (0x00) 46#define EVO_FLIP(c) (0x01 + (c)) 47#define EVO_OVLY(c) (0x05 + (c)) 48#define EVO_OIMM(c) (0x09 + (c)) 49#define EVO_CURS(c) (0x0d + (c)) 50 51/* offsets in shared sync bo of various structures */ 52#define EVO_SYNC(c, o) ((c) * 0x0100 + (o)) 53#define EVO_MAST_NTFY EVO_SYNC( 0, 0x00) 54#define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00) 55#define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10) 56 57/****************************************************************************** 58 * EVO channel 59 *****************************************************************************/ 60 61struct nv50_chan { 62 struct nvif_object user; 63}; 64 65static int 66nv50_chan_create(struct nvif_object *disp, const u32 *oclass, u8 head, 67 void *data, u32 size, struct nv50_chan *chan) 68{ 69 const u32 handle = (oclass[0] << 16) | head; 70 u32 sclass[8]; 71 int ret, i; 72 73 ret = nvif_object_sclass(disp, sclass, ARRAY_SIZE(sclass)); 74 WARN_ON(ret > ARRAY_SIZE(sclass)); 75 if (ret < 0) 76 return ret; 77 78 while (oclass[0]) { 79 for (i = 0; i < ARRAY_SIZE(sclass); i++) { 80 if (sclass[i] == oclass[0]) { 81 ret = nvif_object_init(disp, NULL, handle, 82 oclass[0], data, size, 83 &chan->user); 84 if (ret == 0) 85 nvif_object_map(&chan->user); 86 return ret; 87 } 88 } 89 oclass++; 90 } 91 92 return -ENOSYS; 93} 94 95static void 96nv50_chan_destroy(struct nv50_chan *chan) 97{ 98 nvif_object_fini(&chan->user); 99} 100 
101/****************************************************************************** 102 * PIO EVO channel 103 *****************************************************************************/ 104 105struct nv50_pioc { 106 struct nv50_chan base; 107}; 108 109static void 110nv50_pioc_destroy(struct nv50_pioc *pioc) 111{ 112 nv50_chan_destroy(&pioc->base); 113} 114 115static int 116nv50_pioc_create(struct nvif_object *disp, const u32 *oclass, u8 head, 117 void *data, u32 size, struct nv50_pioc *pioc) 118{ 119 return nv50_chan_create(disp, oclass, head, data, size, &pioc->base); 120} 121 122/****************************************************************************** 123 * Cursor Immediate 124 *****************************************************************************/ 125 126struct nv50_curs { 127 struct nv50_pioc base; 128}; 129 130static int 131nv50_curs_create(struct nvif_object *disp, int head, struct nv50_curs *curs) 132{ 133 struct nv50_disp_cursor_v0 args = { 134 .head = head, 135 }; 136 static const u32 oclass[] = { 137 GK104_DISP_CURSOR, 138 GF110_DISP_CURSOR, 139 GT214_DISP_CURSOR, 140 G82_DISP_CURSOR, 141 NV50_DISP_CURSOR, 142 0 143 }; 144 145 return nv50_pioc_create(disp, oclass, head, &args, sizeof(args), 146 &curs->base); 147} 148 149/****************************************************************************** 150 * Overlay Immediate 151 *****************************************************************************/ 152 153struct nv50_oimm { 154 struct nv50_pioc base; 155}; 156 157static int 158nv50_oimm_create(struct nvif_object *disp, int head, struct nv50_oimm *oimm) 159{ 160 struct nv50_disp_cursor_v0 args = { 161 .head = head, 162 }; 163 static const u32 oclass[] = { 164 GK104_DISP_OVERLAY, 165 GF110_DISP_OVERLAY, 166 GT214_DISP_OVERLAY, 167 G82_DISP_OVERLAY, 168 NV50_DISP_OVERLAY, 169 0 170 }; 171 172 return nv50_pioc_create(disp, oclass, head, &args, sizeof(args), 173 &oimm->base); 174} 175 
/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* bus address of the pushbuf page */
	u32 *ptr;		/* CPU mapping of the pushbuf page */

	struct nvif_object sync;	/* ctxdma covering the shared sync bo */
	struct nvif_object vram;	/* ctxdma covering all of VRAM */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};

/* Tear down a DMA EVO channel: release its ctxdma objects and the channel
 * itself, then free the pushbuf page if it was allocated. */
static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct pci_dev *pdev = nvxx_device(nvif_device(disp))->pdev;
		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}

/* Create a DMA-mode EVO channel: allocate a one-page DMA-coherent pushbuf,
 * wrap it in a temporary ctxdma (handle taken from args->pushbuf) for the
 * channel-create call, then create ctxdmas for the shared sync buffer at
 * @syncbuf and for all of VRAM.
 *
 * NOTE(review): on the failure paths here dmac->ptr is not freed and, for
 * the later nvif_object_init failures, the channel is left constructed —
 * presumably callers clean up via nv50_dmac_destroy(); confirm.
 */
static int
nv50_dmac_create(struct nvif_object *disp, const u32 *oclass, u8 head,
		 void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nvif_device *device = nvif_device(disp);
	struct nv50_disp_core_channel_dma_v0 *args = data;
	struct nvif_object pushbuf;
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = pci_alloc_consistent(nvxx_device(device)->pdev,
					 PAGE_SIZE, &dmac->handle);
	if (!dmac->ptr)
		return -ENOMEM;

	/* Temporary ctxdma describing the pushbuf page; the channel takes
	 * its own reference during creation, so it's dropped again below. */
	ret = nvif_object_init(nvif_object(device), NULL,
			       args->pushbuf, NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	ret = nv50_chan_create(disp, oclass, head, data, size, &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	/* ctxdma for the shared sync bo, used for flip completion tracking. */
	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000000,
			       NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	/* ctxdma spanning all of user-visible VRAM. */
	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000001,
			       NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	return ret;
}

/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
	struct nv50_dmac base;
};

/* Create the core (master) EVO channel, trying classes newest-first. */
static int
nv50_core_create(struct nvif_object *disp, u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_disp_core_channel_dma_v0 args = {
		.pushbuf = 0xb0007d00,
	};
	static const u32 oclass[] = {
		GM204_DISP_CORE_CHANNEL_DMA,
		GM107_DISP_CORE_CHANNEL_DMA,
		GK110_DISP_CORE_CHANNEL_DMA,
		GK104_DISP_CORE_CHANNEL_DMA,
		GF110_DISP_CORE_CHANNEL_DMA,
		GT214_DISP_CORE_CHANNEL_DMA,
		GT206_DISP_CORE_CHANNEL_DMA,
		GT200_DISP_CORE_CHANNEL_DMA,
		G82_DISP_CORE_CHANNEL_DMA,
		NV50_DISP_CORE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(disp, oclass, 0, &args, sizeof(args), syncbuf,
				&core->base);
}

/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;	/* offset of this head's flip semaphore in the sync bo */
	u32 data;	/* next expected semaphore value */
};

/* Create the per-head base (primary plane) channel. */
static int
nv50_base_create(struct nvif_object *disp, int head, u64 syncbuf,
		 struct nv50_sync *base)
{
	struct nv50_disp_base_channel_dma_v0 args = {
		.pushbuf = 0xb0007c00 | head,
		.head = head,
	};
	static const
	u32 oclass[] = {
		GK110_DISP_BASE_CHANNEL_DMA,
		GK104_DISP_BASE_CHANNEL_DMA,
		GF110_DISP_BASE_CHANNEL_DMA,
		GT214_DISP_BASE_CHANNEL_DMA,
		GT200_DISP_BASE_CHANNEL_DMA,
		G82_DISP_BASE_CHANNEL_DMA,
		NV50_DISP_BASE_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}

/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
	struct nv50_dmac base;
};

/* Create the per-head overlay channel. */
static int
nv50_ovly_create(struct nvif_object *disp, int head, u64 syncbuf,
		 struct nv50_ovly *ovly)
{
	struct nv50_disp_overlay_channel_dma_v0 args = {
		.pushbuf = 0xb0007e00 | head,
		.head = head,
	};
	static const u32 oclass[] = {
		GK104_DISP_OVERLAY_CONTROL_DMA,
		GF110_DISP_OVERLAY_CONTROL_DMA,
		GT214_DISP_OVERLAY_CHANNEL_DMA,
		GT200_DISP_OVERLAY_CHANNEL_DMA,
		G82_DISP_OVERLAY_CHANNEL_DMA,
		NV50_DISP_OVERLAY_CHANNEL_DMA,
		0
	};

	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}

/* Per-head state: the CRTC itself plus its private EVO channels and the
 * currently-displayed (pinned) framebuffer bo. */
struct nv50_head {
	struct nouveau_crtc base;
	struct nouveau_bo *image;
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

/* Convenience accessors from a drm_crtc to the embedded channels. */
#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_curs(c) (&nv50_head(c)->curs)
#define nv50_sync(c) (&nv50_head(c)->sync)
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass

/* Per-framebuffer ctxdma handles: one for the core channel and one for
 * each head's base channel. */
struct nv50_fbdma {
	struct list_head head;
	struct nvif_object core;
	struct nvif_object base[4];
};

struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct list_head fbdma;

	struct
nouveau_bo *sync;	/* shared sync/semaphore bo, laid out per EVO_SYNC() */
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)

static struct drm_crtc *
nv50_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/

/* Reserve space for @nr dwords in @evoc's pushbuf.  On success the
 * channel's lock is held until the matching evo_kick(); returns a pointer
 * to write methods at, or NULL if the channel stalled.  If there isn't
 * enough room before the wrap point, a jump to offset 0 is written and we
 * wait for the hardware GET pointer to reset.
 *
 * NOTE(review): PUT is read before dmac->lock is taken — looks racy
 * against a concurrent evo_wait/evo_kick pair on the same channel;
 * confirm callers serialise, or the read belongs inside the lock.
 */
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;	/* jump to pushbuf start */

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (!nvxx_wait(&dmac->base.user, 0x0004, ~0, 0x00000000)) {
			mutex_unlock(&dmac->lock);
			nv_error(nvxx_object(&dmac->base.user), "channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}

/* Submit methods written since evo_wait() by advancing PUT, and drop the
 * channel lock taken in evo_wait(). */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}

/* Method/data emission; the #else variants add printk tracing for debug. */
#if 1
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)
#else
#define evo_mthd(p,m,s) do {                                                   \
	const u32 _m = (m), _s = (s);                                          \
	printk(KERN_ERR "%04x %d %s\n", _m, _s, __func__);                     \
	*((p)++) = ((_s << 18) | _m);                                          \
} while(0)
#define evo_data(p,d) do {                                                     \
	const u32 _d = (d);                                                    \
	printk(KERN_ERR "\t%08x\n", _d);                                       \
	*((p)++) = _d;                                                         \
} while(0)
#endif

/* Poll callback: true once the master notifier has been written. */
static bool
evo_sync_wait(void *data)
{
	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
		return true;
	usleep_range(1, 2);
	return false;
}

/* Synchronise with the core channel: request a notifier write and spin
 * until it lands.  Returns 0 on success, -EBUSY on timeout or if pushbuf
 * space could not be reserved. */
static int
evo_sync(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nv_wait_cb(nvxx_device(device), evo_sync_wait, disp->sync))
			return 0;
	}

	return -EBUSY;
}

/******************************************************************************
 * Page flipping channel
 *****************************************************************************/

/* Expose the shared sync bo used for cross-channel flip semaphores. */
struct nouveau_bo *
nv50_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nv50_disp(dev)->sync;
}

struct nv50_display_flip {
	struct nv50_disp *disp;
	struct nv50_sync *chan;
};

/* Poll callback: true once the head's flip semaphore reaches the value
 * the base channel was told to release. */
static bool
nv50_display_flip_wait(void *data)
{
	struct nv50_display_flip *flip = data;
	if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
					      flip->chan->data)
		return true;
	usleep_range(1, 2);
	return false;
}

/* Cancel any pending flip on @crtc's base channel and wait until the
 * channel has caught up (semaphore reaches the expected value). */
void
nv50_display_flip_stop(struct drm_crtc *crtc)
{
	struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
	struct nv50_display_flip flip = {
		.disp = nv50_disp(crtc->dev),
		.chan = nv50_sync(crtc),
	};
	u32 *push;

	push = evo_wait(flip.chan, 8);
	if (push) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, flip.chan);
	}

	nv_wait_cb(nvxx_device(device), nv50_display_flip_wait, &flip);
}

/* Queue a page flip to @fb on @crtc, synchronised against rendering on
 * @chan (or against the core channel when @chan is NULL). */
int
nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct
		       nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_sync *sync = nv50_sync(crtc);
	u32 *push;
	int ret;

	/* Flipping between framebuffers of differing size isn't supported. */
	if (crtc->primary->fb->width != fb->width ||
	    crtc->primary->fb->height != fb->height)
		return -EINVAL;

	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;
	if (chan == NULL)
		evo_sync(crtc->dev);

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	/* Emit a semaphore release+acquire pair on the rendering channel so
	 * the flip only proceeds once rendering to the new fb is complete;
	 * the method sequence depends on the channel's class generation. */
	if (chan && chan->object->oclass < G82_CHANNEL_GPFIFO) {
		ret = RING_SPACE(chan, 8);
		if (ret)
			return ret;

		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
		OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
		OUT_RING  (chan, sync->addr ^ 0x10);
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
		OUT_RING  (chan, sync->data + 1);
		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
		OUT_RING  (chan, sync->addr);
		OUT_RING  (chan, sync->data);
	} else
	if (chan && chan->object->oclass < FERMI_CHANNEL_GPFIFO) {
		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
		ret = RING_SPACE(chan, 12);
		if (ret)
			return ret;

		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
		OUT_RING  (chan, chan->vram.handle);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
		OUT_RING  (chan, sync->data + 1);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr));
		OUT_RING  (chan, lower_32_bits(addr));
		OUT_RING  (chan, sync->data);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
	} else
	if (chan) {
		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;

		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
		OUT_RING  (chan, sync->data + 1);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(addr));
		OUT_RING  (chan, lower_32_bits(addr));
		OUT_RING  (chan, sync->data);
		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
	}

	if (chan) {
		/* Flip-flop between the two semaphore slots for this head. */
		sync->addr ^= 0x10;
		sync->data++;
		FIRE_RING (chan);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	evo_mthd(push, 0x0088, 4);
	evo_data(push, sync->addr);
	evo_data(push, sync->data++);
	evo_data(push, sync->data);
	evo_data(push, sync->base.sync.handle);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_handle);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	/* Surface setup methods moved between generations. */
	if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) {
		evo_mthd(push, 0x0800, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	} else {
		evo_mthd(push, 0x0400, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push,
			 nv_fb->r_format);
	}
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	nouveau_bo_ref(nv_fb->nvbo, &head->image);
	return 0;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/

/* Program the head's dither mode/depth from the connector's properties,
 * optionally committing the update immediately. */
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		/* Only dither when the fb has more depth than the sink. */
		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(mast, 4);
	if (push) {
		/* Method offset/stride differs per core channel generation. */
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
			evo_data(push, mode);
		} else
		if (nv50_vers(mast) < GK104_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		} else {
			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}

/* Program the head's scaler according to the connector's scaling mode,
 * handling underscan borders and CENTER/ASPECT modes. */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc =
&nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		mode = nv_connector->scaling_mode;
		if (nv_connector->scaling_full) /* non-EDID LVDS/eDP mode */
			mode = DRM_MODE_SCALE_FULLSCREEN;
	}

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 19.13 fixed point */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* no explicit border: shrink ~6% plus 32 pixels */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) /
				     umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->primary->fb,
					       NULL, 1);
		}
	}

	return 0;
}

/* Program the delay (in microseconds) used by the "delayed vblank"
 * interrupt for this head.  Pre-GF110 core channel method only. */
static int
nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc *nv_crtc, u32 usec)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 8);
	if (!push)
		return -ENOMEM;

	evo_mthd(push, 0x0828 + (nv_crtc->index * 0x400), 1);
	evo_data(push, usec);
	evo_kick(push, mast);
	return 0;
}

/* Program colour vibrance and hue for the head, optionally committing
 * the update immediately. */
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	/* round-to-nearest when scaling the -100..100 property to 12 bits */
	adj = (nv_crtc->color_vibrance > 0) ?
50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}

/* Point the head's core channel at framebuffer @fb with panning (x, y),
 * optionally committing the update immediately. */
static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			/* fb ctxdma handle method only exists from G82 on */
			if (nv50_vers(mast) > NV50_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_handle);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_handle);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	nv_crtc->fb.handle = nvfb->r_handle;
	return 0;
}

/* Enable the hardware cursor for @nv_crtc (does not commit; see
 * nv50_crtc_cursor_show_hide()). */
static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
		}
		evo_kick(push, mast);
	}
	nv_crtc->cursor.visible = true;
}

/* Disable the hardware cursor for @nv_crtc (does not commit). */
static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
evo_kick(push, mast); 973 } 974 nv_crtc->cursor.visible = false; 975} 976 977static void 978nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update) 979{ 980 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); 981 982 if (show && nv_crtc->cursor.nvbo && nv_crtc->base.enabled) 983 nv50_crtc_cursor_show(nv_crtc); 984 else 985 nv50_crtc_cursor_hide(nv_crtc); 986 987 if (update) { 988 u32 *push = evo_wait(mast, 2); 989 if (push) { 990 evo_mthd(push, 0x0080, 1); 991 evo_data(push, 0x00000000); 992 evo_kick(push, mast); 993 } 994 } 995} 996 997static void 998nv50_crtc_dpms(struct drm_crtc *crtc, int mode) 999{ 1000} 1001 1002static void 1003nv50_crtc_prepare(struct drm_crtc *crtc) 1004{ 1005 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); 1006 struct nv50_mast *mast = nv50_mast(crtc->dev); 1007 u32 *push; 1008 1009 nv50_display_flip_stop(crtc); 1010 1011 push = evo_wait(mast, 6); 1012 if (push) { 1013 if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) { 1014 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1); 1015 evo_data(push, 0x00000000); 1016 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1); 1017 evo_data(push, 0x40000000); 1018 } else 1019 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 1020 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1); 1021 evo_data(push, 0x00000000); 1022 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1); 1023 evo_data(push, 0x40000000); 1024 evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1); 1025 evo_data(push, 0x00000000); 1026 } else { 1027 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1); 1028 evo_data(push, 0x00000000); 1029 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1); 1030 evo_data(push, 0x03000000); 1031 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1); 1032 evo_data(push, 0x00000000); 1033 } 1034 1035 evo_kick(push, mast); 1036 } 1037 1038 nv50_crtc_cursor_show_hide(nv_crtc, false, false); 1039} 1040 1041static void 1042nv50_crtc_commit(struct drm_crtc *crtc) 1043{ 1044 
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			/* NV50: FB ctxdma handle + unblank with LUT offset */
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			/* G82..GT2xx: same, plus the core VRAM ctxdma */
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			/* GF110+: new method layout; also set LUT mode 0xffffff00 */
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	/* show cursor again and kick an update, then resume flipping */
	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
}

/* Standard CRTC mode fixup: fill in hardware CRTC timing fields,
 * halving vertical values for interlaced modes.
 */
static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}

/* Pin the new primary framebuffer's BO and make it the head's tracked
 * image, unpinning whatever was previously displayed.  Returns 0 on
 * success or the pin error.
 */
static int
nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb =
		nouveau_framebuffer(crtc->primary->fb);
	struct nv50_head *head = nv50_head(crtc);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM, true);
	if (ret == 0) {
		if (head->image)
			nouveau_bo_unpin(head->image);
		nouveau_bo_ref(nvfb->nvbo, &head->image);
	}

	return ret;
}

/* Program the head's raster timings and pixel clock from @mode.
 * Horizontal/vertical blanking windows are derived from the DRM mode's
 * sync/total values; vscan/ilace scale the vertical numbers.  Interface
 * unchanged from the drm_crtc_helper .mode_set contract: returns 0 or a
 * negative error from the framebuffer swap.
 */
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1, vblankus = 0;
	u32 *push;
	int ret;

	/* horizontal timings, in pixels, relative to start of sync */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* vertical timings, scaled for doublescan/interlace */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	/* XXX: Safe underestimate, even "0" works */
	vblankus = (vactive - mode->vdisplay - 2) * hactive;
	vblankus *= 1000;
	vblankus /= mode->clock;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);

	/* G94 only accepts this after setting scale */
	if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA)
		nv50_crtc_set_raster_vblank_dmi(nv_crtc, vblankus);

	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
	return 0;
}

/* .mode_set_base: swap to the new framebuffer and repoint the scanout
 * at (x, y) without a full modeset.
 */
static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	if (!crtc->primary->fb) {
		NV_DEBUG(drm, "No FB bound\n");
		return 0;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
	return 0;
}

/* Atomic (kgdb/panic-path) variant: point scanout at @fb immediately,
 * without swapping pin state.
 */
static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

/* Upload the software gamma ramp into the hardware LUT buffer object.
 * Pre-GF110 uses 8-byte entries; GF110+ uses 32-byte entries with a
 * 0x6000 bias on each component.
 */
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		/* 16-bit DRM values narrowed to the hardware's 14 bits */
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}

/* Disable the head: wait for the core channel to idle, then drop the
 * pin/reference on the framebuffer being scanned out.
 */
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	evo_sync(crtc->dev);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}

/* Legacy cursor-set ioctl: bind (handle != 0) or unbind (handle == 0)
 * a 64x64 cursor BO, then show/hide accordingly.  The GEM reference is
 * dropped before returning; the BO itself is kept via nouveau_bo_ref.
 */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem = NULL;
	struct nouveau_bo *nvbo = NULL;
	int ret = 0;

	if (handle) {
		/* hardware only supports 64x64 cursor images here */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true);
	}

	if (ret == 0) {
		/* swap the pinned cursor BO (nvbo is NULL on unbind) */
		if (nv_crtc->cursor.nvbo)
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		nouveau_bo_ref(nvbo, &nv_crtc->cursor.nvbo);
	}
	drm_gem_object_unreference_unlocked(gem);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
	return ret;
}

/* Move the cursor via the per-head PIO cursor channel, and remember the
 * position so it can be restored after suspend/resume.
 */
static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	/* 0x0084 = position (y:16 | x:16), 0x0080 = update trigger */
	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nvif_wr32(&chan->user, 0x0080, 0x00000000);

	nv_crtc->cursor_saved_x = x;
	nv_crtc->cursor_saved_y = y;
	return 0;
}

/* Legacy gamma-set ioctl: copy the requested range of the ramp into the
 * software LUT (clamped to 256 entries) and reload the hardware LUT.
 */
static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min_t(u32, start + size, 256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_crtc_lut_load(crtc);
}

/* Restore cursor position and visibility (used on resume). */
static void
nv50_crtc_cursor_restore(struct nouveau_crtc *nv_crtc, int x, int y)
{
	nv50_crtc_cursor_move(&nv_crtc->base, x, y);

	nv50_crtc_cursor_show_hide(nv_crtc, true, true);
}

/* Tear down a head: release its per-head FBDMA objects and EVO channels,
 * then drop any BOs (scanout image, cursor, LUT) still held.
 */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_fbdma *fbdma;

	list_for_each_entry(fbdma, &disp->fbdma, head) {
		nvif_object_fini(&fbdma->base[nv_crtc->index]);
	}

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);
	nv50_dmac_destroy(&head->sync.base, disp->disp);
	nv50_pioc_destroy(&head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 * disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	/*XXX: ditto */
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
	.disable = nv50_crtc_disable,
};

static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = nouveau_crtc_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};

/* Create one display head: register the DRM CRTC, allocate and map the
 * LUT buffer, and create the per-head cursor/base/overlay EVO channels.
 * On any failure the partially-constructed head is destroyed.
 */
static int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	head->base.set_dither = nv50_crtc_set_dither;
	head->base.set_scale = nv50_crtc_set_scale;
	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
	head->base.color_vibrance = 50;
	head->base.vibrant_hue = 0;
	head->base.cursor.set_pos = nv50_crtc_cursor_restore;
	/* identity gamma ramp by default */
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	crtc = &head->base.base;
	drm_crtc_init(dev, crtc, &nv50_crtc_func);
	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* LUT buffer: 8KiB in VRAM, pinned and CPU-mapped for lut_load() */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(head->base.lut.nvbo);
			if (ret)
				nouveau_bo_unpin(head->base.lut.nvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	/* allocate cursor resources */
	ret = nv50_curs_create(disp->disp, index, &head->curs);
	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nv50_base_create(disp->disp, index, disp->sync->bo.offset,
			       &head->sync);
	if (ret)
		goto out;

	head->sync.addr = EVO_FLIP_SEM0(index);
	head->sync.data = 0x00000000;

	/* allocate overlay resources */
	ret = nv50_oimm_create(disp->disp, index, &head->oimm);
	if (ret)
		goto out;

	ret = nv50_ovly_create(disp->disp, index, disp->sync->bo.offset,
			       &head->ovly);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * Encoder helpers
 *****************************************************************************/
/* Shared encoder mode fixup: substitute the connector's native mode when
 * scaling is in effect, honouring DRM_MODE_SCALE_NONE for panel outputs
 * (which may still need the scaler for non-EDID driver modes).
 */
static bool
nv50_encoder_mode_fixup(struct drm_encoder *encoder,
			const struct drm_display_mode *mode,
			struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		nv_connector->scaling_full = false;
		if (nv_connector->scaling_mode == DRM_MODE_SCALE_NONE) {
			switch (nv_connector->type) {
			case DCB_CONNECTOR_LVDS:
			case DCB_CONNECTOR_LVDS_SPWG:
			case DCB_CONNECTOR_eDP:
				/* force use of scaler for non-edid modes */
				if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
					return true;
				nv_connector->scaling_full = true;
				break;
			default:
				return true;
			}
		}

		drm_mode_copy(adjusted_mode, nv_connector->native_mode);
	}

	return true;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
/* DAC power management via the NVIF display method interface; sync
 * signals are kept alive for STANDBY/SUSPEND as appropriate.
 */
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = 1,
		.pwr.data  = 1,
		/* vsync stays on except in SUSPEND/OFF */
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		/* hsync stays on except in STANDBY/OFF */
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

/* Nothing to do at commit; mode_set already programmed the core channel. */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}

/* Route the DAC to its CRTC and program sync polarity in the core
 * channel, then record the ownership in nv_encoder->crtc.
 */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

/* Detach the DAC from its CRTC (used for both .prepare and .disable). */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

/* Analog load detection via the DAC_LOAD method; the test voltage comes
 * from VBIOS (dactestval) with 340 as fallback.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_encoder_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};

static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

/* Create a DRM encoder for a DCB DAC output and attach it to @connector. */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
/* Push the connector's ELD to the display engine so HDMI/DP audio works;
 * no-op when the monitor reports no audio support.
 */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct
	__packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		/* hashm low bits select the head owning this SOR */
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	/* only send as many ELD bytes as are actually valid */
	nvif_mthd(disp->disp, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));
}

/* Clear the ELD for this encoder/head, disabling audio. */
static void
nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
/* Enable HDMI infoframe/audio hardware for this encoder when the sink is
 * an HDMI monitor; also computes max_ac_packet from horizontal blanking.
 */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_mode_set(encoder, mode);
}

/* Power down the HDMI logic for this encoder/head. */
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

/******************************************************************************
 * SOR
 *****************************************************************************/
/* SOR DPMS: power the SOR (and DP link, for DP outputs) up or down.
 * When another encoder sharing the same OR is still on, the SOR is left
 * untouched.
 */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht
			     = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_pwr_v0 pwr;
	} link = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;

	nv_encoder->last_dpms = mode;

	/* don't touch the SOR if a partner encoder on the same OR is on */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		/* DP: keep the SOR itself powered, toggle the DP link */
		args.pwr.state = 1;
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
		nvif_mthd(disp->disp, 0, &link, sizeof(link));
	} else {
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
	}
}

/* Read-modify-write the SOR control method in the core channel, caching
 * the last value written in nv_encoder->ctrl to skip redundant pushes.
 */
static void
nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
{
	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		} else {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		}
		evo_kick(push, mast);
	}
}

/* Detach the SOR from its CRTC and shut down audio/HDMI for that head. */
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		nv50_crtc_prepare(&nv_crtc->base);
		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
		nv50_audio_disconnect(encoder, nv_crtc);
		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
	}
}

/* Nothing to do at commit; mode_set already programmed everything. */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}

/* Program the SOR for TMDS, LVDS or DP.  Selects the protocol (proto)
 * and, for DP, the bit depth; runs the VBIOS LVDS script for panels;
 * finally attaches the SOR to the owning head via nv50_sor_ctrl().
 */
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht   = nv_encoder->dcb->hasht,
		.base.hashm   = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mask, ctrl;
	u8 owner = 1 << nv_crtc->index;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			/* single-link below 165MHz, dual-link above */
			if (mode->clock < 165000)
				proto = 0x1;
			else
				proto = 0x5;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* build LVDS script flags: 0x0100 = dual-link, 0x0200 = 24bpp */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG EDID byte 121 encodes link count */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;
		nv50_audio_mode_set(encoder, mode);
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);

	if (nv50_vers(mast) >= GF110_DISP) {
		/* GF110+: per-head sync/magic methods, proto in ctrl bits 8-11 */
		u32 *push = evo_wait(mast, 3);
		if (push) {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_kick(push, mast);
		}

		ctrl = proto << 8;
		mask = 0x00000f00;
	} else {
		/* pre-GF110: depth/sync polarity live in the SOR ctrl word */
		ctrl = (depth << 16) | (proto << 8);
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl |= 0x00001000;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl |= 0x00002000;
		mask = 0x000f3f00;
	}

	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
}

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_encoder_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

/* Create a DRM encoder for a DCB SOR output (LVDS/TMDS/DP) and attach it
 * to @connector.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return
0; 2095} 2096 2097/****************************************************************************** 2098 * PIOR 2099 *****************************************************************************/ 2100 2101static void 2102nv50_pior_dpms(struct drm_encoder *encoder, int mode) 2103{ 2104 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 2105 struct nv50_disp *disp = nv50_disp(encoder->dev); 2106 struct { 2107 struct nv50_disp_mthd_v1 base; 2108 struct nv50_disp_pior_pwr_v0 pwr; 2109 } args = { 2110 .base.version = 1, 2111 .base.method = NV50_DISP_MTHD_V1_PIOR_PWR, 2112 .base.hasht = nv_encoder->dcb->hasht, 2113 .base.hashm = nv_encoder->dcb->hashm, 2114 .pwr.state = mode == DRM_MODE_DPMS_ON, 2115 .pwr.type = nv_encoder->dcb->type, 2116 }; 2117 2118 nvif_mthd(disp->disp, 0, &args, sizeof(args)); 2119} 2120 2121static bool 2122nv50_pior_mode_fixup(struct drm_encoder *encoder, 2123 const struct drm_display_mode *mode, 2124 struct drm_display_mode *adjusted_mode) 2125{ 2126 if (!nv50_encoder_mode_fixup(encoder, mode, adjusted_mode)) 2127 return false; 2128 adjusted_mode->clock *= 2; 2129 return true; 2130} 2131 2132static void 2133nv50_pior_commit(struct drm_encoder *encoder) 2134{ 2135} 2136 2137static void 2138nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, 2139 struct drm_display_mode *adjusted_mode) 2140{ 2141 struct nv50_mast *mast = nv50_mast(encoder->dev); 2142 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 2143 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); 2144 struct nouveau_connector *nv_connector; 2145 u8 owner = 1 << nv_crtc->index; 2146 u8 proto, depth; 2147 u32 *push; 2148 2149 nv_connector = nouveau_encoder_connector_get(nv_encoder); 2150 switch (nv_connector->base.display_info.bpc) { 2151 case 10: depth = 0x6; break; 2152 case 8: depth = 0x5; break; 2153 case 6: depth = 0x2; break; 2154 default: depth = 0x0; break; 2155 } 2156 2157 switch (nv_encoder->dcb->type) { 2158 case 
DCB_OUTPUT_TMDS: 2159 case DCB_OUTPUT_DP: 2160 proto = 0x0; 2161 break; 2162 default: 2163 BUG_ON(1); 2164 break; 2165 } 2166 2167 nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON); 2168 2169 push = evo_wait(mast, 8); 2170 if (push) { 2171 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 2172 u32 ctrl = (depth << 16) | (proto << 8) | owner; 2173 if (mode->flags & DRM_MODE_FLAG_NHSYNC) 2174 ctrl |= 0x00001000; 2175 if (mode->flags & DRM_MODE_FLAG_NVSYNC) 2176 ctrl |= 0x00002000; 2177 evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1); 2178 evo_data(push, ctrl); 2179 } 2180 2181 evo_kick(push, mast); 2182 } 2183 2184 nv_encoder->crtc = encoder->crtc; 2185} 2186 2187static void 2188nv50_pior_disconnect(struct drm_encoder *encoder) 2189{ 2190 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 2191 struct nv50_mast *mast = nv50_mast(encoder->dev); 2192 const int or = nv_encoder->or; 2193 u32 *push; 2194 2195 if (nv_encoder->crtc) { 2196 nv50_crtc_prepare(nv_encoder->crtc); 2197 2198 push = evo_wait(mast, 4); 2199 if (push) { 2200 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 2201 evo_mthd(push, 0x0700 + (or * 0x040), 1); 2202 evo_data(push, 0x00000000); 2203 } 2204 evo_kick(push, mast); 2205 } 2206 } 2207 2208 nv_encoder->crtc = NULL; 2209} 2210 2211static void 2212nv50_pior_destroy(struct drm_encoder *encoder) 2213{ 2214 drm_encoder_cleanup(encoder); 2215 kfree(encoder); 2216} 2217 2218static const struct drm_encoder_helper_funcs nv50_pior_hfunc = { 2219 .dpms = nv50_pior_dpms, 2220 .mode_fixup = nv50_pior_mode_fixup, 2221 .prepare = nv50_pior_disconnect, 2222 .commit = nv50_pior_commit, 2223 .mode_set = nv50_pior_mode_set, 2224 .disable = nv50_pior_disconnect, 2225 .get_crtc = nv50_display_crtc_get, 2226}; 2227 2228static const struct drm_encoder_funcs nv50_pior_func = { 2229 .destroy = nv50_pior_destroy, 2230}; 2231 2232static int 2233nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe) 2234{ 2235 struct nouveau_drm *drm = 
nouveau_drm(connector->dev); 2236 struct nvkm_i2c *i2c = nvxx_i2c(&drm->device); 2237 struct nvkm_i2c_port *ddc = NULL; 2238 struct nouveau_encoder *nv_encoder; 2239 struct drm_encoder *encoder; 2240 int type; 2241 2242 switch (dcbe->type) { 2243 case DCB_OUTPUT_TMDS: 2244 ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev)); 2245 type = DRM_MODE_ENCODER_TMDS; 2246 break; 2247 case DCB_OUTPUT_DP: 2248 ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev)); 2249 type = DRM_MODE_ENCODER_TMDS; 2250 break; 2251 default: 2252 return -ENODEV; 2253 } 2254 2255 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL); 2256 if (!nv_encoder) 2257 return -ENOMEM; 2258 nv_encoder->dcb = dcbe; 2259 nv_encoder->or = ffs(dcbe->or) - 1; 2260 nv_encoder->i2c = ddc; 2261 2262 encoder = to_drm_encoder(nv_encoder); 2263 encoder->possible_crtcs = dcbe->heads; 2264 encoder->possible_clones = 0; 2265 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type); 2266 drm_encoder_helper_add(encoder, &nv50_pior_hfunc); 2267 2268 drm_mode_connector_attach_encoder(connector, encoder); 2269 return 0; 2270} 2271 2272/****************************************************************************** 2273 * Framebuffer 2274 *****************************************************************************/ 2275 2276static void 2277nv50_fbdma_fini(struct nv50_fbdma *fbdma) 2278{ 2279 int i; 2280 for (i = 0; i < ARRAY_SIZE(fbdma->base); i++) 2281 nvif_object_fini(&fbdma->base[i]); 2282 nvif_object_fini(&fbdma->core); 2283 list_del(&fbdma->head); 2284 kfree(fbdma); 2285} 2286 2287static int 2288nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind) 2289{ 2290 struct nouveau_drm *drm = nouveau_drm(dev); 2291 struct nv50_disp *disp = nv50_disp(dev); 2292 struct nv50_mast *mast = nv50_mast(dev); 2293 struct __attribute__ ((packed)) { 2294 struct nv_dma_v0 base; 2295 union { 2296 struct nv50_dma_v0 nv50; 2297 struct gf100_dma_v0 gf100; 2298 struct gf110_dma_v0 gf110; 2299 
}; 2300 } args = {}; 2301 struct nv50_fbdma *fbdma; 2302 struct drm_crtc *crtc; 2303 u32 size = sizeof(args.base); 2304 int ret; 2305 2306 list_for_each_entry(fbdma, &disp->fbdma, head) { 2307 if (fbdma->core.handle == name) 2308 return 0; 2309 } 2310 2311 fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL); 2312 if (!fbdma) 2313 return -ENOMEM; 2314 list_add(&fbdma->head, &disp->fbdma); 2315 2316 args.base.target = NV_DMA_V0_TARGET_VRAM; 2317 args.base.access = NV_DMA_V0_ACCESS_RDWR; 2318 args.base.start = offset; 2319 args.base.limit = offset + length - 1; 2320 2321 if (drm->device.info.chipset < 0x80) { 2322 args.nv50.part = NV50_DMA_V0_PART_256; 2323 size += sizeof(args.nv50); 2324 } else 2325 if (drm->device.info.chipset < 0xc0) { 2326 args.nv50.part = NV50_DMA_V0_PART_256; 2327 args.nv50.kind = kind; 2328 size += sizeof(args.nv50); 2329 } else 2330 if (drm->device.info.chipset < 0xd0) { 2331 args.gf100.kind = kind; 2332 size += sizeof(args.gf100); 2333 } else { 2334 args.gf110.page = GF110_DMA_V0_PAGE_LP; 2335 args.gf110.kind = kind; 2336 size += sizeof(args.gf110); 2337 } 2338 2339 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 2340 struct nv50_head *head = nv50_head(crtc); 2341 int ret = nvif_object_init(&head->sync.base.base.user, NULL, 2342 name, NV_DMA_IN_MEMORY, &args, size, 2343 &fbdma->base[head->base.index]); 2344 if (ret) { 2345 nv50_fbdma_fini(fbdma); 2346 return ret; 2347 } 2348 } 2349 2350 ret = nvif_object_init(&mast->base.base.user, NULL, name, 2351 NV_DMA_IN_MEMORY, &args, size, 2352 &fbdma->core); 2353 if (ret) { 2354 nv50_fbdma_fini(fbdma); 2355 return ret; 2356 } 2357 2358 return 0; 2359} 2360 2361static void 2362nv50_fb_dtor(struct drm_framebuffer *fb) 2363{ 2364} 2365 2366static int 2367nv50_fb_ctor(struct drm_framebuffer *fb) 2368{ 2369 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb); 2370 struct nouveau_drm *drm = nouveau_drm(fb->dev); 2371 struct nouveau_bo *nvbo = nv_fb->nvbo; 2372 struct nv50_disp *disp = 
nv50_disp(fb->dev); 2373 u8 kind = nouveau_bo_tile_layout(nvbo) >> 8; 2374 u8 tile = nvbo->tile_mode; 2375 2376 if (drm->device.info.chipset >= 0xc0) 2377 tile >>= 4; /* yep.. */ 2378 2379 switch (fb->depth) { 2380 case 8: nv_fb->r_format = 0x1e00; break; 2381 case 15: nv_fb->r_format = 0xe900; break; 2382 case 16: nv_fb->r_format = 0xe800; break; 2383 case 24: 2384 case 32: nv_fb->r_format = 0xcf00; break; 2385 case 30: nv_fb->r_format = 0xd100; break; 2386 default: 2387 NV_ERROR(drm, "unknown depth %d\n", fb->depth); 2388 return -EINVAL; 2389 } 2390 2391 if (disp->disp->oclass < G82_DISP) { 2392 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : 2393 (fb->pitches[0] | 0x00100000); 2394 nv_fb->r_format |= kind << 16; 2395 } else 2396 if (disp->disp->oclass < GF110_DISP) { 2397 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : 2398 (fb->pitches[0] | 0x00100000); 2399 } else { 2400 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : 2401 (fb->pitches[0] | 0x01000000); 2402 } 2403 nv_fb->r_handle = 0xffff0000 | kind; 2404 2405 return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0, 2406 drm->device.info.ram_user, kind); 2407} 2408 2409/****************************************************************************** 2410 * Init 2411 *****************************************************************************/ 2412 2413void 2414nv50_display_fini(struct drm_device *dev) 2415{ 2416} 2417 2418int 2419nv50_display_init(struct drm_device *dev) 2420{ 2421 struct nv50_disp *disp = nv50_disp(dev); 2422 struct drm_crtc *crtc; 2423 u32 *push; 2424 2425 push = evo_wait(nv50_mast(dev), 32); 2426 if (!push) 2427 return -EBUSY; 2428 2429 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 2430 struct nv50_sync *sync = nv50_sync(crtc); 2431 2432 nv50_crtc_lut_load(crtc); 2433 nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data); 2434 } 2435 2436 evo_mthd(push, 0x0088, 1); 2437 evo_data(push, nv50_mast(dev)->base.sync.handle); 2438 
evo_kick(push, nv50_mast(dev)); 2439 return 0; 2440} 2441 2442void 2443nv50_display_destroy(struct drm_device *dev) 2444{ 2445 struct nv50_disp *disp = nv50_disp(dev); 2446 struct nv50_fbdma *fbdma, *fbtmp; 2447 2448 list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) { 2449 nv50_fbdma_fini(fbdma); 2450 } 2451 2452 nv50_dmac_destroy(&disp->mast.base, disp->disp); 2453 2454 nouveau_bo_unmap(disp->sync); 2455 if (disp->sync) 2456 nouveau_bo_unpin(disp->sync); 2457 nouveau_bo_ref(NULL, &disp->sync); 2458 2459 nouveau_display(dev)->priv = NULL; 2460 kfree(disp); 2461} 2462 2463int 2464nv50_display_create(struct drm_device *dev) 2465{ 2466 struct nvif_device *device = &nouveau_drm(dev)->device; 2467 struct nouveau_drm *drm = nouveau_drm(dev); 2468 struct dcb_table *dcb = &drm->vbios.dcb; 2469 struct drm_connector *connector, *tmp; 2470 struct nv50_disp *disp; 2471 struct dcb_output *dcbe; 2472 int crtcs, ret, i; 2473 2474 disp = kzalloc(sizeof(*disp), GFP_KERNEL); 2475 if (!disp) 2476 return -ENOMEM; 2477 INIT_LIST_HEAD(&disp->fbdma); 2478 2479 nouveau_display(dev)->priv = disp; 2480 nouveau_display(dev)->dtor = nv50_display_destroy; 2481 nouveau_display(dev)->init = nv50_display_init; 2482 nouveau_display(dev)->fini = nv50_display_fini; 2483 nouveau_display(dev)->fb_ctor = nv50_fb_ctor; 2484 nouveau_display(dev)->fb_dtor = nv50_fb_dtor; 2485 disp->disp = &nouveau_display(dev)->disp; 2486 2487 /* small shared memory area we use for notifiers and semaphores */ 2488 ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM, 2489 0, 0x0000, NULL, NULL, &disp->sync); 2490 if (!ret) { 2491 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true); 2492 if (!ret) { 2493 ret = nouveau_bo_map(disp->sync); 2494 if (ret) 2495 nouveau_bo_unpin(disp->sync); 2496 } 2497 if (ret) 2498 nouveau_bo_ref(NULL, &disp->sync); 2499 } 2500 2501 if (ret) 2502 goto out; 2503 2504 /* allocate master evo channel */ 2505 ret = nv50_core_create(disp->disp, disp->sync->bo.offset, 2506 
&disp->mast); 2507 if (ret) 2508 goto out; 2509 2510 /* create crtc objects to represent the hw heads */ 2511 if (disp->disp->oclass >= GF110_DISP) 2512 crtcs = nvif_rd32(device, 0x022448); 2513 else 2514 crtcs = 2; 2515 2516 for (i = 0; i < crtcs; i++) { 2517 ret = nv50_crtc_create(dev, i); 2518 if (ret) 2519 goto out; 2520 } 2521 2522 /* create encoder/connector objects based on VBIOS DCB table */ 2523 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) { 2524 connector = nouveau_connector_create(dev, dcbe->connector); 2525 if (IS_ERR(connector)) 2526 continue; 2527 2528 if (dcbe->location == DCB_LOC_ON_CHIP) { 2529 switch (dcbe->type) { 2530 case DCB_OUTPUT_TMDS: 2531 case DCB_OUTPUT_LVDS: 2532 case DCB_OUTPUT_DP: 2533 ret = nv50_sor_create(connector, dcbe); 2534 break; 2535 case DCB_OUTPUT_ANALOG: 2536 ret = nv50_dac_create(connector, dcbe); 2537 break; 2538 default: 2539 ret = -ENODEV; 2540 break; 2541 } 2542 } else { 2543 ret = nv50_pior_create(connector, dcbe); 2544 } 2545 2546 if (ret) { 2547 NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n", 2548 dcbe->location, dcbe->type, 2549 ffs(dcbe->or) - 1, ret); 2550 ret = 0; 2551 } 2552 } 2553 2554 /* cull any connectors we created that don't have an encoder */ 2555 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) { 2556 if (connector->encoder_ids[0]) 2557 continue; 2558 2559 NV_WARN(drm, "%s has no encoders, removing\n", 2560 connector->name); 2561 connector->funcs->destroy(connector); 2562 } 2563 2564out: 2565 if (ret) 2566 nv50_display_destroy(dev); 2567 return ret; 2568} 2569