1/* 2 * Copyright 2011 Red Hat Inc. 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice shall be included in 12 * all copies or substantial portions of the Software. 13 * 14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 20 * OTHER DEALINGS IN THE SOFTWARE. 
21 * 22 * Authors: Ben Skeggs 23 */ 24 25#include <linux/dma-mapping.h> 26 27#include <drm/drmP.h> 28#include <drm/drm_crtc_helper.h> 29#include <drm/drm_dp_helper.h> 30 31#include <nvif/class.h> 32 33#include "nouveau_drm.h" 34#include "nouveau_dma.h" 35#include "nouveau_gem.h" 36#include "nouveau_connector.h" 37#include "nouveau_encoder.h" 38#include "nouveau_crtc.h" 39#include "nouveau_fence.h" 40#include "nv50_display.h" 41 42#define EVO_DMA_NR 9 43 44#define EVO_MASTER (0x00) 45#define EVO_FLIP(c) (0x01 + (c)) 46#define EVO_OVLY(c) (0x05 + (c)) 47#define EVO_OIMM(c) (0x09 + (c)) 48#define EVO_CURS(c) (0x0d + (c)) 49 50/* offsets in shared sync bo of various structures */ 51#define EVO_SYNC(c, o) ((c) * 0x0100 + (o)) 52#define EVO_MAST_NTFY EVO_SYNC( 0, 0x00) 53#define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00) 54#define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10) 55 56/****************************************************************************** 57 * EVO channel 58 *****************************************************************************/ 59 60struct nv50_chan { 61 struct nvif_object user; 62}; 63 64static int 65nv50_chan_create(struct nvif_object *disp, const u32 *oclass, u8 head, 66 void *data, u32 size, struct nv50_chan *chan) 67{ 68 while (oclass[0]) { 69 int ret = nvif_object_init(disp, NULL, (oclass[0] << 16) | head, 70 oclass[0], data, size, 71 &chan->user); 72 if (oclass++, ret == 0) { 73 nvif_object_map(&chan->user); 74 return ret; 75 } 76 } 77 return -ENOSYS; 78} 79 80static void 81nv50_chan_destroy(struct nv50_chan *chan) 82{ 83 nvif_object_fini(&chan->user); 84} 85 86/****************************************************************************** 87 * PIO EVO channel 88 *****************************************************************************/ 89 90struct nv50_pioc { 91 struct nv50_chan base; 92}; 93 94static void 95nv50_pioc_destroy(struct nv50_pioc *pioc) 96{ 97 nv50_chan_destroy(&pioc->base); 98} 99 100static int 101nv50_pioc_create(struct 
nvif_object *disp, const u32 *oclass, u8 head, 102 void *data, u32 size, struct nv50_pioc *pioc) 103{ 104 return nv50_chan_create(disp, oclass, head, data, size, &pioc->base); 105} 106 107/****************************************************************************** 108 * Cursor Immediate 109 *****************************************************************************/ 110 111struct nv50_curs { 112 struct nv50_pioc base; 113}; 114 115static int 116nv50_curs_create(struct nvif_object *disp, int head, struct nv50_curs *curs) 117{ 118 struct nv50_disp_cursor_v0 args = { 119 .head = head, 120 }; 121 static const u32 oclass[] = { 122 GK104_DISP_CURSOR, 123 GF110_DISP_CURSOR, 124 GT214_DISP_CURSOR, 125 G82_DISP_CURSOR, 126 NV50_DISP_CURSOR, 127 0 128 }; 129 130 return nv50_pioc_create(disp, oclass, head, &args, sizeof(args), 131 &curs->base); 132} 133 134/****************************************************************************** 135 * Overlay Immediate 136 *****************************************************************************/ 137 138struct nv50_oimm { 139 struct nv50_pioc base; 140}; 141 142static int 143nv50_oimm_create(struct nvif_object *disp, int head, struct nv50_oimm *oimm) 144{ 145 struct nv50_disp_cursor_v0 args = { 146 .head = head, 147 }; 148 static const u32 oclass[] = { 149 GK104_DISP_OVERLAY, 150 GF110_DISP_OVERLAY, 151 GT214_DISP_OVERLAY, 152 G82_DISP_OVERLAY, 153 NV50_DISP_OVERLAY, 154 0 155 }; 156 157 return nv50_pioc_create(disp, oclass, head, &args, sizeof(args), 158 &oimm->base); 159} 160 161/****************************************************************************** 162 * DMA EVO channel 163 *****************************************************************************/ 164 165struct nv50_dmac { 166 struct nv50_chan base; 167 dma_addr_t handle; 168 u32 *ptr; 169 170 struct nvif_object sync; 171 struct nvif_object vram; 172 173 /* Protects against concurrent pushbuf access to this channel, lock is 174 * grabbed by evo_wait (if the 
pushbuf reservation is successful) and 175 * dropped again by evo_kick. */ 176 struct mutex lock; 177}; 178 179static void 180nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp) 181{ 182 nvif_object_fini(&dmac->vram); 183 nvif_object_fini(&dmac->sync); 184 185 nv50_chan_destroy(&dmac->base); 186 187 if (dmac->ptr) { 188 struct pci_dev *pdev = nvkm_device(nvif_device(disp))->pdev; 189 pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle); 190 } 191} 192 193static int 194nv50_dmac_create(struct nvif_object *disp, const u32 *oclass, u8 head, 195 void *data, u32 size, u64 syncbuf, 196 struct nv50_dmac *dmac) 197{ 198 struct nvif_device *device = nvif_device(disp); 199 struct nv50_disp_core_channel_dma_v0 *args = data; 200 struct nvif_object pushbuf; 201 int ret; 202 203 mutex_init(&dmac->lock); 204 205 dmac->ptr = pci_alloc_consistent(nvkm_device(device)->pdev, 206 PAGE_SIZE, &dmac->handle); 207 if (!dmac->ptr) 208 return -ENOMEM; 209 210 ret = nvif_object_init(nvif_object(device), NULL, 211 args->pushbuf, NV_DMA_FROM_MEMORY, 212 &(struct nv_dma_v0) { 213 .target = NV_DMA_V0_TARGET_PCI_US, 214 .access = NV_DMA_V0_ACCESS_RD, 215 .start = dmac->handle + 0x0000, 216 .limit = dmac->handle + 0x0fff, 217 }, sizeof(struct nv_dma_v0), &pushbuf); 218 if (ret) 219 return ret; 220 221 ret = nv50_chan_create(disp, oclass, head, data, size, &dmac->base); 222 nvif_object_fini(&pushbuf); 223 if (ret) 224 return ret; 225 226 ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000000, 227 NV_DMA_IN_MEMORY, 228 &(struct nv_dma_v0) { 229 .target = NV_DMA_V0_TARGET_VRAM, 230 .access = NV_DMA_V0_ACCESS_RDWR, 231 .start = syncbuf + 0x0000, 232 .limit = syncbuf + 0x0fff, 233 }, sizeof(struct nv_dma_v0), 234 &dmac->sync); 235 if (ret) 236 return ret; 237 238 ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000001, 239 NV_DMA_IN_MEMORY, 240 &(struct nv_dma_v0) { 241 .target = NV_DMA_V0_TARGET_VRAM, 242 .access = NV_DMA_V0_ACCESS_RDWR, 243 .start = 0, 244 .limit = 
device->info.ram_user - 1, 245 }, sizeof(struct nv_dma_v0), 246 &dmac->vram); 247 if (ret) 248 return ret; 249 250 return ret; 251} 252 253/****************************************************************************** 254 * Core 255 *****************************************************************************/ 256 257struct nv50_mast { 258 struct nv50_dmac base; 259}; 260 261static int 262nv50_core_create(struct nvif_object *disp, u64 syncbuf, struct nv50_mast *core) 263{ 264 struct nv50_disp_core_channel_dma_v0 args = { 265 .pushbuf = 0xb0007d00, 266 }; 267 static const u32 oclass[] = { 268 GM107_DISP_CORE_CHANNEL_DMA, 269 GK110_DISP_CORE_CHANNEL_DMA, 270 GK104_DISP_CORE_CHANNEL_DMA, 271 GF110_DISP_CORE_CHANNEL_DMA, 272 GT214_DISP_CORE_CHANNEL_DMA, 273 GT206_DISP_CORE_CHANNEL_DMA, 274 GT200_DISP_CORE_CHANNEL_DMA, 275 G82_DISP_CORE_CHANNEL_DMA, 276 NV50_DISP_CORE_CHANNEL_DMA, 277 0 278 }; 279 280 return nv50_dmac_create(disp, oclass, 0, &args, sizeof(args), syncbuf, 281 &core->base); 282} 283 284/****************************************************************************** 285 * Base 286 *****************************************************************************/ 287 288struct nv50_sync { 289 struct nv50_dmac base; 290 u32 addr; 291 u32 data; 292}; 293 294static int 295nv50_base_create(struct nvif_object *disp, int head, u64 syncbuf, 296 struct nv50_sync *base) 297{ 298 struct nv50_disp_base_channel_dma_v0 args = { 299 .pushbuf = 0xb0007c00 | head, 300 .head = head, 301 }; 302 static const u32 oclass[] = { 303 GK110_DISP_BASE_CHANNEL_DMA, 304 GK104_DISP_BASE_CHANNEL_DMA, 305 GF110_DISP_BASE_CHANNEL_DMA, 306 GT214_DISP_BASE_CHANNEL_DMA, 307 GT200_DISP_BASE_CHANNEL_DMA, 308 G82_DISP_BASE_CHANNEL_DMA, 309 NV50_DISP_BASE_CHANNEL_DMA, 310 0 311 }; 312 313 return nv50_dmac_create(disp, oclass, head, &args, sizeof(args), 314 syncbuf, &base->base); 315} 316 317/****************************************************************************** 318 * Overlay 319 
*****************************************************************************/ 320 321struct nv50_ovly { 322 struct nv50_dmac base; 323}; 324 325static int 326nv50_ovly_create(struct nvif_object *disp, int head, u64 syncbuf, 327 struct nv50_ovly *ovly) 328{ 329 struct nv50_disp_overlay_channel_dma_v0 args = { 330 .pushbuf = 0xb0007e00 | head, 331 .head = head, 332 }; 333 static const u32 oclass[] = { 334 GK104_DISP_OVERLAY_CONTROL_DMA, 335 GF110_DISP_OVERLAY_CONTROL_DMA, 336 GT214_DISP_OVERLAY_CHANNEL_DMA, 337 GT200_DISP_OVERLAY_CHANNEL_DMA, 338 G82_DISP_OVERLAY_CHANNEL_DMA, 339 NV50_DISP_OVERLAY_CHANNEL_DMA, 340 0 341 }; 342 343 return nv50_dmac_create(disp, oclass, head, &args, sizeof(args), 344 syncbuf, &ovly->base); 345} 346 347struct nv50_head { 348 struct nouveau_crtc base; 349 struct nouveau_bo *image; 350 struct nv50_curs curs; 351 struct nv50_sync sync; 352 struct nv50_ovly ovly; 353 struct nv50_oimm oimm; 354}; 355 356#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c)) 357#define nv50_curs(c) (&nv50_head(c)->curs) 358#define nv50_sync(c) (&nv50_head(c)->sync) 359#define nv50_ovly(c) (&nv50_head(c)->ovly) 360#define nv50_oimm(c) (&nv50_head(c)->oimm) 361#define nv50_chan(c) (&(c)->base.base) 362#define nv50_vers(c) nv50_chan(c)->user.oclass 363 364struct nv50_fbdma { 365 struct list_head head; 366 struct nvif_object core; 367 struct nvif_object base[4]; 368}; 369 370struct nv50_disp { 371 struct nvif_object *disp; 372 struct nv50_mast mast; 373 374 struct list_head fbdma; 375 376 struct nouveau_bo *sync; 377}; 378 379static struct nv50_disp * 380nv50_disp(struct drm_device *dev) 381{ 382 return nouveau_display(dev)->priv; 383} 384 385#define nv50_mast(d) (&nv50_disp(d)->mast) 386 387static struct drm_crtc * 388nv50_display_crtc_get(struct drm_encoder *encoder) 389{ 390 return nouveau_encoder(encoder)->crtc; 391} 392 393/****************************************************************************** 394 * EVO channel helpers 395 
*****************************************************************************/ 396static u32 * 397evo_wait(void *evoc, int nr) 398{ 399 struct nv50_dmac *dmac = evoc; 400 u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4; 401 402 mutex_lock(&dmac->lock); 403 if (put + nr >= (PAGE_SIZE / 4) - 8) { 404 dmac->ptr[put] = 0x20000000; 405 406 nvif_wr32(&dmac->base.user, 0x0000, 0x00000000); 407 if (!nvkm_wait(&dmac->base.user, 0x0004, ~0, 0x00000000)) { 408 mutex_unlock(&dmac->lock); 409 nv_error(nvkm_object(&dmac->base.user), "channel stalled\n"); 410 return NULL; 411 } 412 413 put = 0; 414 } 415 416 return dmac->ptr + put; 417} 418 419static void 420evo_kick(u32 *push, void *evoc) 421{ 422 struct nv50_dmac *dmac = evoc; 423 nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2); 424 mutex_unlock(&dmac->lock); 425} 426 427#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m)) 428#define evo_data(p,d) *((p)++) = (d) 429 430static bool 431evo_sync_wait(void *data) 432{ 433 if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000) 434 return true; 435 usleep_range(1, 2); 436 return false; 437} 438 439static int 440evo_sync(struct drm_device *dev) 441{ 442 struct nvif_device *device = &nouveau_drm(dev)->device; 443 struct nv50_disp *disp = nv50_disp(dev); 444 struct nv50_mast *mast = nv50_mast(dev); 445 u32 *push = evo_wait(mast, 8); 446 if (push) { 447 nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000); 448 evo_mthd(push, 0x0084, 1); 449 evo_data(push, 0x80000000 | EVO_MAST_NTFY); 450 evo_mthd(push, 0x0080, 2); 451 evo_data(push, 0x00000000); 452 evo_data(push, 0x00000000); 453 evo_kick(push, mast); 454 if (nv_wait_cb(nvkm_device(device), evo_sync_wait, disp->sync)) 455 return 0; 456 } 457 458 return -EBUSY; 459} 460 461/****************************************************************************** 462 * Page flipping channel 463 *****************************************************************************/ 464struct nouveau_bo * 465nv50_display_crtc_sema(struct 
drm_device *dev, int crtc) 466{ 467 return nv50_disp(dev)->sync; 468} 469 470struct nv50_display_flip { 471 struct nv50_disp *disp; 472 struct nv50_sync *chan; 473}; 474 475static bool 476nv50_display_flip_wait(void *data) 477{ 478 struct nv50_display_flip *flip = data; 479 if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) == 480 flip->chan->data) 481 return true; 482 usleep_range(1, 2); 483 return false; 484} 485 486void 487nv50_display_flip_stop(struct drm_crtc *crtc) 488{ 489 struct nvif_device *device = &nouveau_drm(crtc->dev)->device; 490 struct nv50_display_flip flip = { 491 .disp = nv50_disp(crtc->dev), 492 .chan = nv50_sync(crtc), 493 }; 494 u32 *push; 495 496 push = evo_wait(flip.chan, 8); 497 if (push) { 498 evo_mthd(push, 0x0084, 1); 499 evo_data(push, 0x00000000); 500 evo_mthd(push, 0x0094, 1); 501 evo_data(push, 0x00000000); 502 evo_mthd(push, 0x00c0, 1); 503 evo_data(push, 0x00000000); 504 evo_mthd(push, 0x0080, 1); 505 evo_data(push, 0x00000000); 506 evo_kick(push, flip.chan); 507 } 508 509 nv_wait_cb(nvkm_device(device), nv50_display_flip_wait, &flip); 510} 511 512int 513nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb, 514 struct nouveau_channel *chan, u32 swap_interval) 515{ 516 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb); 517 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); 518 struct nv50_head *head = nv50_head(crtc); 519 struct nv50_sync *sync = nv50_sync(crtc); 520 u32 *push; 521 int ret; 522 523 swap_interval <<= 4; 524 if (swap_interval == 0) 525 swap_interval |= 0x100; 526 if (chan == NULL) 527 evo_sync(crtc->dev); 528 529 push = evo_wait(sync, 128); 530 if (unlikely(push == NULL)) 531 return -EBUSY; 532 533 if (chan && chan->object->oclass < G82_CHANNEL_GPFIFO) { 534 ret = RING_SPACE(chan, 8); 535 if (ret) 536 return ret; 537 538 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2); 539 OUT_RING (chan, NvEvoSema0 + nv_crtc->index); 540 OUT_RING (chan, sync->addr ^ 0x10); 541 
BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1); 542 OUT_RING (chan, sync->data + 1); 543 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2); 544 OUT_RING (chan, sync->addr); 545 OUT_RING (chan, sync->data); 546 } else 547 if (chan && chan->object->oclass < FERMI_CHANNEL_GPFIFO) { 548 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr; 549 ret = RING_SPACE(chan, 12); 550 if (ret) 551 return ret; 552 553 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1); 554 OUT_RING (chan, chan->vram.handle); 555 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); 556 OUT_RING (chan, upper_32_bits(addr ^ 0x10)); 557 OUT_RING (chan, lower_32_bits(addr ^ 0x10)); 558 OUT_RING (chan, sync->data + 1); 559 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG); 560 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); 561 OUT_RING (chan, upper_32_bits(addr)); 562 OUT_RING (chan, lower_32_bits(addr)); 563 OUT_RING (chan, sync->data); 564 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL); 565 } else 566 if (chan) { 567 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr; 568 ret = RING_SPACE(chan, 10); 569 if (ret) 570 return ret; 571 572 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); 573 OUT_RING (chan, upper_32_bits(addr ^ 0x10)); 574 OUT_RING (chan, lower_32_bits(addr ^ 0x10)); 575 OUT_RING (chan, sync->data + 1); 576 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG | 577 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD); 578 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); 579 OUT_RING (chan, upper_32_bits(addr)); 580 OUT_RING (chan, lower_32_bits(addr)); 581 OUT_RING (chan, sync->data); 582 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL | 583 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD); 584 } 585 586 if (chan) { 587 sync->addr ^= 0x10; 588 sync->data++; 589 FIRE_RING (chan); 590 } 591 592 /* queue the flip */ 593 evo_mthd(push, 0x0100, 1); 594 evo_data(push, 0xfffe0000); 595 evo_mthd(push, 
0x0084, 1); 596 evo_data(push, swap_interval); 597 if (!(swap_interval & 0x00000100)) { 598 evo_mthd(push, 0x00e0, 1); 599 evo_data(push, 0x40000000); 600 } 601 evo_mthd(push, 0x0088, 4); 602 evo_data(push, sync->addr); 603 evo_data(push, sync->data++); 604 evo_data(push, sync->data); 605 evo_data(push, sync->base.sync.handle); 606 evo_mthd(push, 0x00a0, 2); 607 evo_data(push, 0x00000000); 608 evo_data(push, 0x00000000); 609 evo_mthd(push, 0x00c0, 1); 610 evo_data(push, nv_fb->r_handle); 611 evo_mthd(push, 0x0110, 2); 612 evo_data(push, 0x00000000); 613 evo_data(push, 0x00000000); 614 if (nv50_vers(sync) < GF110_DISP_BASE_CHANNEL_DMA) { 615 evo_mthd(push, 0x0800, 5); 616 evo_data(push, nv_fb->nvbo->bo.offset >> 8); 617 evo_data(push, 0); 618 evo_data(push, (fb->height << 16) | fb->width); 619 evo_data(push, nv_fb->r_pitch); 620 evo_data(push, nv_fb->r_format); 621 } else { 622 evo_mthd(push, 0x0400, 5); 623 evo_data(push, nv_fb->nvbo->bo.offset >> 8); 624 evo_data(push, 0); 625 evo_data(push, (fb->height << 16) | fb->width); 626 evo_data(push, nv_fb->r_pitch); 627 evo_data(push, nv_fb->r_format); 628 } 629 evo_mthd(push, 0x0080, 1); 630 evo_data(push, 0x00000000); 631 evo_kick(push, sync); 632 633 nouveau_bo_ref(nv_fb->nvbo, &head->image); 634 return 0; 635} 636 637/****************************************************************************** 638 * CRTC 639 *****************************************************************************/ 640static int 641nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update) 642{ 643 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); 644 struct nouveau_connector *nv_connector; 645 struct drm_connector *connector; 646 u32 *push, mode = 0x00; 647 648 nv_connector = nouveau_crtc_connector_get(nv_crtc); 649 connector = &nv_connector->base; 650 if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) { 651 if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3) 652 mode = DITHERING_MODE_DYNAMIC2X2; 653 } else 
{ 654 mode = nv_connector->dithering_mode; 655 } 656 657 if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) { 658 if (connector->display_info.bpc >= 8) 659 mode |= DITHERING_DEPTH_8BPC; 660 } else { 661 mode |= nv_connector->dithering_depth; 662 } 663 664 push = evo_wait(mast, 4); 665 if (push) { 666 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 667 evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1); 668 evo_data(push, mode); 669 } else 670 if (nv50_vers(mast) < GK104_DISP_CORE_CHANNEL_DMA) { 671 evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1); 672 evo_data(push, mode); 673 } else { 674 evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1); 675 evo_data(push, mode); 676 } 677 678 if (update) { 679 evo_mthd(push, 0x0080, 1); 680 evo_data(push, 0x00000000); 681 } 682 evo_kick(push, mast); 683 } 684 685 return 0; 686} 687 688static int 689nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update) 690{ 691 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); 692 struct drm_display_mode *omode, *umode = &nv_crtc->base.mode; 693 struct drm_crtc *crtc = &nv_crtc->base; 694 struct nouveau_connector *nv_connector; 695 int mode = DRM_MODE_SCALE_NONE; 696 u32 oX, oY, *push; 697 698 /* start off at the resolution we programmed the crtc for, this 699 * effectively handles NONE/FULL scaling 700 */ 701 nv_connector = nouveau_crtc_connector_get(nv_crtc); 702 if (nv_connector && nv_connector->native_mode) 703 mode = nv_connector->scaling_mode; 704 705 if (mode != DRM_MODE_SCALE_NONE) 706 omode = nv_connector->native_mode; 707 else 708 omode = umode; 709 710 oX = omode->hdisplay; 711 oY = omode->vdisplay; 712 if (omode->flags & DRM_MODE_FLAG_DBLSCAN) 713 oY *= 2; 714 715 /* add overscan compensation if necessary, will keep the aspect 716 * ratio the same as the backend mode unless overridden by the 717 * user setting both hborder and vborder properties. 
718 */ 719 if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON || 720 (nv_connector->underscan == UNDERSCAN_AUTO && 721 nv_connector->edid && 722 drm_detect_hdmi_monitor(nv_connector->edid)))) { 723 u32 bX = nv_connector->underscan_hborder; 724 u32 bY = nv_connector->underscan_vborder; 725 u32 aspect = (oY << 19) / oX; 726 727 if (bX) { 728 oX -= (bX * 2); 729 if (bY) oY -= (bY * 2); 730 else oY = ((oX * aspect) + (aspect / 2)) >> 19; 731 } else { 732 oX -= (oX >> 4) + 32; 733 if (bY) oY -= (bY * 2); 734 else oY = ((oX * aspect) + (aspect / 2)) >> 19; 735 } 736 } 737 738 /* handle CENTER/ASPECT scaling, taking into account the areas 739 * removed already for overscan compensation 740 */ 741 switch (mode) { 742 case DRM_MODE_SCALE_CENTER: 743 oX = min((u32)umode->hdisplay, oX); 744 oY = min((u32)umode->vdisplay, oY); 745 /* fall-through */ 746 case DRM_MODE_SCALE_ASPECT: 747 if (oY < oX) { 748 u32 aspect = (umode->hdisplay << 19) / umode->vdisplay; 749 oX = ((oY * aspect) + (aspect / 2)) >> 19; 750 } else { 751 u32 aspect = (umode->vdisplay << 19) / umode->hdisplay; 752 oY = ((oX * aspect) + (aspect / 2)) >> 19; 753 } 754 break; 755 default: 756 break; 757 } 758 759 push = evo_wait(mast, 8); 760 if (push) { 761 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 762 /*XXX: SCALE_CTRL_ACTIVE??? 
*/ 763 evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2); 764 evo_data(push, (oY << 16) | oX); 765 evo_data(push, (oY << 16) | oX); 766 evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1); 767 evo_data(push, 0x00000000); 768 evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1); 769 evo_data(push, umode->vdisplay << 16 | umode->hdisplay); 770 } else { 771 evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3); 772 evo_data(push, (oY << 16) | oX); 773 evo_data(push, (oY << 16) | oX); 774 evo_data(push, (oY << 16) | oX); 775 evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1); 776 evo_data(push, 0x00000000); 777 evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1); 778 evo_data(push, umode->vdisplay << 16 | umode->hdisplay); 779 } 780 781 evo_kick(push, mast); 782 783 if (update) { 784 nv50_display_flip_stop(crtc); 785 nv50_display_flip_next(crtc, crtc->primary->fb, 786 NULL, 1); 787 } 788 } 789 790 return 0; 791} 792 793static int 794nv50_crtc_set_raster_vblank_dmi(struct nouveau_crtc *nv_crtc, u32 usec) 795{ 796 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); 797 u32 *push; 798 799 push = evo_wait(mast, 8); 800 if (!push) 801 return -ENOMEM; 802 803 evo_mthd(push, 0x0828 + (nv_crtc->index * 0x400), 1); 804 evo_data(push, usec); 805 evo_kick(push, mast); 806 return 0; 807} 808 809static int 810nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update) 811{ 812 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); 813 u32 *push, hue, vib; 814 int adj; 815 816 adj = (nv_crtc->color_vibrance > 0) ? 
50 : 0; 817 vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff; 818 hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff; 819 820 push = evo_wait(mast, 16); 821 if (push) { 822 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 823 evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1); 824 evo_data(push, (hue << 20) | (vib << 8)); 825 } else { 826 evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1); 827 evo_data(push, (hue << 20) | (vib << 8)); 828 } 829 830 if (update) { 831 evo_mthd(push, 0x0080, 1); 832 evo_data(push, 0x00000000); 833 } 834 evo_kick(push, mast); 835 } 836 837 return 0; 838} 839 840static int 841nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb, 842 int x, int y, bool update) 843{ 844 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb); 845 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); 846 u32 *push; 847 848 push = evo_wait(mast, 16); 849 if (push) { 850 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 851 evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1); 852 evo_data(push, nvfb->nvbo->bo.offset >> 8); 853 evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3); 854 evo_data(push, (fb->height << 16) | fb->width); 855 evo_data(push, nvfb->r_pitch); 856 evo_data(push, nvfb->r_format); 857 evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1); 858 evo_data(push, (y << 16) | x); 859 if (nv50_vers(mast) > NV50_DISP_CORE_CHANNEL_DMA) { 860 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1); 861 evo_data(push, nvfb->r_handle); 862 } 863 } else { 864 evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1); 865 evo_data(push, nvfb->nvbo->bo.offset >> 8); 866 evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4); 867 evo_data(push, (fb->height << 16) | fb->width); 868 evo_data(push, nvfb->r_pitch); 869 evo_data(push, nvfb->r_format); 870 evo_data(push, nvfb->r_handle); 871 evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1); 872 evo_data(push, (y << 16) | x); 873 } 874 875 if (update) { 876 
evo_mthd(push, 0x0080, 1); 877 evo_data(push, 0x00000000); 878 } 879 evo_kick(push, mast); 880 } 881 882 nv_crtc->fb.handle = nvfb->r_handle; 883 return 0; 884} 885 886static void 887nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc) 888{ 889 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); 890 u32 *push = evo_wait(mast, 16); 891 if (push) { 892 if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) { 893 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2); 894 evo_data(push, 0x85000000); 895 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); 896 } else 897 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 898 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2); 899 evo_data(push, 0x85000000); 900 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); 901 evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1); 902 evo_data(push, mast->base.vram.handle); 903 } else { 904 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2); 905 evo_data(push, 0x85000000); 906 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); 907 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1); 908 evo_data(push, mast->base.vram.handle); 909 } 910 evo_kick(push, mast); 911 } 912} 913 914static void 915nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc) 916{ 917 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev); 918 u32 *push = evo_wait(mast, 16); 919 if (push) { 920 if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) { 921 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1); 922 evo_data(push, 0x05000000); 923 } else 924 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) { 925 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1); 926 evo_data(push, 0x05000000); 927 evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1); 928 evo_data(push, 0x00000000); 929 } else { 930 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1); 931 evo_data(push, 0x05000000); 932 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1); 933 evo_data(push, 0x00000000); 934 } 935 evo_kick(push, mast); 936 } 937} 

/* Show or hide the hardware cursor, optionally pushing a core-channel
 * update (method 0x0080) so the change takes effect immediately.
 */
static void
nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);

	if (show)
		nv50_crtc_cursor_show(nv_crtc);
	else
		nv50_crtc_cursor_hide(nv_crtc);

	if (update) {
		u32 *push = evo_wait(mast, 2);
		if (push) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}
	}
}

/* DPMS is a no-op for this display engine. */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

/* Blank the head before a modeset: stop flips, detach the framebuffer
 * and LUT contexts on the core channel, then hide the cursor.
 */
static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 6);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}

/* Unblank the head after a modeset: re-attach the framebuffer and LUT,
 * restore cursor visibility, and kick off flipping again.
 */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < G82_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}

/* Pin the new primary framebuffer in VRAM and release the pin on the
 * previously scanned-out buffer tracked in head->image.
 */
static int
nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
	struct nv50_head *head = nv50_head(crtc);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret == 0) {
		if (head->image)
			nouveau_bo_unpin(head->image);
		nouveau_bo_ref(nvfb->nvbo, &head->image);
	}

	return ret;
}

/* Program the head's raster timings for "mode", then re-apply dither,
 * scale, vibrance and scanout image state.
 */
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1, vblankus = 0;
	u32 *push;
	int ret;

	/* horizontal timings, in pixels */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* vertical timings, adjusted for doublescan/interlace */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	/* XXX: Safe underestimate, even "0" works */
	vblankus = (vactive - mode->vdisplay - 2) * hactive;
	vblankus *= 1000;
	vblankus /= mode->clock;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);

	/* G94 only accepts this after setting scale */
	if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA)
		nv50_crtc_set_raster_vblank_dmi(nv_crtc, vblankus);

	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
	return 0;
}

/* Change scanout to the current primary fb at pan (x,y). */
static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	if (!crtc->primary->fb) {
		NV_DEBUG(drm, "No FB bound\n");
		return 0;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
	return 0;
}

/* kgdb/atomic variant of mode_set_base: no fb pinning or flip restart. */
static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

/* Upload the gamma LUT into the head's LUT buffer object.
 * NOTE(review): function continues beyond this source chunk.
 */
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		/* tail of nv50_crtc_lut_load (begun above this edit):
		 * pre-GF110 uses an 8-byte entry stride with no value bias,
		 * GF110+ a 32-byte stride with a +0x6000 bias
		 */
		if (disp->disp->oclass < GF110_DISP) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}

/* Wait for the EVO channel to go idle, then release the head's pinned
 * scanout image.
 */
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	evo_sync(crtc->dev);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}

/* Legacy cursor-set ioctl: copy a 64x64 cursor image word-by-word from
 * the userspace GEM object into the head's cursor BO, then update
 * visibility.  handle == 0 hides the cursor.
 *
 * Returns 0 on success, -EINVAL for a non-64x64 cursor, -ENOENT for a
 * bad handle, or the nouveau_bo_map() error.
 */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* hardware cursor is fixed at 64x64 */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	/* only touch hardware when visibility actually changes */
	if (visible != nv_crtc->cursor.visible) {
		nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

/* Move the hardware cursor via the head's cursor (PIO) channel.
 * NOTE(review): the 0x0080 write appears to latch the new position set
 * at 0x0084 -- confirm against the display class documentation.
 */
static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nvif_wr32(&chan->user, 0x0080, 0x00000000);
	return 0;
}

/* Update a range of the software gamma LUT (clamped to 256 entries) and
 * reload the whole table into hardware.
 */
static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min_t(u32, start + size, 256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_crtc_lut_load(crtc);
}

/* Tear down a head: drop its per-head fbdma objects, destroy the four
 * per-head EVO channels, release pinned image/cursor/LUT buffers, then
 * clean up and free the DRM CRTC.
 */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_fbdma *fbdma;

	list_for_each_entry(fbdma, &disp->fbdma, head) {
		nvif_object_fini(&fbdma->base[nv_crtc->index]);
	}

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);
	nv50_dmac_destroy(&head->sync.base, disp->disp);
	nv50_pioc_destroy(&head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 * disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic =
nv50_crtc_mode_set_base_atomic, 1352 .load_lut = nv50_crtc_lut_load, 1353 .disable = nv50_crtc_disable, 1354}; 1355 1356static const struct drm_crtc_funcs nv50_crtc_func = { 1357 .cursor_set = nv50_crtc_cursor_set, 1358 .cursor_move = nv50_crtc_cursor_move, 1359 .gamma_set = nv50_crtc_gamma_set, 1360 .set_config = nouveau_crtc_set_config, 1361 .destroy = nv50_crtc_destroy, 1362 .page_flip = nouveau_crtc_page_flip, 1363}; 1364 1365static void 1366nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y) 1367{ 1368} 1369 1370static void 1371nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset) 1372{ 1373} 1374 1375static int 1376nv50_crtc_create(struct drm_device *dev, int index) 1377{ 1378 struct nv50_disp *disp = nv50_disp(dev); 1379 struct nv50_head *head; 1380 struct drm_crtc *crtc; 1381 int ret, i; 1382 1383 head = kzalloc(sizeof(*head), GFP_KERNEL); 1384 if (!head) 1385 return -ENOMEM; 1386 1387 head->base.index = index; 1388 head->base.set_dither = nv50_crtc_set_dither; 1389 head->base.set_scale = nv50_crtc_set_scale; 1390 head->base.set_color_vibrance = nv50_crtc_set_color_vibrance; 1391 head->base.color_vibrance = 50; 1392 head->base.vibrant_hue = 0; 1393 head->base.cursor.set_offset = nv50_cursor_set_offset; 1394 head->base.cursor.set_pos = nv50_cursor_set_pos; 1395 for (i = 0; i < 256; i++) { 1396 head->base.lut.r[i] = i << 8; 1397 head->base.lut.g[i] = i << 8; 1398 head->base.lut.b[i] = i << 8; 1399 } 1400 1401 crtc = &head->base.base; 1402 drm_crtc_init(dev, crtc, &nv50_crtc_func); 1403 drm_crtc_helper_add(crtc, &nv50_crtc_hfunc); 1404 drm_mode_crtc_set_gamma_size(crtc, 256); 1405 1406 ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM, 1407 0, 0x0000, NULL, NULL, &head->base.lut.nvbo); 1408 if (!ret) { 1409 ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM); 1410 if (!ret) { 1411 ret = nouveau_bo_map(head->base.lut.nvbo); 1412 if (ret) 1413 nouveau_bo_unpin(head->base.lut.nvbo); 1414 } 1415 if (ret) 1416 
nouveau_bo_ref(NULL, &head->base.lut.nvbo); 1417 } 1418 1419 if (ret) 1420 goto out; 1421 1422 nv50_crtc_lut_load(crtc); 1423 1424 /* allocate cursor resources */ 1425 ret = nv50_curs_create(disp->disp, index, &head->curs); 1426 if (ret) 1427 goto out; 1428 1429 ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM, 1430 0, 0x0000, NULL, NULL, &head->base.cursor.nvbo); 1431 if (!ret) { 1432 ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM); 1433 if (!ret) { 1434 ret = nouveau_bo_map(head->base.cursor.nvbo); 1435 if (ret) 1436 nouveau_bo_unpin(head->base.lut.nvbo); 1437 } 1438 if (ret) 1439 nouveau_bo_ref(NULL, &head->base.cursor.nvbo); 1440 } 1441 1442 if (ret) 1443 goto out; 1444 1445 /* allocate page flip / sync resources */ 1446 ret = nv50_base_create(disp->disp, index, disp->sync->bo.offset, 1447 &head->sync); 1448 if (ret) 1449 goto out; 1450 1451 head->sync.addr = EVO_FLIP_SEM0(index); 1452 head->sync.data = 0x00000000; 1453 1454 /* allocate overlay resources */ 1455 ret = nv50_oimm_create(disp->disp, index, &head->oimm); 1456 if (ret) 1457 goto out; 1458 1459 ret = nv50_ovly_create(disp->disp, index, disp->sync->bo.offset, 1460 &head->ovly); 1461 if (ret) 1462 goto out; 1463 1464out: 1465 if (ret) 1466 nv50_crtc_destroy(crtc); 1467 return ret; 1468} 1469 1470/****************************************************************************** 1471 * DAC 1472 *****************************************************************************/ 1473static void 1474nv50_dac_dpms(struct drm_encoder *encoder, int mode) 1475{ 1476 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 1477 struct nv50_disp *disp = nv50_disp(encoder->dev); 1478 struct { 1479 struct nv50_disp_mthd_v1 base; 1480 struct nv50_disp_dac_pwr_v0 pwr; 1481 } args = { 1482 .base.version = 1, 1483 .base.method = NV50_DISP_MTHD_V1_DAC_PWR, 1484 .base.hasht = nv_encoder->dcb->hasht, 1485 .base.hashm = nv_encoder->dcb->hashm, 1486 .pwr.state = 1, 1487 .pwr.data = 1, 1488 
		/* tail of the nv50_dac_dpms args initializer begun above this
		 * edit: sync signals stay up unless suspended/standby/off
		 */
		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
			      mode != DRM_MODE_DPMS_OFF),
		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
			      mode != DRM_MODE_DPMS_OFF),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

/* Substitute the connector's native mode when a scaling mode is active,
 * preserving the DRM mode object id.  Never rejects a mode.
 */
static bool
nv50_dac_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

/* Nothing to do at commit time; mode_set does all the work. */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}

/* Route this DAC to its CRTC in the core channel and program the sync
 * polarities; method layout differs pre/post GF110.
 */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

/* Detach the DAC from its CRTC by zeroing its core-channel control word;
 * used as both the prepare and disable helper hook.
 */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

/* Analog load detection: have the display engine drive the VBIOS test
 * voltage (340 when the VBIOS provides none) and report whether a load
 * was sensed.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_dac_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};

static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

/* Create a DRM encoder for a DCB DAC entry and attach it to connector.
 * Returns 0 on success or -ENOMEM.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
/* Push the connector's ELD to the display engine so HDMI/DP audio can be
 * advertised to the monitor; no-op when the monitor reports no audio.
 */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp
*disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht = nv_encoder->dcb->hasht,
		.base.mthd.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
				   (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	/* NOTE(review): args.data[2] appears to be the baseline ELD block
	 * length in 4-byte units -- confirm against the HDA ELD layout
	 */
	nvif_mthd(disp->disp, 0, &args, sizeof(args.base) + args.data[2] * 4);
}

/* Tear down HDA ELD state for the head this encoder was driving (the
 * method is sent with no ELD payload).
 */
static void
nv50_audio_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
/* Power up HDMI for the head driving this encoder, computing
 * max_ac_packet from the mode's horizontal blanking; no-op for non-HDMI
 * monitors.  Also pushes the ELD via nv50_audio_mode_set().
 */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
	nv50_audio_mode_set(encoder, mode);
}

/* Power down HDMI for the head this encoder was driving (pwr.state is
 * left at its zero default).
 */
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

/******************************************************************************
 * SOR
 *****************************************************************************/
/* Set SOR power state, skipping the hardware write if another TMDS
 * encoder sharing this OR is still on.  For DP outputs the SOR itself is
 * kept powered and the DP link power is gated instead.
 */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_pwr_v0 pwr;
	} link = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;

	nv_encoder->last_dpms = mode;

	/* defer to a TMDS partner on the same OR if it's still powered on */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		/* DP: SOR stays on; the link power method does the gating */
		args.pwr.state = 1;
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
		nvif_mthd(disp->disp, 0, &link, sizeof(link));
	} else {
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
	}
}

/* Same native-mode substitution as the DAC variant.  Never rejects. */
static bool
nv50_sor_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

/* Read-modify-write the SOR's cached core-channel control word, flushing
 * to hardware only when the masked update actually changes it.
 */
static void
nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32
mask, u32 data)
{
	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		} else {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		}
		evo_kick(push, mast);
	}
}

/* Detach the SOR from its CRTC: clear the CRTC bit in its control word
 * and shut down audio/HDMI state for that head.
 */
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		nv50_crtc_prepare(&nv_crtc->base);
		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
		nv50_audio_disconnect(encoder, nv_crtc);
		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
	}
}

/* Nothing to do at commit time; mode_set does all the work. */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}

/* Program the SOR for TMDS, LVDS or DP: pick the protocol and (for DP)
 * bit depth, run the LVDS VBIOS script where needed, power the output,
 * then write the control word via nv50_sor_ctrl().
 */
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mask, ctrl;
	u8 owner = 1 << nv_crtc->index;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		/* single-link below 165MHz, otherwise dual-link/high-speed */
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				proto = 0x1;
			else
				proto = 0x5;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* script flags: 0x0100 = dual-link, 0x0200 = 24-bit panel */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* NOTE(review): EDID byte 121 appears to
				 * encode SPWG dual-link -- confirm
				 */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		/* datarate = pixel clock * bits-per-pixel / 8 */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;
		nv50_audio_mode_set(encoder, mode);
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);

	if (nv50_vers(mast) >= GF110_DISP) {
		/* GF110+: syncs/depth go in a separate per-head method */
		u32 *push = evo_wait(mast, 3);
		if (push) {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_kick(push, mast);
		}

		ctrl = proto << 8;
		mask = 0x00000f00;
	} else {
		/* pre-GF110: depth/proto/syncs all live in the control word */
		ctrl = (depth << 16) | (proto << 8);
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl |= 0x00001000;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl |= 0x00002000;
		mask = 0x000f3f00;
	}

	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
}

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_sor_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

/* Create a DRM encoder for a DCB SOR entry (LVDS/TMDS/DP) and attach it
 * to connector.  Returns 0 on success or -ENOMEM.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		/* tail of the nv50_sor_create() type switch begun above:
		 * DP is exposed to DRM as a TMDS encoder here
		 */
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * PIOR
 *****************************************************************************/

/* Set power state of an external (PIOR-attached) encoder via
 * NV50_DISP_MTHD_V1_PIOR_PWR.
 */
static void
nv50_pior_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_pior_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_PIOR_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
		.pwr.type = nv_encoder->dcb->type,
	};

	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

/* Native-mode substitution as in the DAC/SOR variants.
 * NOTE(review): the clock is doubled afterwards -- presumably external
 * encoders run the link at twice the pixel rate; confirm.
 */
static bool
nv50_pior_mode_fixup(struct drm_encoder *encoder,
		     const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	adjusted_mode->clock *= 2;
	return true;
}

/* Nothing to do at commit time; mode_set does all the work. */
static void
nv50_pior_commit(struct drm_encoder *encoder)
{
}

/* Route the PIOR to its CRTC with bit depth and sync polarity encoded in
 * the control word (pre-GF110 core channel only; PIORs don't exist on
 * newer hardware paths here).
 */
static void
nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		   struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	u8 owner = 1 << nv_crtc->index;
	u8 proto, depth;
	u32 *push;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_connector->base.display_info.bpc) {
	case 10: depth = 0x6; break;
	case 8: depth = 0x5; break;
	case 6: depth = 0x2; break;
	default: depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		/* unreachable for a properly-created PIOR encoder; note
		 * that proto would be uninitialised past this BUG_ON()
		 */
		BUG_ON(1);
		break;
	}

	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
			u32 ctrl = (depth << 16) | (proto << 8) | owner;
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				ctrl |= 0x00001000;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				ctrl |= 0x00002000;
			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
			evo_data(push, ctrl);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

/* Detach the PIOR from its CRTC by zeroing its control word. */
static void
nv50_pior_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
				evo_mthd(push, 0x0700 + (or * 0x040), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
	.dpms = nv50_pior_dpms,
	.mode_fixup = nv50_pior_mode_fixup,
	.prepare = nv50_pior_disconnect,
	.commit = nv50_pior_commit,
	.mode_set = nv50_pior_mode_set,
	.disable = nv50_pior_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

static const struct drm_encoder_funcs nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};

/* Create a DRM encoder for a DCB external-encoder (PIOR) entry, wiring
 * up the external DDC/AUX i2c port.  Returns 0, -ENODEV for unsupported
 * DCB types, or -ENOMEM.
 */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
	struct nouveau_i2c_port *ddc = NULL;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev));
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev));
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = ddc;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type);
	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Framebuffer
 *****************************************************************************/

/* Destroy one fbdma record: all per-head DMA objects, the core object,
 * then unlink and free it.
 */
static void
nv50_fbdma_fini(struct nv50_fbdma *fbdma)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
		nvif_object_fini(&fbdma->base[i]);
	nvif_object_fini(&fbdma->core);
	list_del(&fbdma->head);
	kfree(fbdma);
}

/* Create the DMA objects a framebuffer needs for scanout (one for the
 * core channel plus one per head), keyed/deduplicated by handle `name`.
 * The argument layout appended after nv_dma_v0 varies by chipset.
 * NOTE(review): this function continues past the end of this chunk.
 */
static int
nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	struct __attribute__ ((packed)) {
		struct nv_dma_v0 base;
		union {
			struct nv50_dma_v0 nv50;
			struct gf100_dma_v0 gf100;
			struct gf110_dma_v0 gf110;
		};
	} args = {};
	struct nv50_fbdma *fbdma;
	struct drm_crtc *crtc;
	u32 size = sizeof(args.base);
	int ret;

	/* already have a DMA set for this handle; nothing to do */
	list_for_each_entry(fbdma, &disp->fbdma, head) {
		if (fbdma->core.handle == name)
			return 0;
	}

	fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
	if (!fbdma)
		return -ENOMEM;
	list_add(&fbdma->head, &disp->fbdma);

	args.base.target = NV_DMA_V0_TARGET_VRAM;
	args.base.access = NV_DMA_V0_ACCESS_RDWR;
	args.base.start = offset;
	args.base.limit = offset + length - 1;

	if (drm->device.info.chipset < 0x80) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		size += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xc0) {
		args.nv50.part = NV50_DMA_V0_PART_256;
		args.nv50.kind = kind;
		size += sizeof(args.nv50);
	} else
	if (drm->device.info.chipset < 0xd0) {
		args.gf100.kind = kind;
		size +=
sizeof(args.gf100); 2333 } else { 2334 args.gf110.page = GF110_DMA_V0_PAGE_LP; 2335 args.gf110.kind = kind; 2336 size += sizeof(args.gf110); 2337 } 2338 2339 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 2340 struct nv50_head *head = nv50_head(crtc); 2341 int ret = nvif_object_init(&head->sync.base.base.user, NULL, 2342 name, NV_DMA_IN_MEMORY, &args, size, 2343 &fbdma->base[head->base.index]); 2344 if (ret) { 2345 nv50_fbdma_fini(fbdma); 2346 return ret; 2347 } 2348 } 2349 2350 ret = nvif_object_init(&mast->base.base.user, NULL, name, 2351 NV_DMA_IN_MEMORY, &args, size, 2352 &fbdma->core); 2353 if (ret) { 2354 nv50_fbdma_fini(fbdma); 2355 return ret; 2356 } 2357 2358 return 0; 2359} 2360 2361static void 2362nv50_fb_dtor(struct drm_framebuffer *fb) 2363{ 2364} 2365 2366static int 2367nv50_fb_ctor(struct drm_framebuffer *fb) 2368{ 2369 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb); 2370 struct nouveau_drm *drm = nouveau_drm(fb->dev); 2371 struct nouveau_bo *nvbo = nv_fb->nvbo; 2372 struct nv50_disp *disp = nv50_disp(fb->dev); 2373 u8 kind = nouveau_bo_tile_layout(nvbo) >> 8; 2374 u8 tile = nvbo->tile_mode; 2375 2376 if (nvbo->tile_flags & NOUVEAU_GEM_TILE_NONCONTIG) { 2377 NV_ERROR(drm, "framebuffer requires contiguous bo\n"); 2378 return -EINVAL; 2379 } 2380 2381 if (drm->device.info.chipset >= 0xc0) 2382 tile >>= 4; /* yep.. */ 2383 2384 switch (fb->depth) { 2385 case 8: nv_fb->r_format = 0x1e00; break; 2386 case 15: nv_fb->r_format = 0xe900; break; 2387 case 16: nv_fb->r_format = 0xe800; break; 2388 case 24: 2389 case 32: nv_fb->r_format = 0xcf00; break; 2390 case 30: nv_fb->r_format = 0xd100; break; 2391 default: 2392 NV_ERROR(drm, "unknown depth %d\n", fb->depth); 2393 return -EINVAL; 2394 } 2395 2396 if (disp->disp->oclass < G82_DISP) { 2397 nv_fb->r_pitch = kind ? 
(((fb->pitches[0] / 4) << 4) | tile) : 2398 (fb->pitches[0] | 0x00100000); 2399 nv_fb->r_format |= kind << 16; 2400 } else 2401 if (disp->disp->oclass < GF110_DISP) { 2402 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : 2403 (fb->pitches[0] | 0x00100000); 2404 } else { 2405 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : 2406 (fb->pitches[0] | 0x01000000); 2407 } 2408 nv_fb->r_handle = 0xffff0000 | kind; 2409 2410 return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0, 2411 drm->device.info.ram_user, kind); 2412} 2413 2414/****************************************************************************** 2415 * Init 2416 *****************************************************************************/ 2417 2418void 2419nv50_display_fini(struct drm_device *dev) 2420{ 2421} 2422 2423int 2424nv50_display_init(struct drm_device *dev) 2425{ 2426 struct nv50_disp *disp = nv50_disp(dev); 2427 struct drm_crtc *crtc; 2428 u32 *push; 2429 2430 push = evo_wait(nv50_mast(dev), 32); 2431 if (!push) 2432 return -EBUSY; 2433 2434 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 2435 struct nv50_sync *sync = nv50_sync(crtc); 2436 nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data); 2437 } 2438 2439 evo_mthd(push, 0x0088, 1); 2440 evo_data(push, nv50_mast(dev)->base.sync.handle); 2441 evo_kick(push, nv50_mast(dev)); 2442 return 0; 2443} 2444 2445void 2446nv50_display_destroy(struct drm_device *dev) 2447{ 2448 struct nv50_disp *disp = nv50_disp(dev); 2449 struct nv50_fbdma *fbdma, *fbtmp; 2450 2451 list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) { 2452 nv50_fbdma_fini(fbdma); 2453 } 2454 2455 nv50_dmac_destroy(&disp->mast.base, disp->disp); 2456 2457 nouveau_bo_unmap(disp->sync); 2458 if (disp->sync) 2459 nouveau_bo_unpin(disp->sync); 2460 nouveau_bo_ref(NULL, &disp->sync); 2461 2462 nouveau_display(dev)->priv = NULL; 2463 kfree(disp); 2464} 2465 2466int 2467nv50_display_create(struct drm_device *dev) 2468{ 2469 struct nvif_device 
*device = &nouveau_drm(dev)->device; 2470 struct nouveau_drm *drm = nouveau_drm(dev); 2471 struct dcb_table *dcb = &drm->vbios.dcb; 2472 struct drm_connector *connector, *tmp; 2473 struct nv50_disp *disp; 2474 struct dcb_output *dcbe; 2475 int crtcs, ret, i; 2476 2477 disp = kzalloc(sizeof(*disp), GFP_KERNEL); 2478 if (!disp) 2479 return -ENOMEM; 2480 INIT_LIST_HEAD(&disp->fbdma); 2481 2482 nouveau_display(dev)->priv = disp; 2483 nouveau_display(dev)->dtor = nv50_display_destroy; 2484 nouveau_display(dev)->init = nv50_display_init; 2485 nouveau_display(dev)->fini = nv50_display_fini; 2486 nouveau_display(dev)->fb_ctor = nv50_fb_ctor; 2487 nouveau_display(dev)->fb_dtor = nv50_fb_dtor; 2488 disp->disp = &nouveau_display(dev)->disp; 2489 2490 /* small shared memory area we use for notifiers and semaphores */ 2491 ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM, 2492 0, 0x0000, NULL, NULL, &disp->sync); 2493 if (!ret) { 2494 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM); 2495 if (!ret) { 2496 ret = nouveau_bo_map(disp->sync); 2497 if (ret) 2498 nouveau_bo_unpin(disp->sync); 2499 } 2500 if (ret) 2501 nouveau_bo_ref(NULL, &disp->sync); 2502 } 2503 2504 if (ret) 2505 goto out; 2506 2507 /* allocate master evo channel */ 2508 ret = nv50_core_create(disp->disp, disp->sync->bo.offset, 2509 &disp->mast); 2510 if (ret) 2511 goto out; 2512 2513 /* create crtc objects to represent the hw heads */ 2514 if (disp->disp->oclass >= GF110_DISP) 2515 crtcs = nvif_rd32(device, 0x022448); 2516 else 2517 crtcs = 2; 2518 2519 for (i = 0; i < crtcs; i++) { 2520 ret = nv50_crtc_create(dev, i); 2521 if (ret) 2522 goto out; 2523 } 2524 2525 /* create encoder/connector objects based on VBIOS DCB table */ 2526 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) { 2527 connector = nouveau_connector_create(dev, dcbe->connector); 2528 if (IS_ERR(connector)) 2529 continue; 2530 2531 if (dcbe->location == DCB_LOC_ON_CHIP) { 2532 switch (dcbe->type) { 2533 case 
DCB_OUTPUT_TMDS: 2534 case DCB_OUTPUT_LVDS: 2535 case DCB_OUTPUT_DP: 2536 ret = nv50_sor_create(connector, dcbe); 2537 break; 2538 case DCB_OUTPUT_ANALOG: 2539 ret = nv50_dac_create(connector, dcbe); 2540 break; 2541 default: 2542 ret = -ENODEV; 2543 break; 2544 } 2545 } else { 2546 ret = nv50_pior_create(connector, dcbe); 2547 } 2548 2549 if (ret) { 2550 NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n", 2551 dcbe->location, dcbe->type, 2552 ffs(dcbe->or) - 1, ret); 2553 ret = 0; 2554 } 2555 } 2556 2557 /* cull any connectors we created that don't have an encoder */ 2558 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) { 2559 if (connector->encoder_ids[0]) 2560 continue; 2561 2562 NV_WARN(drm, "%s has no encoders, removing\n", 2563 connector->name); 2564 connector->funcs->destroy(connector); 2565 } 2566 2567out: 2568 if (ret) 2569 nv50_display_destroy(dev); 2570 return ret; 2571} 2572