1/* $NetBSD: nouveau_dispnv50_disp.c,v 1.7 2021/12/26 21:00:14 riastradh Exp $ */ 2 3/* 4 * Copyright 2011 Red Hat Inc. 5 * 6 * Permission is hereby granted, free of charge, to any person obtaining a 7 * copy of this software and associated documentation files (the "Software"), 8 * to deal in the Software without restriction, including without limitation 9 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 10 * and/or sell copies of the Software, and to permit persons to whom the 11 * Software is furnished to do so, subject to the following conditions: 12 * 13 * The above copyright notice and this permission notice shall be included in 14 * all copies or substantial portions of the Software. 15 * 16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 19 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 * OTHER DEALINGS IN THE SOFTWARE. 
 *
 * Authors: Ben Skeggs
 */
#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: nouveau_dispnv50_disp.c,v 1.7 2021/12/26 21:00:14 riastradh Exp $");

#include "disp.h"
#include "atom.h"
#include "core.h"
#include "head.h"
#include "wndw.h"

#include <linux/dma-mapping.h>
#include <linux/hdmi.h>
#include <linux/component.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_edid.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_scdc_helper.h>
#include <drm/drm_vblank.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507d.h>
#include <nvif/event.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"
#include "nv50_display.h"

#include <subdev/bios/dp.h>

#include <linux/nbsd-namespace.h>

/******************************************************************************
 * Atomic state
 *****************************************************************************/

/*
 * Per-encoder bookkeeping carried through an atomic commit: records which
 * output paths need their core-channel control methods set and/or cleared
 * when the commit is flushed to hardware.
 */
struct nv50_outp_atom {
	struct list_head head;		/* entry on the commit's output list */

	struct drm_encoder *encoder;	/* encoder this state belongs to */
	bool flush_disable;		/* disable must happen during flush */

	/* Pending core-channel updates; addressable per-field or all at
	 * once through .mask (the union makes "anything pending?" a single
	 * byte test).
	 */
	union nv50_outp_atom_mask {
		struct {
			bool ctrl:1;	/* OR control method needs writing */
		};
		u8 mask;
	} set, clr;
};

/******************************************************************************
 * EVO channel
 *****************************************************************************/

/*
 * Create a display (EVO) channel object on the display engine.
 *
 * Walks the caller-supplied, zero-terminated list of candidate classes in
 * preference order and instantiates the first one the display object
 * actually advertises, then maps the channel's user registers.  Returns 0
 * on success, a negative errno on failure, or -ENOSYS when none of the
 * requested classes is supported.  Note: "head" is accepted for interface
 * symmetry but is not used here; the head selection travels in "data".
 */
static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	/* Fetch the list of classes supported by the display object. */
	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	/* Try each preferred class in order against the supported set. */
	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0) {
					/* Map the channel control area; on
					 * failure tear the object back down
					 * so the caller sees a clean error.
					 */
					ret = nvif_object_map(&chan->user, NULL, 0);
					if (ret) {
						printk(KERN_ERR "%s:%d"
						    ": nvif_object_map, %d\n",
						    __func__, __LINE__, ret);
						nvif_object_fini(&chan->user);
					}
				}
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}

/* Destroy an EVO channel previously created by nv50_chan_create(). */
static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}

/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

/*
 * Tear down a DMA EVO channel: releases the VRAM/sync DMA objects, the
 * underlying channel, and finally the push buffer memory.  Safe inverse
 * of nv50_dmac_create() for any partially-constructed state.
 */
void
nv50_dmac_destroy(struct nv50_dmac *dmac)
{
	spin_lock_destroy(&dmac->lock);
	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	nvif_mem_fini(&dmac->push);
}

/*
 * Create a DMA EVO channel: allocates a 4KiB push buffer, creates the
 * channel itself, and (when syncbuf is non-zero) the DMA objects used by
 * the channel for its sync area and for full-VRAM access.  Returns 0 on
 * success or a negative errno; on failure the caller is expected to call
 * nv50_dmac_destroy() to clean up.
 */
int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_cli *cli = (void *)device->object.client;
	struct nv50_disp_core_channel_dma_v0 *args = data;
	u8 type = NVIF_MEM_COHERENT;
	int ret;

	spin_lock_init(&dmac->lock);

	/* Pascal added support for 47-bit physical addresses, but some
	 * parts of EVO still only accept 40-bit PAs.
	 *
	 * To avoid issues on systems with large amounts of RAM, and on
	 * systems where an IOMMU maps pages at a high address, we need
	 * to allocate push buffers in VRAM instead.
	 *
	 * This appears to match NVIDIA's behaviour on Pascal.
	 */
	if (device->info.family == NV_DEVICE_INFO_V0_PASCAL)
		type |= NVIF_MEM_VRAM;

	ret = nvif_mem_init_map(&cli->mmu, type, 0x1000, &dmac->push);
	if (ret)
		return ret;

	/* CPU-visible view of the push buffer. */
	dmac->ptr = __UNVOLATILE(dmac->push.object.map.ptr);

	/* Hand the push buffer to the channel-creation arguments. */
	args->pushbuf = nvif_handle(&dmac->push.object);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	if (ret)
		return ret;

	/* Channels without a sync buffer (e.g. cursors) stop here. */
	if (!syncbuf)
		return 0;

	/* DMA object covering the 4KiB semaphore/sync area in VRAM. */
	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	/* DMA object spanning all of user-visible VRAM. */
	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	return ret;
}

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/

/*
 * Ensure previously-written push buffer contents are visible to the
 * channel's fetch engine before PUT is advanced.  Only needed for
 * VRAM-backed push buffers; waits up to 2ms for the flush to complete.
 */
static void
evo_flush(struct nv50_dmac *dmac)
{
	/* Push buffer fetches are not coherent with BAR1, we need to ensure
	 * writes have been flushed right through to VRAM before writing PUT.
	 */
	if (dmac->push.type & NVIF_MEM_VRAM) {
		struct nvif_device *device = dmac->base.device;
		nvif_wr32(&device->object, 0x070000, 0x00000001);
		nvif_msec(device, 2000,
			if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002))
				break;
		);
	}
}

/*
 * Reserve space for "nr" 32-bit words in the channel's push buffer.
 *
 * On success, returns a pointer at which the caller may write methods and
 * LEAVES dmac->lock HELD; the matching evo_kick() releases it.  If there
 * is not enough room before the page end, a jump-to-start command is
 * emitted and the function waits (up to 2s) for the channel to wrap.
 * Returns NULL (with the lock released) if the channel stalls.
 */
u32 *
evo_wait(struct nv50_dmac *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	spin_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		/* Not enough room; jump back to the buffer start.
		 * 0x20000000 appears to encode the jump command — the
		 * channel resets GET once it processes it.
		 */
		dmac->ptr[put] = 0x20000000;
		evo_flush(dmac);

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			spin_unlock(&dmac->lock);
			pr_err("nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}

/*
 * Submit the methods written since evo_wait() by advancing PUT to the
 * caller's current position, then release dmac->lock taken by evo_wait().
 */
void
evo_kick(u32 *push, struct nv50_dmac *evoc)
{
	struct nv50_dmac *dmac = evoc;

	evo_flush(dmac);

	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	spin_unlock(&dmac->lock);
}

/******************************************************************************
 * Output path helpers
 *****************************************************************************/

/*
 * Release the output resource (OR) previously acquired for this encoder
 * and mark the encoder as having no OR/link assignment.
 */
static void
nv50_outp_release(struct nouveau_encoder *nv_encoder)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct {
		struct nv50_disp_mthd_v1 base;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_RELEASE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	nv_encoder->or = -1;
	nv_encoder->link = 0;
}

/*
 * Acquire an output resource (OR) for this encoder from the display
 * engine, recording the OR index and link mask it hands back.  Returns 0
 * on success or a negative errno.
 */
static int
nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
{
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_acquire_v0 info;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	if (ret) {
		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
		return ret;
	}

	nv_encoder->or = args.info.or;
	nv_encoder->link = args.info.link;
	return 0;
}

/*
 * Common atomic_check helper for scaled outputs (LVDS/eDP panels with a
 * fixed native mode).  Decides whether the requested mode should be
 * replaced by the panel's native mode (scaler engaged) and updates the
 * CRTC's adjusted mode accordingly, flagging mode_changed when it
 * differs.  Always returns 0.
 */
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Don't force scaler for EDID modes with
			 * same size as the native one (e.g. different
			 * refresh rate)
			 */
			if (mode->hdisplay == native_mode->hdisplay &&
			    mode->vdisplay == native_mode->vdisplay &&
			    mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		/* Any explicit scaling mode always drives the native mode. */
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}

/*
 * Default encoder atomic_check: run the view/scaler check against the
 * connector's native mode, then refresh the head's bpc from the
 * connector when a modeset is happening.
 */
static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
	int ret;

	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					  nv_connector->native_mode);
	if (ret)
		return ret;

	if (crtc_state->mode_changed || crtc_state->connectors_changed)
		asyh->or.bpc = connector->display_info.bpc;

	return 0;
}

/******************************************************************************
 * DAC
 *****************************************************************************/

/*
 * Disable an analogue (DAC) output: clear its core-channel control method
 * if it was driving a CRTC, then release its OR.
 */
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	if (nv_encoder->crtc)
		core->func->dac->ctrl(core, nv_encoder->or, 0x00000000, NULL);
	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}

/*
 * Enable a DAC output on the CRTC currently attached to the encoder:
 * acquire an OR and point the DAC control method at the head.
 */
static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;

	nv50_outp_acquire(nv_encoder);

	core->func->dac->ctrl(core, nv_encoder->or, 1 << nv_crtc->index, asyh);
	/* Analogue output — no digital depth applies. */
	asyh->or.depth = 0;

	nv_encoder->crtc = encoder->crtc;
}

/*
 * Load-detect on a DAC output: ask the display engine to perform a load
 * sense and report connected/disconnected based on the result.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	/* Prefer the VBIOS-provided load-detect value; 340 is the fallback
	 * used when the VBIOS doesn't supply one (presumably a sense
	 * voltage constant — TODO confirm against hardware docs).
	 */
	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

/* Free a DAC encoder allocated by nv50_dac_create(). */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

/*
 * Create a DRM encoder for a DAC described by a DCB output entry and
 * attach it to the given connector.  Returns 0 on success or -ENOMEM.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;

	/* Wire up the DDC bus for this output, if one exists. */
	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_connector_attach_encoder(connector, encoder);
	return 0;
}

/*
 * audio component binding for ELD notification
 */

/*
 * Notify the bound HDA audio driver that the ELD on the given port
 * (head index) may have changed.  No-op on NetBSD, where the audio
 * component framework is not wired up.
 */
static void
nv50_audio_component_eld_notify(struct drm_audio_component *acomp, int port)
{
#ifndef __NetBSD__		/* XXX nouveau audio component */
	if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify)
		acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr,
						 port, -1);
#endif
}

#ifndef __NetBSD__		/* XXX nouveau audio component */

/*
 * Audio-component callback: fetch the ELD for the CRTC whose index
 * matches "port".  Fills *enabled and copies up to max_bytes of ELD
 * data into buf; returns the ELD size (possibly larger than the copy)
 * or 0 when nothing matched.  "pipe" is unused by this driver.
 */
static int
nv50_audio_component_get_eld(struct device *kdev, int port, int pipe,
			     bool *enabled, unsigned char *buf, int max_bytes)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_encoder *encoder;
	struct nouveau_encoder *nv_encoder;
	struct nouveau_connector *nv_connector;
	struct nouveau_crtc *nv_crtc;
	int ret = 0;

	*enabled = false;
	drm_for_each_encoder(encoder, drm->dev) {
		nv_encoder = nouveau_encoder(encoder);
		nv_connector = nouveau_encoder_connector_get(nv_encoder);
		nv_crtc = nouveau_crtc(encoder->crtc);
		if (!nv_connector || !nv_crtc || nv_crtc->index != port)
			continue;
		*enabled = drm_detect_monitor_audio(nv_connector->edid);
		if (*enabled) {
			ret = drm_eld_size(nv_connector->base.eld);
			memcpy(buf, nv_connector->base.eld,
			       min(max_bytes, ret));
		}
		break;
	}
	return ret;
}

static const struct drm_audio_component_ops nv50_audio_component_ops = {
	.get_eld = nv50_audio_component_get_eld,
};

/*
 * Component bind callback: link the HDA device to us for PM purposes and
 * publish our audio-component ops under the modeset locks.
 */
static int
nv50_audio_component_bind(struct device *kdev, struct device *hda_kdev,
			  void *data)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_audio_component *acomp = data;

	if (WARN_ON(!device_link_add(hda_kdev, kdev, DL_FLAG_STATELESS)))
		return -ENOMEM;

	drm_modeset_lock_all(drm_dev);
	acomp->ops = &nv50_audio_component_ops;
	acomp->dev = kdev;
	drm->audio.component = acomp;
	drm_modeset_unlock_all(drm_dev);
	return 0;
}

/* Component unbind callback: retract the ops published at bind time. */
static void
nv50_audio_component_unbind(struct device *kdev, struct device *hda_kdev,
			    void *data)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_audio_component *acomp = data;

	drm_modeset_lock_all(drm_dev);
	drm->audio.component = NULL;
	acomp->ops = NULL;
	acomp->dev = NULL;
	drm_modeset_unlock_all(drm_dev);
}

static const struct component_ops nv50_audio_component_bind_ops = {
	.bind   = nv50_audio_component_bind,
	.unbind = nv50_audio_component_unbind,
};

#endif

/* Register with the audio component framework (no-op on NetBSD). */
static void
nv50_audio_component_init(struct nouveau_drm *drm)
{
#ifndef __NetBSD__		/* XXX nouveau audio component */
	if (!component_add(drm->dev->dev, &nv50_audio_component_bind_ops))
		drm->audio.component_registered = true;
#endif
}

/* Unregister from the audio component framework (no-op on NetBSD). */
static void
nv50_audio_component_fini(struct nouveau_drm *drm)
{
#ifndef __NetBSD__		/* XXX nouveau audio component */
	if (drm->audio.component_registered) {
		component_del(drm->dev->dev, &nv50_audio_component_bind_ops);
		drm->audio.component_registered = false;
	}
#endif
}

/******************************************************************************
 * Audio
 *****************************************************************************/

/*
 * Disable HDA audio on the SOR driving nv_crtc by sending an empty
 * HDA_ELD method, then notify the audio component of the change.
 */
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht   = nv_encoder->dcb->hasht,
		/* Low byte of hashm selects the head within the OR. */
		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				(0x0100 << nv_crtc->index),
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));

	nv50_audio_component_eld_notify(drm->audio.component, nv_crtc->index);
}

/*
 * Enable HDA audio on the encoder's SOR: push the connector's ELD to the
 * display engine (only when the monitor advertises audio support) and
 * notify the audio component.
 */
static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	/* Method header followed directly by the variable-length ELD;
	 * __packed so the ELD bytes abut the header exactly as the
	 * firmware expects.
	 */
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht   = nv_encoder->dcb->hasht,
		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
				     (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	/* Only transmit header + actual ELD length, not the full buffer. */
	nvif_mthd(&disp->disp->object, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));

	nv50_audio_component_eld_notify(drm->audio.component, nv_crtc->index);
}

/******************************************************************************
 * HDMI
 *****************************************************************************/

/*
 * Disable HDMI on the SOR driving nv_crtc by sending an HDMI_PWR method
 * with pwr.state left zero (off).
 */
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		/* Low byte of hashm selects the head within the OR. */
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
}

/*
 * Enable HDMI on the encoder's SOR for the given mode: packs AVI and
 * vendor infoframes, computes the audio packet budget, programs SCDC
 * scrambling/clock-ratio state for HDMI 2.0 sinks, and enables audio.
 * Silently returns if the attached monitor is not HDMI.
 */
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
			       (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	struct drm_hdmi_info *hdmi;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	bool high_tmds_clock_ratio = false, scrambling = false;
	u8 config;
	int ret;
	int size;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	hdmi = &nv_connector->base.display_info.hdmi;

	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi,
						       &nv_connector->base, mode);
	if (!ret) {
		/* We have an AVI InfoFrame, populate it to the display */
		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
	}

	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
							  &nv_connector->base, mode);
	if (!ret) {
		/* We have a Vendor InfoFrame, populate it to the display */
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}

	/* Audio-packet budget: hblank minus rekey minus a constant,
	 * expressed in 32-byte units.
	 */
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	if (hdmi->scdc.scrambling.supported) {
		/* HDMI 2.0: >340MHz requires the 1/40 clock ratio and
		 * scrambling; low-rate scrambling only if the sink asks.
		 */
		high_tmds_clock_ratio = mode->clock > 340000;
		scrambling = high_tmds_clock_ratio ||
			hdmi->scdc.scrambling.low_rates;
	}

	args.pwr.scdc =
		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_SCRAMBLE * scrambling |
		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_DIV_BY_4 * high_tmds_clock_ratio;

	/* Only send header + pwr + the infoframe bytes actually packed. */
	size = sizeof(args.base)
		+ sizeof(args.pwr)
		+ args.pwr.avi_infoframe_length
		+ args.pwr.vendor_infoframe_length;
	nvif_mthd(&disp->disp->object, 0, &args, size);

	nv50_audio_enable(encoder, mode);

	/* If SCDC is supported by the downstream monitor, update
	 * divider / scrambling settings to what we programmed above.
	 */
	if (!hdmi->scdc.scrambling.supported)
		return;

	ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &config);
	if (ret < 0) {
		NV_ERROR(drm, "Failure to read SCDC_TMDS_CONFIG: %d\n", ret);
		return;
	}
	config &= ~(SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 | SCDC_SCRAMBLING_ENABLE);
	config |= SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 * high_tmds_clock_ratio;
	config |= SCDC_SCRAMBLING_ENABLE * scrambling;
	ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, config);
	if (ret < 0)
		NV_ERROR(drm, "Failure to write SCDC_TMDS_CONFIG = 0x%02x: %d\n",
			 config, ret);
}

/******************************************************************************
 * MST
 *****************************************************************************/
/* container_of helpers for the three MST object embeddings below. */
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

/* MST master: per physical DP output, owns the topology manager. */
struct nv50_mstm {
	struct nouveau_encoder *outp;	/* physical SOR behind this topology */

	struct drm_dp_mst_topology_mgr mgr;

	bool modified;			/* payload table changed this commit */
	bool disabled;			/* no active links remain */
	int links;			/* number of active msto links */
};

/* MST connector: one per branch-device port exposed to userspace. */
struct nv50_mstc {
	struct nv50_mstm *mstm;		/* owning topology */
	struct drm_dp_mst_port *port;	/* topology port this connector is */
	struct drm_connector connector;

	struct drm_display_mode *native; /* cached native mode, if any */
	struct edid *edid;		/* last EDID fetched over MST */
};

/* MST output: fake encoder, one per head, routing a head into an mstc. */
struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;		/* head feeding this stream */
	struct nv50_mstc *mstc;		/* connector currently driven */
	bool disabled;			/* pending cleanup after disable */
};

/*
 * Look up the topology manager's payload entry matching this msto's
 * allocated VCPI.  Caller must hold mgr.payload_lock.  Returns NULL when
 * no payload with that VCPI exists (also logs the full table for
 * debugging).
 */
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	WARN_ON(!mutex_is_locked(&mstm->mgr.payload_lock));

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}

/*
 * Post-disable cleanup for an msto: deallocate its VCPI and drop the
 * mstc link.  Only acts when the msto was flagged disabled.
 */
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (!msto->disabled)
		return;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);

	drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);

	msto->mstc = NULL;
	msto->disabled = false;
}

/*
 * Program the display engine with this msto's current VCPI allocation
 * (start slot / slot count / PBN); all-zero args disable the stream.
 * Takes mgr.payload_lock around the payload lookup and method call.
 */
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht  = mstm->outp->dcb->hasht,
		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
			       (0x0100 << msto->head->base.index),
	};

	mutex_lock(&mstm->mgr.payload_lock);

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);

	nvif_mthd(&drm->display->disp.object, 0, &args, sizeof(args));
	mutex_unlock(&mstm->mgr.payload_lock);
}

/*
 * Atomic check for an MST stream encoder: run the common view check,
 * then (re)compute PBN from the adjusted mode and bpc and reserve VCPI
 * slots in the topology.  Returns 0 or a negative errno.
 */
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct drm_atomic_state *state = crtc_state->state;
	struct drm_connector *connector = conn_state->connector;
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct nv50_mstm *mstm = mstc->mstm;
	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
	int slots;
	int ret;

	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					  mstc->native);
	if (ret)
		return ret;

	if (!crtc_state->mode_changed && !crtc_state->connectors_changed)
		return 0;

	/*
	 * When restoring duplicated states, we need to make sure that the bw
	 * remains the same and avoid recalculating it, as the connector's bpc
	 * may have changed after the state was duplicated
	 */
	if (!state->duplicated) {
		const int clock = crtc_state->adjusted_mode.clock;

		/*
		 * XXX: Since we don't use HDR in userspace quite yet, limit
		 * the bpc to 8 to save bandwidth on the topology. In the
		 * future, we'll want to properly fix this by dynamically
		 * selecting the highest possible bpc that would fit in the
		 * topology
		 */
		asyh->or.bpc = min(connector->display_info.bpc, 8U);
		asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3, false);
	}

	slots = drm_dp_atomic_find_vcpi_slots(state, &mstm->mgr, mstc->port,
					      asyh->dp.pbn, 0);
	if (slots < 0)
		return slots;

	/* Slot count doubles as the transfer-unit value programmed later. */
	asyh->dp.tu = slots;

	return 0;
}

/*
 * Map bits-per-component to the hardware OR depth field.  Unknown or
 * >=10 bpc values fall through to the 10-bpc encoding.
 */
static u8
nv50_dp_bpc_to_depth(unsigned int bpc)
{
	switch (bpc) {
	case  6: return 0x2;
	case  8: return 0x5;
	case 10: /* fall-through */
	default: return 0x6;
	}
}

/*
 * Enable an MST stream: find the connector routed to this msto,
 * allocate its VCPI, acquire the physical OR on first use, and point
 * the SOR update at the head.  Protocol selects DP link A/B — TODO
 * confirm the 0x8/0x9 encodings against the class headers.
 */
static void
nv50_msto_enable(struct drm_encoder *encoder)
{
	struct nv50_head *head = nv50_head(encoder->crtc);
	struct nv50_head_atom *armh = nv50_head_atom(head->base.base.state);
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = NULL;
	struct nv50_mstm *mstm = NULL;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u8 proto;
	bool r;

	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		if (connector->state->best_encoder == &msto->encoder) {
			mstc = nv50_mstc(connector);
			mstm = mstc->mstm;
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);

	if (WARN_ON(!mstc))
		return;

	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, armh->dp.pbn,
				     armh->dp.tu);
	if (!r)
		DRM_DEBUG_KMS("Failed to allocate VCPI\n");

	/* First active stream on this topology owns OR acquisition. */
	if (!mstm->links++)
		nv50_outp_acquire(mstm->outp);

	if (mstm->outp->link & 1)
		proto = 0x8;
	else
		proto = 0x9;

	mstm->outp->update(mstm->outp, head->base.index, armh, proto,
			   nv50_dp_bpc_to_depth(armh->or.bpc));

	msto->mstc = mstc;
	mstm->modified = true;
}

/*
 * Disable an MST stream: return its slots to the topology, detach the
 * head from the SOR, and flag the msto (and mstm, when this was the
 * last link) for cleanup after the commit flush.
 */
static void
nv50_msto_disable(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);

	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
	mstm->modified = true;
	if (!--mstm->links)
		mstm->disabled = true;
	msto->disabled = true;
}

static const struct drm_encoder_helper_funcs
nv50_msto_help = {
	.disable = nv50_msto_disable,
	.enable = nv50_msto_enable,
	.atomic_check = nv50_msto_atomic_check,
};

/* Free an MST stream encoder allocated by nv50_msto_new(). */
static void
nv50_msto_destroy(struct drm_encoder *encoder)
{
	struct nv50_msto *msto = nv50_msto(encoder);
	drm_encoder_cleanup(&msto->encoder);
	kfree(msto);
}

static const struct drm_encoder_funcs
nv50_msto = {
	.destroy = nv50_msto_destroy,
};

/*
 * Allocate and register the MST stream encoder bound to a single head.
 * Returns the new msto or an ERR_PTR on failure.
 */
static struct nv50_msto *
nv50_msto_new(struct drm_device *dev, struct nv50_head *head, int id)
{
	struct nv50_msto *msto;
	int ret;

	msto = kzalloc(sizeof(*msto), GFP_KERNEL);
	if (!msto)
		return ERR_PTR(-ENOMEM);

	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
			       DRM_MODE_ENCODER_DPMST, "mst-%d", id);
	if (ret) {
		kfree(msto);
		return ERR_PTR(ret);
	}

	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
	/* Each msto can only ever drive its own head's CRTC. */
	msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base);
	msto->head = head;
	return msto;
}

/*
 * Pick the encoder for an MST connector: the msto belonging to the CRTC
 * chosen in the connector state, or NULL when the physical output can't
 * reach that CRTC.
 */
static struct drm_encoder *
nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
			      struct drm_connector_state *connector_state)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct drm_crtc *crtc = connector_state->crtc;

	if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
		return NULL;

	return &nv50_head(crtc)->msto->encoder;
}

/* All modes are accepted here; bandwidth is validated in atomic_check. */
static enum drm_mode_status
nv50_mstc_mode_valid(struct drm_connector *connector,
		     struct drm_display_mode *mode)
{
	return MODE_OK;
}

/*
 * Fetch the EDID over the MST topology, publish it as the connector's
 * EDID property, and add its modes.  Also refreshes the cached native
 * mode and defaults bpc to 8 when the EDID didn't provide one.
 * Returns the number of modes added.
 */
static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid)
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);

	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}

/*
 * Connector-level atomic check: release this connector's VCPI slots
 * when the new state disables its CRTC (and only then), so bandwidth
 * returns to the topology.
 */
static int
nv50_mstc_atomic_check(struct drm_connector *connector,
		       struct drm_atomic_state *state)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr;
	struct drm_connector_state *new_conn_state =
		drm_atomic_get_new_connector_state(state, connector);
	struct drm_connector_state *old_conn_state =
		drm_atomic_get_old_connector_state(state, connector);
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *new_crtc = new_conn_state->crtc;

	if (!old_conn_state->crtc)
		return 0;

	/* We only want to free VCPI if this state disables the CRTC on this
	 * connector
	 */
	if (new_crtc) {
		crtc_state = drm_atomic_get_new_crtc_state(state, new_crtc);

		if (!crtc_state ||
		    !drm_atomic_crtc_needs_modeset(crtc_state) ||
		    crtc_state->enable)
			return 0;
	}

	return drm_dp_atomic_release_vcpi_slots(state, mgr, mstc->port);
}

/*
 * Detect an MST connector by querying its port in the topology.
 * Takes a runtime-PM reference around the query; reports disconnected
 * for unregistered connectors or when the device can't be woken.
 */
static int
nv50_mstc_detect(struct drm_connector *connector,
		 struct drm_modeset_acquire_ctx *ctx, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret;

	if (drm_connector_is_unregistered(connector))
		return connector_status_disconnected;

	ret = pm_runtime_get_sync(connector->dev->dev);
	if (ret < 0 && ret != -EACCES)
		return connector_status_disconnected;

	ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr,
				     mstc->port);

	pm_runtime_mark_last_busy(connector->dev->dev);
	pm_runtime_put_autosuspend(connector->dev->dev);
	return ret;
}

static const struct drm_connector_helper_funcs
nv50_mstc_help = {
	.get_modes = nv50_mstc_get_modes,
	.mode_valid = nv50_mstc_mode_valid,
	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
	.atomic_check = nv50_mstc_atomic_check,
	.detect_ctx = nv50_mstc_detect,
};

/*
 * Destroy an MST connector: tear down the DRM connector and drop the
 * malloc reference taken on its topology port at creation.
 */
static void
nv50_mstc_destroy(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);

	drm_connector_cleanup(&mstc->connector);
	drm_dp_mst_put_port_malloc(mstc->port);

	kfree(mstc);
}

static const struct drm_connector_funcs
nv50_mstc = {
	.reset = nouveau_conn_reset,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = nv50_mstc_destroy,
	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
	.atomic_set_property = nouveau_conn_atomic_set_property,
	.atomic_get_property = nouveau_conn_atomic_get_property,
};

/*
 * Create a connector for an MST topology port.  (Definition continues
 * beyond this chunk of the file.)
 */
static int
nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
	      const char *path, struct nv50_mstc **pmstc)
{
	struct drm_device *dev = mstm->outp->base.base.dev;
	struct drm_crtc *crtc;
	struct nv50_mstc *mstc;
	int ret;

	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
		return -ENOMEM;
	mstc->mstm = mstm;
	mstc->port = port;

	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
				 DRM_MODE_CONNECTOR_DisplayPort);
if (ret) { 1210 kfree(*pmstc); 1211 *pmstc = NULL; 1212 return ret; 1213 } 1214 1215 drm_connector_helper_add(&mstc->connector, &nv50_mstc_help); 1216 1217 mstc->connector.funcs->reset(&mstc->connector); 1218 nouveau_conn_attach_properties(&mstc->connector); 1219 1220 drm_for_each_crtc(crtc, dev) { 1221 if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc))) 1222 continue; 1223 1224 drm_connector_attach_encoder(&mstc->connector, 1225 &nv50_head(crtc)->msto->encoder); 1226 } 1227 1228 drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0); 1229 drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0); 1230 drm_connector_set_path_property(&mstc->connector, path); 1231 drm_dp_mst_get_port_malloc(port); 1232 return 0; 1233} 1234 1235static void 1236nv50_mstm_cleanup(struct nv50_mstm *mstm) 1237{ 1238 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); 1239 struct drm_encoder *encoder; 1240 int ret __unused; 1241 1242 NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name); 1243 ret = drm_dp_check_act_status(&mstm->mgr); 1244 1245 ret = drm_dp_update_payload_part2(&mstm->mgr); 1246 1247 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { 1248 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { 1249 struct nv50_msto *msto = nv50_msto(encoder); 1250 struct nv50_mstc *mstc = msto->mstc; 1251 if (mstc && mstc->mstm == mstm) 1252 nv50_msto_cleanup(msto); 1253 } 1254 } 1255 1256 mstm->modified = false; 1257} 1258 1259static void 1260nv50_mstm_prepare(struct nv50_mstm *mstm) 1261{ 1262 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); 1263 struct drm_encoder *encoder; 1264 int ret __unused; 1265 1266 NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name); 1267 ret = drm_dp_update_payload_part1(&mstm->mgr); 1268 1269 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { 1270 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { 1271 struct nv50_msto *msto = 
nv50_msto(encoder); 1272 struct nv50_mstc *mstc = msto->mstc; 1273 if (mstc && mstc->mstm == mstm) 1274 nv50_msto_prepare(msto); 1275 } 1276 } 1277 1278 if (mstm->disabled) { 1279 if (!mstm->links) 1280 nv50_outp_release(mstm->outp); 1281 mstm->disabled = false; 1282 } 1283} 1284 1285static void 1286nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr, 1287 struct drm_connector *connector) 1288{ 1289 struct nouveau_drm *drm = nouveau_drm(connector->dev); 1290 struct nv50_mstc *mstc = nv50_mstc(connector); 1291 1292 drm_connector_unregister(&mstc->connector); 1293 1294 drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector); 1295 1296 drm_connector_put(&mstc->connector); 1297} 1298 1299static void 1300nv50_mstm_register_connector(struct drm_connector *connector) 1301{ 1302 struct nouveau_drm *drm = nouveau_drm(connector->dev); 1303 1304 drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector); 1305 1306 drm_connector_register(connector); 1307} 1308 1309static struct drm_connector * 1310nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr, 1311 struct drm_dp_mst_port *port, const char *path) 1312{ 1313 struct nv50_mstm *mstm = nv50_mstm(mgr); 1314 struct nv50_mstc *mstc; 1315 int ret; 1316 1317 ret = nv50_mstc_new(mstm, port, path, &mstc); 1318 if (ret) 1319 return NULL; 1320 1321 return &mstc->connector; 1322} 1323 1324static const struct drm_dp_mst_topology_cbs 1325nv50_mstm = { 1326 .add_connector = nv50_mstm_add_connector, 1327 .register_connector = nv50_mstm_register_connector, 1328 .destroy_connector = nv50_mstm_destroy_connector, 1329}; 1330 1331void 1332nv50_mstm_service(struct nv50_mstm *mstm) 1333{ 1334 struct drm_dp_aux *aux = mstm ? 
						    mstm->mgr.aux : NULL;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	if (!aux)
		return;

	/* Service sink IRQs: read the ESI vector and let the topology
	 * manager process hotplug events until nothing remains handled.
	 * A short DPCD read is treated as the hub having gone away.
	 */
	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		/* Ack the serviced event bits back to the sink. */
		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}

/* Shut down an MST topology (e.g. on connector removal). */
void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}

/* Program both the sink (DPCD MSTM_CTRL) and the display engine for the
 * requested MST link state.  Returns 0 or a negative errno.
 */
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp.object;
	int ret;

	/* MST requires DPCD revision 1.2+. */
	if (dpcd >= 0x12) {
		/* Even if we're enabling MST, start with disabling the
		 * branching unit to clear any sink-side MST topology state
		 * that wasn't set by us
		 */
		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, 0);
		if (ret < 0)
			return ret;

		if (state) {
			/* Now, start initializing */
			ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL,
						 DP_MST_EN);
			if (ret < 0)
				return ret;
		}
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}

/* Decide whether the sink should be driven in MST mode and transition to
 * that state if it changed.  dpcd[0] is the DPCD revision (may be
 * rewritten to 0x11 when the sink lacks MST capability); dpcd[1] is
 * filled with MSTM_CAP when probed.  Returns the new state (0/1) or a
 * negative errno.
 */
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	struct drm_dp_aux *aux;
	int ret;
	bool old_state, new_state;
	u8 mstm_ctrl;

	if (!mstm)
		return 0;

	mutex_lock(&mstm->mgr.lock);

	old_state = mstm->mgr.mst_state;
	new_state = old_state;
	aux = mstm->mgr.aux;

	if (old_state) {
		/* Just check that the MST hub is still as we expect it */
		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CTRL, &mstm_ctrl);
		if (ret < 0 || !(mstm_ctrl & DP_MST_EN)) {
			DRM_DEBUG_KMS("Hub gone, disabling MST topology\n");
			new_state = false;
		}
	} else if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			goto probe_error;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			new_state = allow;
	}

	if (new_state == old_state) {
		mutex_unlock(&mstm->mgr.lock);
		return new_state;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], new_state);
	if (ret)
		goto probe_error;

	/* Drop the lock before calling into the topology manager, which
	 * takes mgr.lock itself.
	 */
	mutex_unlock(&mstm->mgr.lock);

	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, new_state);
	if (ret)
		/* Roll the sink/display engine back to non-MST on failure. */
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return new_state;

probe_error:
	mutex_unlock(&mstm->mgr.lock);
	return ret;
}

/* Suspend hook: quiesce the topology manager if MST is active. */
static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

/* Resume hook: try to revive the topology; if the hub no longer responds
 * (-1), tear MST down and signal a hotplug so userspace reprobes.
 */
static void
nv50_mstm_init(struct nv50_mstm *mstm, bool runtime)
{
	int ret;

	if (!mstm || !mstm->mgr.mst_state)
		return;

	ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime);
	if (ret == -1) {
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
		drm_kms_helper_hotplug_event(mstm->mgr.dev);
	}
}

/* Free an MST manager created by nv50_mstm_new(), clearing the caller's
 * pointer.
 */
static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		drm_dp_mst_topology_mgr_destroy(&mstm->mgr);
		kfree(*pmstm);
		*pmstm = NULL;
	}
}

static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct
nv50_mstm **pmstm) 1496{ 1497 const int max_payloads = hweight8(outp->dcb->heads); 1498 struct drm_device *dev = outp->base.base.dev; 1499 struct nv50_mstm *mstm; 1500 int ret; 1501 u8 dpcd; 1502 1503 /* This is a workaround for some monitors not functioning 1504 * correctly in MST mode on initial module load. I think 1505 * some bad interaction with the VBIOS may be responsible. 1506 * 1507 * A good ol' off and on again seems to work here ;) 1508 */ 1509 ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd); 1510 if (ret >= 0 && dpcd >= 0x12) 1511 drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0); 1512 1513 if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL))) 1514 return -ENOMEM; 1515 mstm->outp = outp; 1516 mstm->mgr.cbs = &nv50_mstm; 1517 1518 ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max, 1519 max_payloads, conn_base_id); 1520 if (ret) 1521 return ret; 1522 1523 return 0; 1524} 1525 1526/****************************************************************************** 1527 * SOR 1528 *****************************************************************************/ 1529static void 1530nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head, 1531 struct nv50_head_atom *asyh, u8 proto, u8 depth) 1532{ 1533 struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev); 1534 struct nv50_core *core = disp->core; 1535 1536 if (!asyh) { 1537 nv_encoder->ctrl &= ~BIT(head); 1538 if (!(nv_encoder->ctrl & 0x0000000f)) 1539 nv_encoder->ctrl = 0; 1540 } else { 1541 nv_encoder->ctrl |= proto << 8; 1542 nv_encoder->ctrl |= BIT(head); 1543 asyh->or.depth = depth; 1544 } 1545 1546 core->func->sor->ctrl(core, nv_encoder->or, nv_encoder->ctrl, asyh); 1547} 1548 1549static void 1550nv50_sor_disable(struct drm_encoder *encoder) 1551{ 1552 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 1553 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc); 1554 1555 nv_encoder->crtc = NULL; 1556 1557 if (nv_crtc) { 1558 struct nvkm_i2c_aux *aux = 
nv_encoder->aux; 1559 u8 pwr; 1560 1561 if (aux) { 1562 int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1); 1563 if (ret == 0) { 1564 pwr &= ~DP_SET_POWER_MASK; 1565 pwr |= DP_SET_POWER_D3; 1566 nvkm_wraux(aux, DP_SET_POWER, &pwr, 1); 1567 } 1568 } 1569 1570 nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0); 1571 nv50_audio_disable(encoder, nv_crtc); 1572 nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc); 1573 nv50_outp_release(nv_encoder); 1574 } 1575} 1576 1577static void 1578nv50_sor_enable(struct drm_encoder *encoder) 1579{ 1580 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 1581 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); 1582 struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state); 1583 struct drm_display_mode *mode = &asyh->state.adjusted_mode; 1584 struct { 1585 struct nv50_disp_mthd_v1 base; 1586 struct nv50_disp_sor_lvds_script_v0 lvds; 1587 } lvds = { 1588 .base.version = 1, 1589 .base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT, 1590 .base.hasht = nv_encoder->dcb->hasht, 1591 .base.hashm = nv_encoder->dcb->hashm, 1592 }; 1593 struct nv50_disp *disp = nv50_disp(encoder->dev); 1594 struct drm_device *dev = encoder->dev; 1595 struct nouveau_drm *drm = nouveau_drm(dev); 1596 struct nouveau_connector *nv_connector; 1597 struct nvbios *bios = &drm->vbios; 1598 u8 proto = 0xf; 1599 u8 depth = 0x0; 1600 1601 nv_connector = nouveau_encoder_connector_get(nv_encoder); 1602 nv_encoder->crtc = encoder->crtc; 1603 nv50_outp_acquire(nv_encoder); 1604 1605 switch (nv_encoder->dcb->type) { 1606 case DCB_OUTPUT_TMDS: 1607 if (nv_encoder->link & 1) { 1608 proto = 0x1; 1609 /* Only enable dual-link if: 1610 * - Need to (i.e. rate > 165MHz) 1611 * - DCB says we can 1612 * - Not an HDMI monitor, since there's no dual-link 1613 * on HDMI. 
1614 */ 1615 if (mode->clock >= 165000 && 1616 nv_encoder->dcb->duallink_possible && 1617 !drm_detect_hdmi_monitor(nv_connector->edid)) 1618 proto |= 0x4; 1619 } else { 1620 proto = 0x2; 1621 } 1622 1623 nv50_hdmi_enable(&nv_encoder->base.base, mode); 1624 break; 1625 case DCB_OUTPUT_LVDS: 1626 proto = 0x0; 1627 1628 if (bios->fp_no_ddc) { 1629 if (bios->fp.dual_link) 1630 lvds.lvds.script |= 0x0100; 1631 if (bios->fp.if_is_24bit) 1632 lvds.lvds.script |= 0x0200; 1633 } else { 1634 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) { 1635 if (((u8 *)nv_connector->edid)[121] == 2) 1636 lvds.lvds.script |= 0x0100; 1637 } else 1638 if (mode->clock >= bios->fp.duallink_transition_clk) { 1639 lvds.lvds.script |= 0x0100; 1640 } 1641 1642 if (lvds.lvds.script & 0x0100) { 1643 if (bios->fp.strapless_is_24bit & 2) 1644 lvds.lvds.script |= 0x0200; 1645 } else { 1646 if (bios->fp.strapless_is_24bit & 1) 1647 lvds.lvds.script |= 0x0200; 1648 } 1649 1650 if (asyh->or.bpc == 8) 1651 lvds.lvds.script |= 0x0200; 1652 } 1653 1654 nvif_mthd(&disp->disp->object, 0, &lvds, sizeof(lvds)); 1655 break; 1656 case DCB_OUTPUT_DP: 1657 depth = nv50_dp_bpc_to_depth(asyh->or.bpc); 1658 1659 if (nv_encoder->link & 1) 1660 proto = 0x8; 1661 else 1662 proto = 0x9; 1663 1664 nv50_audio_enable(encoder, mode); 1665 break; 1666 default: 1667 BUG(); 1668 break; 1669 } 1670 1671 nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth); 1672} 1673 1674static const struct drm_encoder_helper_funcs 1675nv50_sor_help = { 1676 .atomic_check = nv50_outp_atomic_check, 1677 .enable = nv50_sor_enable, 1678 .disable = nv50_sor_disable, 1679}; 1680 1681static void 1682nv50_sor_destroy(struct drm_encoder *encoder) 1683{ 1684 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 1685 nv50_mstm_del(&nv_encoder->dp.mstm); 1686 drm_encoder_cleanup(encoder); 1687 kfree(encoder); 1688} 1689 1690static const struct drm_encoder_funcs 1691nv50_sor_func = { 1692 .destroy = nv50_sor_destroy, 1693}; 1694 
1695static bool nv50_has_mst(struct nouveau_drm *drm) 1696{ 1697 struct nvkm_bios *bios = nvxx_bios(&drm->client.device); 1698 u32 data; 1699 u8 ver, hdr, cnt, len; 1700 1701 data = nvbios_dp_table(bios, &ver, &hdr, &cnt, &len); 1702 return data && ver >= 0x40 && (nvbios_rd08(bios, data + 0x08) & 0x04); 1703} 1704 1705static int 1706nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe) 1707{ 1708 struct nouveau_connector *nv_connector = nouveau_connector(connector); 1709 struct nouveau_drm *drm = nouveau_drm(connector->dev); 1710 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); 1711 struct nouveau_encoder *nv_encoder; 1712 struct drm_encoder *encoder; 1713 int type, ret; 1714 1715 switch (dcbe->type) { 1716 case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break; 1717 case DCB_OUTPUT_TMDS: 1718 case DCB_OUTPUT_DP: 1719 default: 1720 type = DRM_MODE_ENCODER_TMDS; 1721 break; 1722 } 1723 1724 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL); 1725 if (!nv_encoder) 1726 return -ENOMEM; 1727 nv_encoder->dcb = dcbe; 1728 nv_encoder->update = nv50_sor_update; 1729 1730 encoder = to_drm_encoder(nv_encoder); 1731 encoder->possible_crtcs = dcbe->heads; 1732 encoder->possible_clones = 0; 1733 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type, 1734 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm); 1735 drm_encoder_helper_add(encoder, &nv50_sor_help); 1736 1737 drm_connector_attach_encoder(connector, encoder); 1738 1739 if (dcbe->type == DCB_OUTPUT_DP) { 1740 struct nv50_disp *disp = nv50_disp(encoder->dev); 1741 struct nvkm_i2c_aux *aux = 1742 nvkm_i2c_aux_find(i2c, dcbe->i2c_index); 1743 if (aux) { 1744 if (disp->disp->object.oclass < GF110_DISP) { 1745 /* HW has no support for address-only 1746 * transactions, so we're required to 1747 * use custom I2C-over-AUX code. 
1748 */ 1749 nv_encoder->i2c = &aux->i2c; 1750 } else { 1751 nv_encoder->i2c = &nv_connector->aux.ddc; 1752 } 1753 nv_encoder->aux = aux; 1754 } 1755 1756 if (nv_connector->type != DCB_CONNECTOR_eDP && 1757 nv50_has_mst(drm)) { 1758 ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 1759 16, nv_connector->base.base.id, 1760 &nv_encoder->dp.mstm); 1761 if (ret) 1762 return ret; 1763 } 1764 } else { 1765 struct nvkm_i2c_bus *bus = 1766 nvkm_i2c_bus_find(i2c, dcbe->i2c_index); 1767 if (bus) 1768 nv_encoder->i2c = &bus->i2c; 1769 } 1770 1771 return 0; 1772} 1773 1774/****************************************************************************** 1775 * PIOR 1776 *****************************************************************************/ 1777static int 1778nv50_pior_atomic_check(struct drm_encoder *encoder, 1779 struct drm_crtc_state *crtc_state, 1780 struct drm_connector_state *conn_state) 1781{ 1782 int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state); 1783 if (ret) 1784 return ret; 1785 crtc_state->adjusted_mode.clock *= 2; 1786 return 0; 1787} 1788 1789static void 1790nv50_pior_disable(struct drm_encoder *encoder) 1791{ 1792 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 1793 struct nv50_core *core = nv50_disp(encoder->dev)->core; 1794 if (nv_encoder->crtc) 1795 core->func->pior->ctrl(core, nv_encoder->or, 0x00000000, NULL); 1796 nv_encoder->crtc = NULL; 1797 nv50_outp_release(nv_encoder); 1798} 1799 1800static void 1801nv50_pior_enable(struct drm_encoder *encoder) 1802{ 1803 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 1804 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); 1805 struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state); 1806 struct nv50_core *core = nv50_disp(encoder->dev)->core; 1807 u8 owner = 1 << nv_crtc->index; 1808 u8 proto; 1809 1810 nv50_outp_acquire(nv_encoder); 1811 1812 switch (asyh->or.bpc) { 1813 case 10: asyh->or.depth = 0x6; break; 1814 case 8: asyh->or.depth = 0x5; 
break; 1815 case 6: asyh->or.depth = 0x2; break; 1816 default: asyh->or.depth = 0x0; break; 1817 } 1818 1819 switch (nv_encoder->dcb->type) { 1820 case DCB_OUTPUT_TMDS: 1821 case DCB_OUTPUT_DP: 1822 proto = 0x0; 1823 break; 1824 default: 1825 BUG(); 1826 break; 1827 } 1828 1829 core->func->pior->ctrl(core, nv_encoder->or, (proto << 8) | owner, asyh); 1830 nv_encoder->crtc = encoder->crtc; 1831} 1832 1833static const struct drm_encoder_helper_funcs 1834nv50_pior_help = { 1835 .atomic_check = nv50_pior_atomic_check, 1836 .enable = nv50_pior_enable, 1837 .disable = nv50_pior_disable, 1838}; 1839 1840static void 1841nv50_pior_destroy(struct drm_encoder *encoder) 1842{ 1843 drm_encoder_cleanup(encoder); 1844 kfree(encoder); 1845} 1846 1847static const struct drm_encoder_funcs 1848nv50_pior_func = { 1849 .destroy = nv50_pior_destroy, 1850}; 1851 1852static int 1853nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe) 1854{ 1855 struct nouveau_drm *drm = nouveau_drm(connector->dev); 1856 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); 1857 struct nvkm_i2c_bus *bus = NULL; 1858 struct nvkm_i2c_aux *aux = NULL; 1859 struct i2c_adapter *ddc; 1860 struct nouveau_encoder *nv_encoder; 1861 struct drm_encoder *encoder; 1862 int type; 1863 1864 switch (dcbe->type) { 1865 case DCB_OUTPUT_TMDS: 1866 bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev)); 1867 ddc = bus ? &bus->i2c : NULL; 1868 type = DRM_MODE_ENCODER_TMDS; 1869 break; 1870 case DCB_OUTPUT_DP: 1871 aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev)); 1872 ddc = aux ? 
&aux->i2c : NULL; 1873 type = DRM_MODE_ENCODER_TMDS; 1874 break; 1875 default: 1876 return -ENODEV; 1877 } 1878 1879 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL); 1880 if (!nv_encoder) 1881 return -ENOMEM; 1882 nv_encoder->dcb = dcbe; 1883 nv_encoder->i2c = ddc; 1884 nv_encoder->aux = aux; 1885 1886 encoder = to_drm_encoder(nv_encoder); 1887 encoder->possible_crtcs = dcbe->heads; 1888 encoder->possible_clones = 0; 1889 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type, 1890 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm); 1891 drm_encoder_helper_add(encoder, &nv50_pior_help); 1892 1893 drm_connector_attach_encoder(connector, encoder); 1894 return 0; 1895} 1896 1897/****************************************************************************** 1898 * Atomic 1899 *****************************************************************************/ 1900 1901static void 1902nv50_disp_atomic_commit_core(struct drm_atomic_state *state, u32 *interlock) 1903{ 1904 struct nouveau_drm *drm = nouveau_drm(state->dev); 1905 struct nv50_disp *disp = nv50_disp(drm->dev); 1906 struct nv50_core *core = disp->core; 1907 struct nv50_mstm *mstm; 1908 struct drm_encoder *encoder; 1909 1910 NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]); 1911 1912 drm_for_each_encoder(encoder, drm->dev) { 1913 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { 1914 mstm = nouveau_encoder(encoder)->dp.mstm; 1915 if (mstm && mstm->modified) 1916 nv50_mstm_prepare(mstm); 1917 } 1918 } 1919 1920 core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY); 1921 core->func->update(core, interlock, true); 1922 if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY, 1923 disp->core->chan.base.device)) 1924 NV_ERROR(drm, "core notifier timeout\n"); 1925 1926 drm_for_each_encoder(encoder, drm->dev) { 1927 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { 1928 mstm = nouveau_encoder(encoder)->dp.mstm; 1929 if (mstm && mstm->modified) 1930 nv50_mstm_cleanup(mstm); 
1931 } 1932 } 1933} 1934 1935static void 1936nv50_disp_atomic_commit_wndw(struct drm_atomic_state *state, u32 *interlock) 1937{ 1938 struct drm_plane_state *new_plane_state; 1939 struct drm_plane *plane; 1940 int i; 1941 1942 for_each_new_plane_in_state(state, plane, new_plane_state, i) { 1943 struct nv50_wndw *wndw = nv50_wndw(plane); 1944 if (interlock[wndw->interlock.type] & wndw->interlock.data) { 1945 if (wndw->func->update) 1946 wndw->func->update(wndw, interlock); 1947 } 1948 } 1949} 1950 1951static void 1952nv50_disp_atomic_commit_tail(struct drm_atomic_state *state) 1953{ 1954 struct drm_device *dev = state->dev; 1955 struct drm_crtc_state *new_crtc_state, *old_crtc_state; 1956 struct drm_crtc *crtc; 1957 struct drm_plane_state *new_plane_state; 1958 struct drm_plane *plane; 1959 struct nouveau_drm *drm = nouveau_drm(dev); 1960 struct nv50_disp *disp = nv50_disp(dev); 1961 struct nv50_atom *atom = nv50_atom(state); 1962 struct nv50_core *core = disp->core; 1963 struct nv50_outp_atom *outp, *outt; 1964 u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {}; 1965 int i; 1966 1967 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable); 1968 drm_atomic_helper_wait_for_fences(dev, state, false); 1969 drm_atomic_helper_wait_for_dependencies(state); 1970 drm_atomic_helper_update_legacy_modeset_state(dev, state); 1971 1972 if (atom->lock_core) 1973 mutex_lock(&disp->mutex); 1974 1975 /* Disable head(s). 
*/ 1976 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) { 1977 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state); 1978 struct nv50_head *head = nv50_head(crtc); 1979 1980 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name, 1981 asyh->clr.mask, asyh->set.mask); 1982 1983 if (old_crtc_state->active && !new_crtc_state->active) { 1984 pm_runtime_put_noidle(dev->dev); 1985 drm_crtc_vblank_off(crtc); 1986 } 1987 1988 if (asyh->clr.mask) { 1989 nv50_head_flush_clr(head, asyh, atom->flush_disable); 1990 interlock[NV50_DISP_INTERLOCK_CORE] |= 1; 1991 } 1992 } 1993 1994 /* Disable plane(s). */ 1995 for_each_new_plane_in_state(state, plane, new_plane_state, i) { 1996 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state); 1997 struct nv50_wndw *wndw = nv50_wndw(plane); 1998 1999 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name, 2000 asyw->clr.mask, asyw->set.mask); 2001 if (!asyw->clr.mask) 2002 continue; 2003 2004 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw); 2005 } 2006 2007 /* Disable output path(s). */ 2008 list_for_each_entry(outp, &atom->outp, head) { 2009 const struct drm_encoder_helper_funcs *help; 2010 struct drm_encoder *encoder; 2011 2012 encoder = outp->encoder; 2013 help = encoder->helper_private; 2014 2015 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name, 2016 outp->clr.mask, outp->set.mask); 2017 2018 if (outp->clr.mask) { 2019 help->disable(encoder); 2020 interlock[NV50_DISP_INTERLOCK_CORE] |= 1; 2021 if (outp->flush_disable) { 2022 nv50_disp_atomic_commit_wndw(state, interlock); 2023 nv50_disp_atomic_commit_core(state, interlock); 2024 memset(interlock, 0x00, sizeof(interlock)); 2025 } 2026 } 2027 } 2028 2029 /* Flush disable. 
*/ 2030 if (interlock[NV50_DISP_INTERLOCK_CORE]) { 2031 if (atom->flush_disable) { 2032 nv50_disp_atomic_commit_wndw(state, interlock); 2033 nv50_disp_atomic_commit_core(state, interlock); 2034 memset(interlock, 0x00, sizeof(interlock)); 2035 } 2036 } 2037 2038 /* Update output path(s). */ 2039 list_for_each_entry_safe(outp, outt, &atom->outp, head) { 2040 const struct drm_encoder_helper_funcs *help; 2041 struct drm_encoder *encoder; 2042 2043 encoder = outp->encoder; 2044 help = encoder->helper_private; 2045 2046 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name, 2047 outp->set.mask, outp->clr.mask); 2048 2049 if (outp->set.mask) { 2050 help->enable(encoder); 2051 interlock[NV50_DISP_INTERLOCK_CORE] = 1; 2052 } 2053 2054 list_del(&outp->head); 2055 kfree(outp); 2056 } 2057 2058 /* Update head(s). */ 2059 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) { 2060 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state); 2061 struct nv50_head *head = nv50_head(crtc); 2062 2063 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name, 2064 asyh->set.mask, asyh->clr.mask); 2065 2066 if (asyh->set.mask) { 2067 nv50_head_flush_set(head, asyh); 2068 interlock[NV50_DISP_INTERLOCK_CORE] = 1; 2069 } 2070 2071 if (new_crtc_state->active) { 2072 if (!old_crtc_state->active) { 2073 drm_crtc_vblank_on(crtc); 2074 pm_runtime_get_noresume(dev->dev); 2075 } 2076 if (new_crtc_state->event) 2077 drm_crtc_vblank_get(crtc); 2078 } 2079 } 2080 2081 /* Update window->head assignment. 2082 * 2083 * This has to happen in an update that's not interlocked with 2084 * any window channels to avoid hitting HW error checks. 2085 * 2086 *TODO: Proper handling of window ownership (Turing apparently 2087 * supports non-fixed mappings). 
2088 */ 2089 if (core->assign_windows) { 2090 core->func->wndw.owner(core); 2091 core->func->update(core, interlock, false); 2092 core->assign_windows = false; 2093 interlock[NV50_DISP_INTERLOCK_CORE] = 0; 2094 } 2095 2096 /* Update plane(s). */ 2097 for_each_new_plane_in_state(state, plane, new_plane_state, i) { 2098 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state); 2099 struct nv50_wndw *wndw = nv50_wndw(plane); 2100 2101 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name, 2102 asyw->set.mask, asyw->clr.mask); 2103 if ( !asyw->set.mask && 2104 (!asyw->clr.mask || atom->flush_disable)) 2105 continue; 2106 2107 nv50_wndw_flush_set(wndw, interlock, asyw); 2108 } 2109 2110 /* Flush update. */ 2111 nv50_disp_atomic_commit_wndw(state, interlock); 2112 2113 if (interlock[NV50_DISP_INTERLOCK_CORE]) { 2114 if (interlock[NV50_DISP_INTERLOCK_BASE] || 2115 interlock[NV50_DISP_INTERLOCK_OVLY] || 2116 interlock[NV50_DISP_INTERLOCK_WNDW] || 2117 !atom->state.legacy_cursor_update) 2118 nv50_disp_atomic_commit_core(state, interlock); 2119 else 2120 disp->core->func->update(disp->core, interlock, false); 2121 } 2122 2123 if (atom->lock_core) 2124 mutex_unlock(&disp->mutex); 2125 2126 /* Wait for HW to signal completion. 
*/ 2127 for_each_new_plane_in_state(state, plane, new_plane_state, i) { 2128 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state); 2129 struct nv50_wndw *wndw = nv50_wndw(plane); 2130 int ret = nv50_wndw_wait_armed(wndw, asyw); 2131 if (ret) 2132 NV_ERROR(drm, "%s: timeout\n", plane->name); 2133 } 2134 2135 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) { 2136 if (new_crtc_state->event) { 2137 unsigned long flags; 2138 /* Get correct count/ts if racing with vblank irq */ 2139 spin_lock_irqsave(&crtc->dev->event_lock, flags); 2140 if (new_crtc_state->active) 2141 drm_crtc_accurate_vblank_count(crtc); 2142 drm_crtc_send_vblank_event(crtc, new_crtc_state->event); 2143 spin_unlock_irqrestore(&crtc->dev->event_lock, flags); 2144 2145 new_crtc_state->event = NULL; 2146 if (new_crtc_state->active) 2147 drm_crtc_vblank_put(crtc); 2148 } 2149 } 2150 2151 drm_atomic_helper_commit_hw_done(state); 2152 drm_atomic_helper_cleanup_planes(dev, state); 2153 drm_atomic_helper_commit_cleanup_done(state); 2154 drm_atomic_state_put(state); 2155 2156 /* Drop the RPM ref we got from nv50_disp_atomic_commit() */ 2157 pm_runtime_mark_last_busy(dev->dev); 2158 pm_runtime_put_autosuspend(dev->dev); 2159} 2160 2161static void 2162nv50_disp_atomic_commit_work(struct work_struct *work) 2163{ 2164 struct drm_atomic_state *state = 2165 container_of(work, typeof(*state), commit_work); 2166 nv50_disp_atomic_commit_tail(state); 2167} 2168 2169static int 2170nv50_disp_atomic_commit(struct drm_device *dev, 2171 struct drm_atomic_state *state, bool nonblock) 2172{ 2173 struct drm_plane_state *new_plane_state; 2174 struct drm_plane *plane; 2175 int ret, i; 2176 2177 ret = pm_runtime_get_sync(dev->dev); 2178 if (ret < 0 && ret != -EACCES) 2179 return ret; 2180 2181 ret = drm_atomic_helper_setup_commit(state, nonblock); 2182 if (ret) 2183 goto done; 2184 2185 INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work); 2186 2187 ret = drm_atomic_helper_prepare_planes(dev, state); 
	if (ret)
		goto done;

	/* For blocking commits, wait for implicit fences up front so the
	 * inline commit tail doesn't stall mid-programming. */
	if (!nonblock) {
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto err_cleanup;
	}

	ret = drm_atomic_helper_swap_state(state, true);
	if (ret)
		goto err_cleanup;

	/* Enable completion notifiers on every window that will get a new
	 * image, so the commit tail can wait for the HW to arm them. */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		if (asyw->set.image)
			nv50_wndw_ntfy_enable(wndw, asyw);
	}

	/* Extra state ref for the commit tail; dropped there via
	 * drm_atomic_state_put(). */
	drm_atomic_state_get(state);

	/*
	 * Grab another RPM ref for the commit tail, which will release the
	 * ref when it's finished
	 */
	pm_runtime_get_noresume(dev->dev);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

err_cleanup:
	/* ret == 0 on the success path falling through from above, so the
	 * cleanup only runs after an actual failure. */
	if (ret)
		drm_atomic_helper_cleanup_planes(dev, state);
done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}

/*
 * Find the nv50_outp_atom tracking @encoder in @atom's outp list, or
 * allocate and enlist a new one. Returns the entry, or ERR_PTR(-ENOMEM).
 * Entries are freed by nv50_disp_atomic_state_clear().
 */
static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}

/*
 * Record that the output feeding @old_connector_state's crtc must be torn
 * down (ctrl cleared) as part of this commit, when the crtc was active and
 * the new state requires a modeset. Returns 0 or a negative errno.
 */
static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector_state *old_connector_state)
{
	struct drm_encoder *encoder = old_connector_state->best_encoder;
	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	/* Connector wasn't bound to a crtc in the old state: nothing to clear. */
	if (!(crtc = old_connector_state->crtc))
		return 0;

	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		/* MST outputs must be disabled with a flush of the core
		 * channel; propagate that to the whole commit. */
		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		/* Core channel will be touched: commit tail must take
		 * disp->mutex. */
		atom->lock_core = true;
	}

	return 0;
}

/*
 * Counterpart of nv50_disp_outp_atomic_check_clr(): record that the output
 * feeding @connector_state's crtc must be (re)programmed, when the new crtc
 * state is active and needs a modeset. Returns 0 or a negative errno.
 */
static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	/* Connector isn't bound to a crtc in the new state: nothing to set. */
	if (!(crtc = connector_state->crtc))
		return 0;

	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

/*
 * .atomic_check hook: pull affected planes into colour-management updates,
 * run the generic helper checks, then derive the per-output set/clr actions
 * and validate MST bandwidth. Returns 0 or a negative errno.
 */
static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *old_connector_state, *new_connector_state;
	struct drm_connector *connector;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	int ret, i;

	/* We need to handle colour management on a per-plane basis.
	 */
	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->color_mgmt_changed) {
			ret = drm_atomic_add_affected_planes(state, crtc);
			if (ret)
				return ret;
		}
	}

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	/* Translate connector routing changes into per-encoder clr/set
	 * actions consumed by the commit tail. */
	for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
		if (ret)
			return ret;
	}

	ret = drm_dp_mst_atomic_check(state);
	if (ret)
		return ret;

	return 0;
}

/*
 * .atomic_state_clear hook: release the per-output bookkeeping accumulated
 * by atomic_check before handing off to the default clear.
 */
static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
}

/* .atomic_state_free hook: release base-state resources, then the
 * containing nv50_atom itself. */
static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}

/*
 * .atomic_state_alloc hook: allocate the driver-subclassed atomic state.
 * Returns the embedded drm_atomic_state, or NULL on allocation/init failure
 * (kfree(NULL) is a no-op on the !atom branch).
 */
static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}

/* Mode-config vtable wiring the atomic hooks above into the DRM core. */
static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};

/******************************************************************************
 * Init
 *****************************************************************************/

/*
 * Display teardown/suspend path: quiesce every nv50 window plane, then stop
 * MST management on all non-MST-virtual encoders.
 * NOTE(review): @suspend is currently unused here — presumably kept for the
 * nouveau_display fini callback signature; confirm against nouveau_display.c.
 */
static void
nv50_display_fini(struct drm_device *dev, bool suspend)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		/* Skip planes not owned by this driver's window code. */
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		/* MST virtual encoders have no mstm of their own. */
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}

/*
 * Display init/resume path: (re)initialize the core channel, restart MST
 * management, then bring the window planes back up. Always returns 0.
 */
static int
nv50_display_init(struct drm_device *dev, bool resume, bool runtime)
{
	struct nv50_core *core = nv50_disp(dev)->core;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	core->func->init(core);

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			struct nouveau_encoder *nv_encoder =
				nouveau_encoder(encoder);
			nv50_mstm_init(nv_encoder->dp.mstm, runtime);
		}
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}

/*
 * Destructor for the nv50 display state, also used as the error-unwind path
 * of nv50_display_create(): tear down audio, the core channel, and the sync
 * buffer, in reverse order of creation.
 */
static void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_audio_component_fini(nouveau_drm(dev));

	nv50_core_del(&disp->core);

	/* sync may be NULL when create failed early; unpin is guarded,
	 * NOTE(review): unmap/ref presumably tolerate NULL — confirm in
	 * nouveau_bo.c. */
	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);
	nouveau_display(dev)->priv = NULL;
	mutex_destroy(&disp->mutex);
	kfree(disp);
}

/*
 * Create the nv50+ display state: allocate nv50_disp, register the
 * init/fini/dtor callbacks and atomic mode-config hooks, allocate the
 * notifier/semaphore buffer and master evo (core) channel, then build
 * crtcs (heads), MST streams, and DCB-described encoders/connectors.
 * Returns 0 on success; on failure everything is unwound via
 * nv50_display_destroy() and a negative errno is returned.
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;
	bool has_mst = nv50_has_mst(drm);

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true;
	dev->mode_config.normalize_zpos = true;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		/* Drop the bo on any failure so destroy sees sync == NULL. */
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_new(drm, &disp->core);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->object.oclass >= GV100_DISP)
		crtcs = nvif_rd32(&device->object, 0x610060) & 0xff;
	else
	if (disp->disp->object.oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
	else
		crtcs = 0x3;	/* pre-GF110: two heads, fixed mask */

	/* crtcs is a bitmask of present heads; iterate set bits only. */
	for (i = 0; i < fls(crtcs); i++) {
		struct nv50_head *head;

		if (!(crtcs & (1 << i)))
			continue;

		head = nv50_head_create(dev, i);
		if (IS_ERR(head)) {
			ret = PTR_ERR(head);
			goto out;
		}

		if (has_mst) {
			head->msto = nv50_msto_new(dev, head, i);
			if (IS_ERR(head->msto)) {
				ret = PTR_ERR(head->msto);
				head->msto = NULL;
				goto out;
			}

			/*
			 * FIXME: This is a hack to workaround the following
			 * issues:
			 *
			 * https://gitlab.gnome.org/GNOME/mutter/issues/759
			 * https://gitlab.freedesktop.org/xorg/xserver/merge_requests/277
			 *
			 * Once these issues are closed, this should be
			 * removed
			 */
			head->msto->encoder.possible_crtcs = crtcs;
		}
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			/* Off-chip (external) encoders hang off a PIOR. */
			ret = nv50_pior_create(connector, dcbe);
		}

		/* Encoder creation failures are non-fatal: warn and carry on
		 * with the remaining DCB entries. */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->possible_encoders)
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

	/* Disable vblank irqs aggressively for power-saving, safe on nv50+ */
	dev->vblank_disable_immediate = true;

	nv50_audio_component_init(drm);

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}