intel_dp.c revision 235783
1/* 2 * Copyright �� 2008 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 22 * 23 * Authors: 24 * Keith Packard <keithp@keithp.com> 25 * 26 */ 27 28#include <sys/cdefs.h> 29__FBSDID("$FreeBSD: head/sys/dev/drm2/i915/intel_dp.c 235783 2012-05-22 11:07:44Z kib $"); 30 31#include <dev/drm2/drmP.h> 32#include <dev/drm2/drm.h> 33#include <dev/drm2/drm_crtc.h> 34#include <dev/drm2/drm_crtc_helper.h> 35#include <dev/drm2/i915/i915_drm.h> 36#include <dev/drm2/i915/i915_drv.h> 37#include <dev/drm2/i915/intel_drv.h> 38#include <dev/drm2/drm_dp_helper.h> 39 40#define DP_RECEIVER_CAP_SIZE 0xf 41#define DP_LINK_STATUS_SIZE 6 42#define DP_LINK_CHECK_TIMEOUT (10 * 1000) 43 44#define DP_LINK_CONFIGURATION_SIZE 9 45 46/* XXXKIB what is the right code for the FreeBSD ? 
*/ 47#define EREMOTEIO ENXIO 48 49struct intel_dp { 50 struct intel_encoder base; 51 uint32_t output_reg; 52 uint32_t DP; 53 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE]; 54 bool has_audio; 55 enum hdmi_force_audio force_audio; 56 uint32_t color_range; 57 int dpms_mode; 58 uint8_t link_bw; 59 uint8_t lane_count; 60 uint8_t dpcd[DP_RECEIVER_CAP_SIZE]; 61 device_t dp_iic_bus; 62 device_t adapter; 63 bool is_pch_edp; 64 uint8_t train_set[4]; 65 int panel_power_up_delay; 66 int panel_power_down_delay; 67 int panel_power_cycle_delay; 68 int backlight_on_delay; 69 int backlight_off_delay; 70 struct drm_display_mode *panel_fixed_mode; /* for eDP */ 71 struct timeout_task panel_vdd_task; 72 bool want_panel_vdd; 73}; 74 75/** 76 * is_edp - is the given port attached to an eDP panel (either CPU or PCH) 77 * @intel_dp: DP struct 78 * 79 * If a CPU or PCH DP output is attached to an eDP panel, this function 80 * will return true, and false otherwise. 81 */ 82static bool is_edp(struct intel_dp *intel_dp) 83{ 84 return intel_dp->base.type == INTEL_OUTPUT_EDP; 85} 86 87/** 88 * is_pch_edp - is the port on the PCH and attached to an eDP panel? 89 * @intel_dp: DP struct 90 * 91 * Returns true if the given DP struct corresponds to a PCH DP port attached 92 * to an eDP panel, false otherwise. Helpful for determining whether we 93 * may need FDI resources for a given DP output or not. 94 */ 95static bool is_pch_edp(struct intel_dp *intel_dp) 96{ 97 return intel_dp->is_pch_edp; 98} 99 100/** 101 * is_cpu_edp - is the port on the CPU and attached to an eDP panel? 102 * @intel_dp: DP struct 103 * 104 * Returns true if the given DP struct corresponds to a CPU eDP port. 
105 */ 106static bool is_cpu_edp(struct intel_dp *intel_dp) 107{ 108 return is_edp(intel_dp) && !is_pch_edp(intel_dp); 109} 110 111static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder) 112{ 113 return container_of(encoder, struct intel_dp, base.base); 114} 115 116static struct intel_dp *intel_attached_dp(struct drm_connector *connector) 117{ 118 return container_of(intel_attached_encoder(connector), 119 struct intel_dp, base); 120} 121 122/** 123 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP? 124 * @encoder: DRM encoder 125 * 126 * Return true if @encoder corresponds to a PCH attached eDP panel. Needed 127 * by intel_display.c. 128 */ 129bool intel_encoder_is_pch_edp(struct drm_encoder *encoder) 130{ 131 struct intel_dp *intel_dp; 132 133 if (!encoder) 134 return false; 135 136 intel_dp = enc_to_intel_dp(encoder); 137 138 return is_pch_edp(intel_dp); 139} 140 141static void intel_dp_start_link_train(struct intel_dp *intel_dp); 142static void intel_dp_complete_link_train(struct intel_dp *intel_dp); 143static void intel_dp_link_down(struct intel_dp *intel_dp); 144 145void 146intel_edp_link_config(struct intel_encoder *intel_encoder, 147 int *lane_num, int *link_bw) 148{ 149 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base); 150 151 *lane_num = intel_dp->lane_count; 152 if (intel_dp->link_bw == DP_LINK_BW_1_62) 153 *link_bw = 162000; 154 else if (intel_dp->link_bw == DP_LINK_BW_2_7) 155 *link_bw = 270000; 156} 157 158static int 159intel_dp_max_lane_count(struct intel_dp *intel_dp) 160{ 161 int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f; 162 switch (max_lane_count) { 163 case 1: case 2: case 4: 164 break; 165 default: 166 max_lane_count = 4; 167 } 168 return max_lane_count; 169} 170 171static int 172intel_dp_max_link_bw(struct intel_dp *intel_dp) 173{ 174 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE]; 175 176 switch (max_link_bw) { 177 case DP_LINK_BW_1_62: 178 case DP_LINK_BW_2_7: 
179 break; 180 default: 181 max_link_bw = DP_LINK_BW_1_62; 182 break; 183 } 184 return max_link_bw; 185} 186 187static int 188intel_dp_link_clock(uint8_t link_bw) 189{ 190 if (link_bw == DP_LINK_BW_2_7) 191 return 270000; 192 else 193 return 162000; 194} 195 196/* 197 * The units on the numbers in the next two are... bizarre. Examples will 198 * make it clearer; this one parallels an example in the eDP spec. 199 * 200 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as: 201 * 202 * 270000 * 1 * 8 / 10 == 216000 203 * 204 * The actual data capacity of that configuration is 2.16Gbit/s, so the 205 * units are decakilobits. ->clock in a drm_display_mode is in kilohertz - 206 * or equivalently, kilopixels per second - so for 1680x1050R it'd be 207 * 119000. At 18bpp that's 2142000 kilobits per second. 208 * 209 * Thus the strange-looking division by 10 in intel_dp_link_required, to 210 * get the result in decakilobits instead of kilobits. 211 */ 212 213static int 214intel_dp_link_required(int pixel_clock, int bpp) 215{ 216 return (pixel_clock * bpp + 9) / 10; 217} 218 219static int 220intel_dp_max_data_rate(int max_link_clock, int max_lanes) 221{ 222 return (max_link_clock * max_lanes * 8) / 10; 223} 224 225static bool 226intel_dp_adjust_dithering(struct intel_dp *intel_dp, 227 struct drm_display_mode *mode, 228 struct drm_display_mode *adjusted_mode) 229{ 230 int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp)); 231 int max_lanes = intel_dp_max_lane_count(intel_dp); 232 int max_rate, mode_rate; 233 234 mode_rate = intel_dp_link_required(mode->clock, 24); 235 max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes); 236 237 if (mode_rate > max_rate) { 238 mode_rate = intel_dp_link_required(mode->clock, 18); 239 if (mode_rate > max_rate) 240 return false; 241 242 if (adjusted_mode) 243 adjusted_mode->private_flags 244 |= INTEL_MODE_DP_FORCE_6BPC; 245 246 return true; 247 } 248 249 return true; 250} 251 252static int 
253intel_dp_mode_valid(struct drm_connector *connector, 254 struct drm_display_mode *mode) 255{ 256 struct intel_dp *intel_dp = intel_attached_dp(connector); 257 258 if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { 259 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay) 260 return MODE_PANEL; 261 262 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay) 263 return MODE_PANEL; 264 } 265 266 if (!intel_dp_adjust_dithering(intel_dp, mode, NULL)) 267 return MODE_CLOCK_HIGH; 268 269 if (mode->clock < 10000) 270 return MODE_CLOCK_LOW; 271 272 return MODE_OK; 273} 274 275static uint32_t 276pack_aux(uint8_t *src, int src_bytes) 277{ 278 int i; 279 uint32_t v = 0; 280 281 if (src_bytes > 4) 282 src_bytes = 4; 283 for (i = 0; i < src_bytes; i++) 284 v |= ((uint32_t) src[i]) << ((3-i) * 8); 285 return v; 286} 287 288static void 289unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes) 290{ 291 int i; 292 if (dst_bytes > 4) 293 dst_bytes = 4; 294 for (i = 0; i < dst_bytes; i++) 295 dst[i] = src >> ((3-i) * 8); 296} 297 298/* hrawclock is 1/4 the FSB frequency */ 299static int 300intel_hrawclk(struct drm_device *dev) 301{ 302 struct drm_i915_private *dev_priv = dev->dev_private; 303 uint32_t clkcfg; 304 305 clkcfg = I915_READ(CLKCFG); 306 switch (clkcfg & CLKCFG_FSB_MASK) { 307 case CLKCFG_FSB_400: 308 return 100; 309 case CLKCFG_FSB_533: 310 return 133; 311 case CLKCFG_FSB_667: 312 return 166; 313 case CLKCFG_FSB_800: 314 return 200; 315 case CLKCFG_FSB_1067: 316 return 266; 317 case CLKCFG_FSB_1333: 318 return 333; 319 /* these two are just a guess; one of them might be right */ 320 case CLKCFG_FSB_1600: 321 case CLKCFG_FSB_1600_ALT: 322 return 400; 323 default: 324 return 133; 325 } 326} 327 328static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp) 329{ 330 struct drm_device *dev = intel_dp->base.base.dev; 331 struct drm_i915_private *dev_priv = dev->dev_private; 332 333 return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0; 334} 335 336static bool 
ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp) 337{ 338 struct drm_device *dev = intel_dp->base.base.dev; 339 struct drm_i915_private *dev_priv = dev->dev_private; 340 341 return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0; 342} 343 344static void 345intel_dp_check_edp(struct intel_dp *intel_dp) 346{ 347 struct drm_device *dev = intel_dp->base.base.dev; 348 struct drm_i915_private *dev_priv = dev->dev_private; 349 350 if (!is_edp(intel_dp)) 351 return; 352 if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) { 353 printf("eDP powered off while attempting aux channel communication.\n"); 354 DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n", 355 I915_READ(PCH_PP_STATUS), 356 I915_READ(PCH_PP_CONTROL)); 357 } 358} 359 360static int 361intel_dp_aux_ch(struct intel_dp *intel_dp, 362 uint8_t *send, int send_bytes, 363 uint8_t *recv, int recv_size) 364{ 365 uint32_t output_reg = intel_dp->output_reg; 366 struct drm_device *dev = intel_dp->base.base.dev; 367 struct drm_i915_private *dev_priv = dev->dev_private; 368 uint32_t ch_ctl = output_reg + 0x10; 369 uint32_t ch_data = ch_ctl + 4; 370 int i; 371 int recv_bytes; 372 uint32_t status; 373 uint32_t aux_clock_divider; 374 int try, precharge = 5; 375 376 intel_dp_check_edp(intel_dp); 377 /* The clock divider is based off the hrawclk, 378 * and would like to run at 2MHz. So, take the 379 * hrawclk value and divide by 2 and use that 380 * 381 * Note that PCH attached eDP panels should use a 125MHz input 382 * clock divider. 
383 */ 384 if (is_cpu_edp(intel_dp)) { 385 if (IS_GEN6(dev) || IS_GEN7(dev)) 386 aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */ 387 else 388 aux_clock_divider = 225; /* eDP input clock at 450Mhz */ 389 } else if (HAS_PCH_SPLIT(dev)) 390 aux_clock_divider = 63; /* IRL input clock fixed at 125Mhz */ 391 else 392 aux_clock_divider = intel_hrawclk(dev) / 2; 393 394 /* Try to wait for any previous AUX channel activity */ 395 for (try = 0; try < 3; try++) { 396 status = I915_READ(ch_ctl); 397 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0) 398 break; 399 drm_msleep(1, "915ach"); 400 } 401 402 if (try == 3) { 403 printf("dp_aux_ch not started status 0x%08x\n", 404 I915_READ(ch_ctl)); 405 return -EBUSY; 406 } 407 408 /* Must try at least 3 times according to DP spec */ 409 for (try = 0; try < 5; try++) { 410 /* Load the send data into the aux channel data registers */ 411 for (i = 0; i < send_bytes; i += 4) 412 I915_WRITE(ch_data + i, 413 pack_aux(send + i, send_bytes - i)); 414 415 /* Send the command and wait for it to complete */ 416 I915_WRITE(ch_ctl, 417 DP_AUX_CH_CTL_SEND_BUSY | 418 DP_AUX_CH_CTL_TIME_OUT_400us | 419 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) | 420 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) | 421 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) | 422 DP_AUX_CH_CTL_DONE | 423 DP_AUX_CH_CTL_TIME_OUT_ERROR | 424 DP_AUX_CH_CTL_RECEIVE_ERROR); 425 for (;;) { 426 status = I915_READ(ch_ctl); 427 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0) 428 break; 429 DELAY(100); 430 } 431 432 /* Clear done status and any errors */ 433 I915_WRITE(ch_ctl, 434 status | 435 DP_AUX_CH_CTL_DONE | 436 DP_AUX_CH_CTL_TIME_OUT_ERROR | 437 DP_AUX_CH_CTL_RECEIVE_ERROR); 438 439 if (status & (DP_AUX_CH_CTL_TIME_OUT_ERROR | 440 DP_AUX_CH_CTL_RECEIVE_ERROR)) 441 continue; 442 if (status & DP_AUX_CH_CTL_DONE) 443 break; 444 } 445 446 if ((status & DP_AUX_CH_CTL_DONE) == 0) { 447 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status); 448 return 
-EBUSY; 449 } 450 451 /* Check for timeout or receive error. 452 * Timeouts occur when the sink is not connected 453 */ 454 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) { 455 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status); 456 return -EIO; 457 } 458 459 /* Timeouts occur when the device isn't connected, so they're 460 * "normal" -- don't fill the kernel log with these */ 461 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) { 462 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status); 463 return -ETIMEDOUT; 464 } 465 466 /* Unload any bytes sent back from the other side */ 467 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >> 468 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT); 469 if (recv_bytes > recv_size) 470 recv_bytes = recv_size; 471 472 for (i = 0; i < recv_bytes; i += 4) 473 unpack_aux(I915_READ(ch_data + i), 474 recv + i, recv_bytes - i); 475 476 return recv_bytes; 477} 478 479/* Write data to the aux channel in native mode */ 480static int 481intel_dp_aux_native_write(struct intel_dp *intel_dp, 482 uint16_t address, uint8_t *send, int send_bytes) 483{ 484 int ret; 485 uint8_t msg[20]; 486 int msg_bytes; 487 uint8_t ack; 488 489 intel_dp_check_edp(intel_dp); 490 if (send_bytes > 16) 491 return -1; 492 msg[0] = AUX_NATIVE_WRITE << 4; 493 msg[1] = address >> 8; 494 msg[2] = address & 0xff; 495 msg[3] = send_bytes - 1; 496 memcpy(&msg[4], send, send_bytes); 497 msg_bytes = send_bytes + 4; 498 for (;;) { 499 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1); 500 if (ret < 0) 501 return ret; 502 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) 503 break; 504 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 505 DELAY(100); 506 else 507 return -EIO; 508 } 509 return send_bytes; 510} 511 512/* Write a single byte to the aux channel in native mode */ 513static int 514intel_dp_aux_native_write_1(struct intel_dp *intel_dp, 515 uint16_t address, uint8_t byte) 516{ 517 return intel_dp_aux_native_write(intel_dp, address, &byte, 1); 
518} 519 520/* read bytes from a native aux channel */ 521static int 522intel_dp_aux_native_read(struct intel_dp *intel_dp, 523 uint16_t address, uint8_t *recv, int recv_bytes) 524{ 525 uint8_t msg[4]; 526 int msg_bytes; 527 uint8_t reply[20]; 528 int reply_bytes; 529 uint8_t ack; 530 int ret; 531 532 intel_dp_check_edp(intel_dp); 533 msg[0] = AUX_NATIVE_READ << 4; 534 msg[1] = address >> 8; 535 msg[2] = address & 0xff; 536 msg[3] = recv_bytes - 1; 537 538 msg_bytes = 4; 539 reply_bytes = recv_bytes + 1; 540 541 for (;;) { 542 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, 543 reply, reply_bytes); 544 if (ret == 0) 545 return -EPROTO; 546 if (ret < 0) 547 return ret; 548 ack = reply[0]; 549 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) { 550 memcpy(recv, reply + 1, ret - 1); 551 return ret - 1; 552 } 553 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 554 DELAY(100); 555 else 556 return -EIO; 557 } 558} 559 560static int 561intel_dp_i2c_aux_ch(device_t idev, int mode, uint8_t write_byte, 562 uint8_t *read_byte) 563{ 564 struct iic_dp_aux_data *data; 565 struct intel_dp *intel_dp; 566 uint16_t address; 567 uint8_t msg[5]; 568 uint8_t reply[2]; 569 unsigned retry; 570 int msg_bytes; 571 int reply_bytes; 572 int ret; 573 574 data = device_get_softc(idev); 575 intel_dp = data->priv; 576 address = data->address; 577 578 intel_dp_check_edp(intel_dp); 579 /* Set up the command byte */ 580 if (mode & MODE_I2C_READ) 581 msg[0] = AUX_I2C_READ << 4; 582 else 583 msg[0] = AUX_I2C_WRITE << 4; 584 585 if (!(mode & MODE_I2C_STOP)) 586 msg[0] |= AUX_I2C_MOT << 4; 587 588 msg[1] = address >> 8; 589 msg[2] = address; 590 591 switch (mode) { 592 case MODE_I2C_WRITE: 593 msg[3] = 0; 594 msg[4] = write_byte; 595 msg_bytes = 5; 596 reply_bytes = 1; 597 break; 598 case MODE_I2C_READ: 599 msg[3] = 0; 600 msg_bytes = 4; 601 reply_bytes = 2; 602 break; 603 default: 604 msg_bytes = 3; 605 reply_bytes = 1; 606 break; 607 } 608 609 for (retry = 0; retry < 5; 
retry++) { 610 ret = intel_dp_aux_ch(intel_dp, 611 msg, msg_bytes, 612 reply, reply_bytes); 613 if (ret < 0) { 614 DRM_DEBUG_KMS("aux_ch failed %d\n", ret); 615 return (-ret); 616 } 617 618 switch (reply[0] & AUX_NATIVE_REPLY_MASK) { 619 case AUX_NATIVE_REPLY_ACK: 620 /* I2C-over-AUX Reply field is only valid 621 * when paired with AUX ACK. 622 */ 623 break; 624 case AUX_NATIVE_REPLY_NACK: 625 DRM_DEBUG_KMS("aux_ch native nack\n"); 626 return (EREMOTEIO); 627 case AUX_NATIVE_REPLY_DEFER: 628 DELAY(100); 629 continue; 630 default: 631 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", 632 reply[0]); 633 return (EREMOTEIO); 634 } 635 636 switch (reply[0] & AUX_I2C_REPLY_MASK) { 637 case AUX_I2C_REPLY_ACK: 638 if (mode == MODE_I2C_READ) { 639 *read_byte = reply[1]; 640 } 641 return (0/*reply_bytes - 1*/); 642 case AUX_I2C_REPLY_NACK: 643 DRM_DEBUG_KMS("aux_i2c nack\n"); 644 return (EREMOTEIO); 645 case AUX_I2C_REPLY_DEFER: 646 DRM_DEBUG_KMS("aux_i2c defer\n"); 647 DELAY(100); 648 break; 649 default: 650 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]); 651 return (EREMOTEIO); 652 } 653 } 654 655 DRM_ERROR("too many retries, giving up\n"); 656 return (EREMOTEIO); 657} 658 659static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp); 660static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync); 661 662static int 663intel_dp_i2c_init(struct intel_dp *intel_dp, 664 struct intel_connector *intel_connector, const char *name) 665{ 666 int ret; 667 668 DRM_DEBUG_KMS("i2c_init %s\n", name); 669 670 ironlake_edp_panel_vdd_on(intel_dp); 671 ret = iic_dp_aux_add_bus(intel_connector->base.dev->device, name, 672 intel_dp_i2c_aux_ch, intel_dp, &intel_dp->dp_iic_bus, 673 &intel_dp->adapter); 674 ironlake_edp_panel_vdd_off(intel_dp, false); 675 return (ret); 676} 677 678static bool 679intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode, 680 struct drm_display_mode *adjusted_mode) 681{ 682 struct drm_device *dev = encoder->dev; 
683 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 684 int lane_count, clock; 685 int max_lane_count = intel_dp_max_lane_count(intel_dp); 686 int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0; 687 int bpp; 688 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; 689 690 if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { 691 intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode); 692 intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN, 693 mode, adjusted_mode); 694 /* 695 * the mode->clock is used to calculate the Data&Link M/N 696 * of the pipe. For the eDP the fixed clock should be used. 697 */ 698 mode->clock = intel_dp->panel_fixed_mode->clock; 699 } 700 701 if (!intel_dp_adjust_dithering(intel_dp, mode, adjusted_mode)) 702 return false; 703 704 bpp = adjusted_mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 24; 705 706 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) { 707 for (clock = 0; clock <= max_clock; clock++) { 708 int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count); 709 710 if (intel_dp_link_required(mode->clock, bpp) 711 <= link_avail) { 712 intel_dp->link_bw = bws[clock]; 713 intel_dp->lane_count = lane_count; 714 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw); 715 DRM_DEBUG_KMS("Display port link bw %02x lane " 716 "count %d clock %d\n", 717 intel_dp->link_bw, intel_dp->lane_count, 718 adjusted_mode->clock); 719 return true; 720 } 721 } 722 } 723 724 return false; 725} 726 727struct intel_dp_m_n { 728 uint32_t tu; 729 uint32_t gmch_m; 730 uint32_t gmch_n; 731 uint32_t link_m; 732 uint32_t link_n; 733}; 734 735static void 736intel_reduce_ratio(uint32_t *num, uint32_t *den) 737{ 738 while (*num > 0xffffff || *den > 0xffffff) { 739 *num >>= 1; 740 *den >>= 1; 741 } 742} 743 744static void 745intel_dp_compute_m_n(int bpp, 746 int nlanes, 747 int pixel_clock, 748 int link_clock, 749 struct intel_dp_m_n *m_n) 750{ 751 m_n->tu = 64; 
752 m_n->gmch_m = (pixel_clock * bpp) >> 3; 753 m_n->gmch_n = link_clock * nlanes; 754 intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n); 755 m_n->link_m = pixel_clock; 756 m_n->link_n = link_clock; 757 intel_reduce_ratio(&m_n->link_m, &m_n->link_n); 758} 759 760void 761intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode, 762 struct drm_display_mode *adjusted_mode) 763{ 764 struct drm_device *dev = crtc->dev; 765 struct drm_mode_config *mode_config = &dev->mode_config; 766 struct drm_encoder *encoder; 767 struct drm_i915_private *dev_priv = dev->dev_private; 768 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 769 int lane_count = 4; 770 struct intel_dp_m_n m_n; 771 int pipe = intel_crtc->pipe; 772 773 /* 774 * Find the lane count in the intel_encoder private 775 */ 776 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 777 struct intel_dp *intel_dp; 778 779 if (encoder->crtc != crtc) 780 continue; 781 782 intel_dp = enc_to_intel_dp(encoder); 783 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT || 784 intel_dp->base.type == INTEL_OUTPUT_EDP) 785 { 786 lane_count = intel_dp->lane_count; 787 break; 788 } 789 } 790 791 /* 792 * Compute the GMCH and Link ratios. The '3' here is 793 * the number of bytes_per_pixel post-LUT, which we always 794 * set up for 8-bits of R/G/B, or 3 bytes total. 
795 */ 796 intel_dp_compute_m_n(intel_crtc->bpp, lane_count, 797 mode->clock, adjusted_mode->clock, &m_n); 798 799 if (HAS_PCH_SPLIT(dev)) { 800 I915_WRITE(TRANSDATA_M1(pipe), 801 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 802 m_n.gmch_m); 803 I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n); 804 I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m); 805 I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n); 806 } else { 807 I915_WRITE(PIPE_GMCH_DATA_M(pipe), 808 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 809 m_n.gmch_m); 810 I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n); 811 I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m); 812 I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n); 813 } 814} 815 816static void ironlake_edp_pll_on(struct drm_encoder *encoder); 817static void ironlake_edp_pll_off(struct drm_encoder *encoder); 818 819static void 820intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, 821 struct drm_display_mode *adjusted_mode) 822{ 823 struct drm_device *dev = encoder->dev; 824 struct drm_i915_private *dev_priv = dev->dev_private; 825 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 826 struct drm_crtc *crtc = intel_dp->base.base.crtc; 827 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 828 829 /* Turn on the eDP PLL if needed */ 830 if (is_edp(intel_dp)) { 831 if (!is_pch_edp(intel_dp)) 832 ironlake_edp_pll_on(encoder); 833 else 834 ironlake_edp_pll_off(encoder); 835 } 836 837 /* 838 * There are four kinds of DP registers: 839 * 840 * IBX PCH 841 * SNB CPU 842 * IVB CPU 843 * CPT PCH 844 * 845 * IBX PCH and CPU are the same for almost everything, 846 * except that the CPU DP PLL is configured in this 847 * register 848 * 849 * CPT PCH is quite different, having many bits moved 850 * to the TRANS_DP_CTL register instead. That 851 * configuration happens (oddly) in ironlake_pch_enable 852 */ 853 854 /* Preserve the BIOS-computed detected bit. This is 855 * supposed to be read-only. 
856 */ 857 intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED; 858 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 859 860 /* Handle DP bits in common between all three register formats */ 861 862 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 863 864 switch (intel_dp->lane_count) { 865 case 1: 866 intel_dp->DP |= DP_PORT_WIDTH_1; 867 break; 868 case 2: 869 intel_dp->DP |= DP_PORT_WIDTH_2; 870 break; 871 case 4: 872 intel_dp->DP |= DP_PORT_WIDTH_4; 873 break; 874 } 875 if (intel_dp->has_audio) { 876 DRM_DEBUG_KMS("Enabling DP audio on pipe %c\n", 877 pipe_name(intel_crtc->pipe)); 878 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; 879 intel_write_eld(encoder, adjusted_mode); 880 } 881 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 882 intel_dp->link_configuration[0] = intel_dp->link_bw; 883 intel_dp->link_configuration[1] = intel_dp->lane_count; 884 /* 885 * Check for DPCD version > 1.1 and enhanced framing support 886 */ 887 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 888 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) { 889 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 890 } 891 892 /* Split out the IBX/CPU vs CPT settings */ 893 894 if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) { 895 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 896 intel_dp->DP |= DP_SYNC_HS_HIGH; 897 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 898 intel_dp->DP |= DP_SYNC_VS_HIGH; 899 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 900 901 if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN) 902 intel_dp->DP |= DP_ENHANCED_FRAMING; 903 904 intel_dp->DP |= intel_crtc->pipe << 29; 905 906 /* don't miss out required setting for eDP */ 907 intel_dp->DP |= DP_PLL_ENABLE; 908 if (adjusted_mode->clock < 200000) 909 intel_dp->DP |= DP_PLL_FREQ_160MHZ; 910 else 911 intel_dp->DP |= DP_PLL_FREQ_270MHZ; 912 } else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) { 913 intel_dp->DP |= intel_dp->color_range; 914 915 if 
(adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 916 intel_dp->DP |= DP_SYNC_HS_HIGH; 917 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 918 intel_dp->DP |= DP_SYNC_VS_HIGH; 919 intel_dp->DP |= DP_LINK_TRAIN_OFF; 920 921 if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN) 922 intel_dp->DP |= DP_ENHANCED_FRAMING; 923 924 if (intel_crtc->pipe == 1) 925 intel_dp->DP |= DP_PIPEB_SELECT; 926 927 if (is_cpu_edp(intel_dp)) { 928 /* don't miss out required setting for eDP */ 929 intel_dp->DP |= DP_PLL_ENABLE; 930 if (adjusted_mode->clock < 200000) 931 intel_dp->DP |= DP_PLL_FREQ_160MHZ; 932 else 933 intel_dp->DP |= DP_PLL_FREQ_270MHZ; 934 } 935 } else { 936 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 937 } 938} 939 940#define IDLE_ON_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK) 941#define IDLE_ON_VALUE (PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE) 942 943#define IDLE_OFF_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK) 944#define IDLE_OFF_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE) 945 946#define IDLE_CYCLE_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK) 947#define IDLE_CYCLE_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE) 948 949static void ironlake_wait_panel_status(struct intel_dp *intel_dp, 950 u32 mask, 951 u32 value) 952{ 953 struct drm_device *dev = intel_dp->base.base.dev; 954 struct drm_i915_private *dev_priv = dev->dev_private; 955 956 DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n", 957 mask, value, 958 I915_READ(PCH_PP_STATUS), 959 I915_READ(PCH_PP_CONTROL)); 960 961 if (_intel_wait_for(dev, 962 (I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10, "915iwp")) { 963 DRM_ERROR("Panel status timeout: status %08x control %08x\n", 964 I915_READ(PCH_PP_STATUS), 965 I915_READ(PCH_PP_CONTROL)); 966 } 967} 968 969static void ironlake_wait_panel_on(struct intel_dp *intel_dp) 970{ 971 DRM_DEBUG_KMS("Wait for panel 
power on\n");
	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}

/* Wait until the panel power sequencer reports the panel fully off. */
static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power off time\n");
	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}

/* Wait until the sequencer is idle after a complete off/on power cycle. */
static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power cycle\n");
	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}


/* Read the current pp_control value, unlocking the register if it
 * is locked
 */

static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
{
	u32 control = I915_READ(PCH_PP_CONTROL);

	/* Replace the write-protect key so subsequent writes take effect. */
	control &= ~PANEL_UNLOCK_MASK;
	control |= PANEL_UNLOCK_REGS;
	return control;
}

/*
 * Force the eDP panel VDD rail on so the AUX channel can be used before
 * the panel itself is powered up.  Every call must be balanced by a later
 * ironlake_edp_panel_vdd_off().  No-op for non-eDP outputs.
 */
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;
	DRM_DEBUG_KMS("Turn eDP VDD on\n");

	if (intel_dp->want_panel_vdd)
		printf("eDP VDD already requested on\n");

	intel_dp->want_panel_vdd = true;

	if (ironlake_edp_have_panel_vdd(intel_dp)) {
		DRM_DEBUG_KMS("eDP VDD already on\n");
		return;
	}

	/* Respect the mandatory power-cycle delay before re-applying power. */
	if (!ironlake_edp_have_panel_power(intel_dp))
		ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_FORCE_VDD;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
		      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

	/*
	 * If the panel wasn't on, delay before accessing aux channel
	 */
	if (!ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP was not running\n");
		drm_msleep(intel_dp->panel_power_up_delay, "915edpon");
	}
}

/*
 * Actually drop the VDD force if nobody wants it any more, then wait out
 * the panel power-down delay so the sequencer is idle before further use.
 */
static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
		pp = ironlake_get_pp_control(dev_priv);
		pp &= ~EDP_FORCE_VDD;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);

		/* Make sure sequencer is idle before allowing subsequent activity */
		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

		drm_msleep(intel_dp->panel_power_down_delay, "915vddo");
	}
}

/* Deferred-task callback: drop VDD under the mode_config lock. */
static void ironlake_panel_vdd_work(void *arg, int pending __unused)
{
	struct intel_dp *intel_dp = arg;
	struct drm_device *dev = intel_dp->base.base.dev;

	sx_xlock(&dev->mode_config.mutex);
	ironlake_panel_vdd_off_sync(intel_dp);
	sx_xunlock(&dev->mode_config.mutex);
}

/*
 * Release a VDD reference.  If @sync, power is dropped immediately;
 * otherwise a delayed task keeps VDD up across a burst of AUX traffic.
 */
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
	if (!intel_dp->want_panel_vdd)
		printf("eDP VDD not forced on\n");

	intel_dp->want_panel_vdd = false;

	if (sync) {
		ironlake_panel_vdd_off_sync(intel_dp);
	} else {
		/*
		 * Queue the timer to fire a long
		 * time from now (relative to the power down delay)
		 * to keep the panel power up across a sequence of operations
		 */
		struct drm_i915_private *dev_priv = intel_dp->base.base.dev->dev_private;
		taskqueue_enqueue_timeout(dev_priv->tq,
		    &intel_dp->panel_vdd_task,
		    msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
	}
}

/* Turn panel power on, honoring the Ironlake reset-bit workaround. */
static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power on\n");

	if (ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP power already on\n");
		return;
	}

	ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}

	pp |= POWER_TARGET_ON;
	if (!IS_GEN5(dev))
		pp |= PANEL_POWER_RESET;

	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_on(intel_dp);

	if (IS_GEN5(dev)) {
		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}
}

/* Turn panel power off; VDD must not be forced on at this point. */
static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power off\n");

	if (intel_dp->want_panel_vdd)
		printf("Cannot turn power off while VDD is on\n");

	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_off(intel_dp);
}

/* Enable the eDP backlight once the panel has had time to sync. */
static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	/*
	 * If we enable the backlight right away following a
panel power
	 * on, we may see slight flicker as the panel syncs with the eDP
	 * link.  So delay a bit to make sure the image is solid before
	 * allowing it to appear.
	 */
	drm_msleep(intel_dp->backlight_on_delay, "915ebo");
	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
}

/* Disable the eDP backlight and wait out the mandated off delay. */
static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	drm_msleep(intel_dp->backlight_off_delay, "915bo1");
}

/* Enable the CPU eDP PLL (DP_A) and give it time to spin up. */
static void ironlake_edp_pll_on(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	DRM_DEBUG_KMS("\n");
	dpa_ctl = I915_READ(DP_A);
	dpa_ctl |= DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	DELAY(200);
}

/* Disable the CPU eDP PLL (DP_A). */
static void ironlake_edp_pll_off(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	dpa_ctl = I915_READ(DP_A);
	dpa_ctl &= ~DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	DELAY(200);
}

/* If the sink supports it, try to set the power state appropriately */
static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
{
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DRM_MODE_DPMS_ON) {
		ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
						  DP_SET_POWER_D3);
		if (ret != 1)
			DRM_DEBUG("failed to write sink power state\n");
	} else {
		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = intel_dp_aux_native_write_1(intel_dp,
							  DP_SET_POWER,
							  DP_SET_POWER_D0);
			if (ret == 1)
				break;
			drm_msleep(1, "915dps");
		}
	}
}

/* Encoder prepare hook: quiesce panel and link before a mode change. */
static void intel_dp_prepare(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	ironlake_edp_backlight_off(intel_dp);
	ironlake_edp_panel_off(intel_dp);

	/* Wake up the sink first */
	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_link_down(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, false);

	/* Make sure the panel is off before trying to
	 * change the mode
	 */
}

/* Encoder commit hook: train the link, then bring panel and backlight up. */
static void intel_dp_commit(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);

	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_start_link_train(intel_dp);
	ironlake_edp_panel_on(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, true);
	intel_dp_complete_link_train(intel_dp);
	ironlake_edp_backlight_on(intel_dp);

	intel_dp->dpms_mode = DRM_MODE_DPMS_ON;

	if (HAS_PCH_CPT(dev))
		intel_cpt_verify_modeset(dev, intel_crtc->pipe);
}

/* DPMS hook: bring the whole output up or down in the required order. */
static void
intel_dp_dpms(struct drm_encoder *encoder, int mode)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t dp_reg = I915_READ(intel_dp->output_reg);

	if (mode !=
DRM_MODE_DPMS_ON) {
		ironlake_edp_backlight_off(intel_dp);
		ironlake_edp_panel_off(intel_dp);

		/* Wake the sink so it accepts the power-state write. */
		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		intel_dp_link_down(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_off(encoder);
	} else {
		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_on(encoder);

		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		if (!(dp_reg & DP_PORT_EN)) {
			/* Port was down: retrain the link from scratch. */
			intel_dp_start_link_train(intel_dp);
			ironlake_edp_panel_on(intel_dp);
			ironlake_edp_panel_vdd_off(intel_dp, true);
			intel_dp_complete_link_train(intel_dp);
		} else
			ironlake_edp_panel_vdd_off(intel_dp, false);
		ironlake_edp_backlight_on(intel_dp);
	}
	intel_dp->dpms_mode = mode;
}

/*
 * Native read with retry for link status and receiver capability reads for
 * cases where the sink may still be asleep.
 */
static bool
intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
			       uint8_t *recv, int recv_bytes)
{
	int ret, i;

	/*
	 * Sinks are *supposed* to come up within 1ms from an off state,
	 * but we're also supposed to retry 3 times per the spec.
	 */
	for (i = 0; i < 3; i++) {
		ret = intel_dp_aux_native_read(intel_dp, address, recv,
					       recv_bytes);
		if (ret == recv_bytes)
			return true;
		drm_msleep(1, "915dpl");
	}

	return false;
}

/*
 * Fetch AUX CH registers 0x202 - 0x207 which contain
 * link status information
 */
static bool
intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	return intel_dp_aux_native_read_retry(intel_dp,
					      DP_LANE0_1_STATUS,
					      link_status,
					      DP_LINK_STATUS_SIZE);
}

/* Index a cached link-status block by DPCD register address @r. */
static uint8_t
intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		     int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

/* Extract the voltage swing requested for @lane from an ADJUST_REQUEST pair. */
static uint8_t
intel_get_adjust_request_voltage(uint8_t adjust_request[2],
				 int lane)
{
	int s = ((lane & 1) ?
		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	uint8_t l = adjust_request[lane>>1];

	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

/* Extract the pre-emphasis requested for @lane from an ADJUST_REQUEST pair. */
static uint8_t
intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
				      int lane)
{
	int s = ((lane & 1) ?
		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	uint8_t l = adjust_request[lane>>1];

	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}


#if 0
static char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
static char *link_train_names[] = {
	"pattern 1", "pattern 2", "idle", "off"
};
#endif

/*
 * These are source-specific values; current Intel hardware supports
 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
 */

static uint8_t
intel_dp_voltage_max(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
		return DP_TRAIN_VOLTAGE_SWING_800;
	else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
		return DP_TRAIN_VOLTAGE_SWING_1200;
	else
		return DP_TRAIN_VOLTAGE_SWING_800;
}

/* Highest pre-emphasis the source supports at a given voltage swing. */
static uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	} else {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		case DP_TRAIN_VOLTAGE_SWING_1200:
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	}
}

static void
1462intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) 1463{ 1464 uint8_t v = 0; 1465 uint8_t p = 0; 1466 int lane; 1467 uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS); 1468 uint8_t voltage_max; 1469 uint8_t preemph_max; 1470 1471 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1472 uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane); 1473 uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane); 1474 1475 if (this_v > v) 1476 v = this_v; 1477 if (this_p > p) 1478 p = this_p; 1479 } 1480 1481 voltage_max = intel_dp_voltage_max(intel_dp); 1482 if (v >= voltage_max) 1483 v = voltage_max | DP_TRAIN_MAX_SWING_REACHED; 1484 1485 preemph_max = intel_dp_pre_emphasis_max(intel_dp, v); 1486 if (p >= preemph_max) 1487 p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1488 1489 for (lane = 0; lane < 4; lane++) 1490 intel_dp->train_set[lane] = v | p; 1491} 1492 1493static uint32_t 1494intel_dp_signal_levels(uint8_t train_set) 1495{ 1496 uint32_t signal_levels = 0; 1497 1498 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 1499 case DP_TRAIN_VOLTAGE_SWING_400: 1500 default: 1501 signal_levels |= DP_VOLTAGE_0_4; 1502 break; 1503 case DP_TRAIN_VOLTAGE_SWING_600: 1504 signal_levels |= DP_VOLTAGE_0_6; 1505 break; 1506 case DP_TRAIN_VOLTAGE_SWING_800: 1507 signal_levels |= DP_VOLTAGE_0_8; 1508 break; 1509 case DP_TRAIN_VOLTAGE_SWING_1200: 1510 signal_levels |= DP_VOLTAGE_1_2; 1511 break; 1512 } 1513 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) { 1514 case DP_TRAIN_PRE_EMPHASIS_0: 1515 default: 1516 signal_levels |= DP_PRE_EMPHASIS_0; 1517 break; 1518 case DP_TRAIN_PRE_EMPHASIS_3_5: 1519 signal_levels |= DP_PRE_EMPHASIS_3_5; 1520 break; 1521 case DP_TRAIN_PRE_EMPHASIS_6: 1522 signal_levels |= DP_PRE_EMPHASIS_6; 1523 break; 1524 case DP_TRAIN_PRE_EMPHASIS_9_5: 1525 signal_levels |= DP_PRE_EMPHASIS_9_5; 1526 break; 1527 } 1528 return signal_levels; 
1529} 1530 1531/* Gen6's DP voltage swing and pre-emphasis control */ 1532static uint32_t 1533intel_gen6_edp_signal_levels(uint8_t train_set) 1534{ 1535 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1536 DP_TRAIN_PRE_EMPHASIS_MASK); 1537 switch (signal_levels) { 1538 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0: 1539 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0: 1540 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1541 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5: 1542 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B; 1543 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6: 1544 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6: 1545 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B; 1546 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5: 1547 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5: 1548 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B; 1549 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0: 1550 case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0: 1551 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B; 1552 default: 1553 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:" 1554 "0x%x\n", signal_levels); 1555 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1556 } 1557} 1558 1559/* Gen7's DP voltage swing and pre-emphasis control */ 1560static uint32_t 1561intel_gen7_edp_signal_levels(uint8_t train_set) 1562{ 1563 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1564 DP_TRAIN_PRE_EMPHASIS_MASK); 1565 switch (signal_levels) { 1566 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0: 1567 return EDP_LINK_TRAIN_400MV_0DB_IVB; 1568 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5: 1569 return EDP_LINK_TRAIN_400MV_3_5DB_IVB; 1570 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6: 1571 return EDP_LINK_TRAIN_400MV_6DB_IVB; 1572 1573 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0: 1574 return EDP_LINK_TRAIN_600MV_0DB_IVB; 1575 case 
DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5: 1576 return EDP_LINK_TRAIN_600MV_3_5DB_IVB; 1577 1578 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0: 1579 return EDP_LINK_TRAIN_800MV_0DB_IVB; 1580 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5: 1581 return EDP_LINK_TRAIN_800MV_3_5DB_IVB; 1582 1583 default: 1584 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:" 1585 "0x%x\n", signal_levels); 1586 return EDP_LINK_TRAIN_500MV_0DB_IVB; 1587 } 1588} 1589 1590static uint8_t 1591intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1592 int lane) 1593{ 1594 int s = (lane & 1) * 4; 1595 uint8_t l = link_status[lane>>1]; 1596 1597 return (l >> s) & 0xf; 1598} 1599 1600/* Check for clock recovery is done on all channels */ 1601static bool 1602intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count) 1603{ 1604 int lane; 1605 uint8_t lane_status; 1606 1607 for (lane = 0; lane < lane_count; lane++) { 1608 lane_status = intel_get_lane_status(link_status, lane); 1609 if ((lane_status & DP_LANE_CR_DONE) == 0) 1610 return false; 1611 } 1612 return true; 1613} 1614 1615/* Check to see if channel eq is done on all channels */ 1616#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\ 1617 DP_LANE_CHANNEL_EQ_DONE|\ 1618 DP_LANE_SYMBOL_LOCKED) 1619static bool 1620intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) 1621{ 1622 uint8_t lane_align; 1623 uint8_t lane_status; 1624 int lane; 1625 1626 lane_align = intel_dp_link_status(link_status, 1627 DP_LANE_ALIGN_STATUS_UPDATED); 1628 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0) 1629 return false; 1630 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1631 lane_status = intel_get_lane_status(link_status, lane); 1632 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS) 1633 return false; 1634 } 1635 return true; 1636} 1637 1638static bool 1639intel_dp_set_link_train(struct intel_dp *intel_dp, 1640 uint32_t dp_reg_value, 1641 
uint8_t dp_train_pat)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	/* Program the port's training-pattern bits... */
	I915_WRITE(intel_dp->output_reg, dp_reg_value);
	POSTING_READ(intel_dp->output_reg);

	/* ...then tell the sink which pattern to expect. */
	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET,
				    dp_train_pat);

	ret = intel_dp_aux_native_write(intel_dp,
					DP_TRAINING_LANE0_SET,
					intel_dp->train_set,
					intel_dp->lane_count);
	if (ret != intel_dp->lane_count)
		return false;

	return true;
}

/* Enable corresponding port and start training pattern 1 */
static void
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
	int i;
	uint8_t voltage;
	bool clock_recovery = false;
	int voltage_tries, loop_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* Enable output, wait for it to become active */
	I915_WRITE(intel_dp->output_reg, intel_dp->DP);
	POSTING_READ(intel_dp->output_reg);
	intel_wait_for_vblank(dev, intel_crtc->pipe);

	/* Write the link configuration data */
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
				  intel_dp->link_configuration,
				  DP_LINK_CONFIGURATION_SIZE);

	DP |= DP_PORT_EN;

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
	else
		DP &= ~DP_LINK_TRAIN_MASK;
	memset(intel_dp->train_set, 0, 4);
	voltage = 0xff;
	voltage_tries = 0;
	loop_tries = 0;
	clock_recovery = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint8_t link_status[DP_LINK_STATUS_SIZE];
		uint32_t signal_levels;

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_1;

		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_1))
			break;
		/* Set training pattern 1 */

		DELAY(100);
		if (!intel_dp_get_link_status(intel_dp, link_status)) {
			DRM_ERROR("failed to get link status\n");
			break;
		}

		if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			DRM_DEBUG_KMS("clock recovery OK\n");
			clock_recovery = true;
			break;
		}

		/* Check to see if we've tried the max voltage */
		for (i = 0; i < intel_dp->lane_count; i++)
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		if (i == intel_dp->lane_count) {
			/* All lanes at max swing: restart from zero, bounded. */
			++loop_tries;
			if (loop_tries == 5) {
				DRM_DEBUG_KMS("too many full retries, give up\n");
				break;
			}
			memset(intel_dp->train_set, 0, 4);
			voltage_tries = 0;
			continue;
		}

		/* Check to see if we've tried the same voltage 5 times */
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++voltage_tries;
			if (voltage_tries == 5) {
				DRM_DEBUG_KMS("too many voltage retries, give up\n");
				break;
			}
		} else
			voltage_tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
	}

	intel_dp->DP = DP;
}

/* Run channel equalization (training pattern 2) and finish link training. */
static void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint32_t signal_levels;
		uint8_t link_status[DP_LINK_STATUS_SIZE];

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			intel_dp_link_down(intel_dp);
			break;
		}

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_2;

		/* channel eq pattern */
		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_2))
			break;

		DELAY(400);
		if (!intel_dp_get_link_status(intel_dp, link_status))
			break;

		/* Make sure clock is still ok */
		if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			intel_dp_start_link_train(intel_dp);
			cr_tries++;
			continue;
		}

		if (intel_channel_eq_ok(intel_dp, link_status)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			intel_dp_link_down(intel_dp);
			intel_dp_start_link_train(intel_dp);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
		++tries;
	}

	/* Training done (or abandoned): switch the pattern off. */
	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		reg = DP | DP_LINK_TRAIN_OFF_CPT;
	else
		reg = DP | DP_LINK_TRAIN_OFF;

	I915_WRITE(intel_dp->output_reg, reg);
	POSTING_READ(intel_dp->output_reg);
	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}

/* Shut the link down, applying the idle-pattern and transcoder quirks. */
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	if (is_edp(intel_dp)) {
		DP &= ~DP_PLL_ENABLE;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);
		DELAY(100);
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	POSTING_READ(intel_dp->output_reg);

	drm_msleep(17, "915dlo");

	if (is_edp(intel_dp)) {
		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			DP |= DP_LINK_TRAIN_OFF_CPT;
		else
			DP |= DP_LINK_TRAIN_OFF;
	}


	if (!HAS_PCH_CPT(dev) &&
I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) { 1899 struct drm_crtc *crtc = intel_dp->base.base.crtc; 1900 1901 /* Hardware workaround: leaving our transcoder select 1902 * set to transcoder B while it's off will prevent the 1903 * corresponding HDMI output on transcoder A. 1904 * 1905 * Combine this with another hardware workaround: 1906 * transcoder select bit can only be cleared while the 1907 * port is enabled. 1908 */ 1909 DP &= ~DP_PIPEB_SELECT; 1910 I915_WRITE(intel_dp->output_reg, DP); 1911 1912 /* Changes to enable or select take place the vblank 1913 * after being written. 1914 */ 1915 if (crtc == NULL) { 1916 /* We can arrive here never having been attached 1917 * to a CRTC, for instance, due to inheriting 1918 * random state from the BIOS. 1919 * 1920 * If the pipe is not running, play safe and 1921 * wait for the clocks to stabilise before 1922 * continuing. 1923 */ 1924 POSTING_READ(intel_dp->output_reg); 1925 drm_msleep(50, "915dla"); 1926 } else 1927 intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe); 1928 } 1929 1930 DP &= ~DP_AUDIO_OUTPUT_ENABLE; 1931 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN); 1932 POSTING_READ(intel_dp->output_reg); 1933 drm_msleep(intel_dp->panel_power_down_delay, "915ldo"); 1934} 1935 1936static bool 1937intel_dp_get_dpcd(struct intel_dp *intel_dp) 1938{ 1939 if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd, 1940 sizeof(intel_dp->dpcd)) && 1941 (intel_dp->dpcd[DP_DPCD_REV] != 0)) { 1942 return true; 1943 } 1944 1945 return false; 1946} 1947 1948static bool 1949intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector) 1950{ 1951 int ret; 1952 1953 ret = intel_dp_aux_native_read_retry(intel_dp, 1954 DP_DEVICE_SERVICE_IRQ_VECTOR, 1955 sink_irq_vector, 1); 1956 if (!ret) 1957 return false; 1958 1959 return true; 1960} 1961 1962static void 1963intel_dp_handle_test_request(struct intel_dp *intel_dp) 1964{ 1965 /* NAK by default */ 1966 intel_dp_aux_native_write_1(intel_dp, 
DP_TEST_RESPONSE, DP_TEST_ACK); 1967} 1968 1969/* 1970 * According to DP spec 1971 * 5.1.2: 1972 * 1. Read DPCD 1973 * 2. Configure link according to Receiver Capabilities 1974 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3 1975 * 4. Check link status on receipt of hot-plug interrupt 1976 */ 1977 1978static void 1979intel_dp_check_link_status(struct intel_dp *intel_dp) 1980{ 1981 u8 sink_irq_vector; 1982 u8 link_status[DP_LINK_STATUS_SIZE]; 1983 1984 if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON) 1985 return; 1986 1987 if (!intel_dp->base.base.crtc) 1988 return; 1989 1990 /* Try to read receiver status if the link appears to be up */ 1991 if (!intel_dp_get_link_status(intel_dp, link_status)) { 1992 intel_dp_link_down(intel_dp); 1993 return; 1994 } 1995 1996 /* Now read the DPCD to see if it's actually running */ 1997 if (!intel_dp_get_dpcd(intel_dp)) { 1998 intel_dp_link_down(intel_dp); 1999 return; 2000 } 2001 2002 /* Try to read the source of the interrupt */ 2003 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 2004 intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) { 2005 /* Clear interrupt source */ 2006 intel_dp_aux_native_write_1(intel_dp, 2007 DP_DEVICE_SERVICE_IRQ_VECTOR, 2008 sink_irq_vector); 2009 2010 if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST) 2011 intel_dp_handle_test_request(intel_dp); 2012 if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ)) 2013 DRM_DEBUG_KMS("CP or sink specific irq unhandled\n"); 2014 } 2015 2016 if (!intel_channel_eq_ok(intel_dp, link_status)) { 2017 DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n", 2018 drm_get_encoder_name(&intel_dp->base.base)); 2019 intel_dp_start_link_train(intel_dp); 2020 intel_dp_complete_link_train(intel_dp); 2021 } 2022} 2023 2024static enum drm_connector_status 2025intel_dp_detect_dpcd(struct intel_dp *intel_dp) 2026{ 2027 if (intel_dp_get_dpcd(intel_dp)) 2028 return connector_status_connected; 2029 return connector_status_disconnected; 2030} 2031 2032static enum drm_connector_status 
ironlake_dp_detect(struct intel_dp *intel_dp)
{
	enum drm_connector_status status;

	/* Can't disconnect eDP, but you can close the lid... */
	if (is_edp(intel_dp)) {
		/*
		 * eDP panels are hard-wired; defer to the panel/lid state
		 * and treat "unknown" as connected.
		 */
		status = intel_panel_detect(intel_dp->base.base.dev);
		if (status == connector_status_unknown)
			status = connector_status_connected;
		return status;
	}

	/* External DP port: probe the sink's DPCD over the AUX channel. */
	return intel_dp_detect_dpcd(intel_dp);
}

/*
 * g4x_dp_detect - connector detection for pre-PCH-split (G4x-class) parts.
 *
 * First consults the port's hot-plug live-status bit in PORT_HOTPLUG_STAT;
 * only if that bit is set is the (slower) DPCD AUX probe attempted.
 * Ports other than DP_B/C/D have no status bit here and report "unknown".
 */
static enum drm_connector_status
g4x_dp_detect(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t temp, bit;

	switch (intel_dp->output_reg) {
	case DP_B:
		bit = DPB_HOTPLUG_INT_STATUS;
		break;
	case DP_C:
		bit = DPC_HOTPLUG_INT_STATUS;
		break;
	case DP_D:
		bit = DPD_HOTPLUG_INT_STATUS;
		break;
	default:
		return connector_status_unknown;
	}

	temp = I915_READ(PORT_HOTPLUG_STAT);

	if ((temp & bit) == 0)
		return connector_status_disconnected;

	return intel_dp_detect_dpcd(intel_dp);
}

/*
 * intel_dp_get_edid - read the sink's EDID over the given DDC adapter.
 *
 * Wraps drm_get_edid() with the eDP VDD power bracket: panel VDD must be
 * up for AUX/DDC traffic to work on eDP.  Caller frees the returned EDID
 * with free(edid, DRM_MEM_KMS); may return NULL.
 */
static struct edid *
intel_dp_get_edid(struct drm_connector *connector, device_t adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;

	ironlake_edp_panel_vdd_on(intel_dp);
	edid = drm_get_edid(connector, adapter);
	/* false: allow the delayed VDD-off task to run rather than forcing */
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return edid;
}

/*
 * intel_dp_get_edid_modes - populate connector->probed_modes from EDID.
 *
 * Same VDD bracketing as intel_dp_get_edid(); returns the mode count from
 * intel_ddc_get_modes().
 */
static int
intel_dp_get_edid_modes(struct drm_connector *connector, device_t adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = intel_ddc_get_modes(connector, adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}


/**
 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection.
 *
 * \return true if DP port is connected.
 * \return false if DP port is disconnected.
 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	enum drm_connector_status status;
	struct edid *edid = NULL;

	intel_dp->has_audio = false;

	/* Dispatch on hardware generation: PCH-split vs. G4x detection. */
	if (HAS_PCH_SPLIT(dev))
		status = ironlake_dp_detect(intel_dp);
	else
		status = g4x_dp_detect(intel_dp);
	if (status != connector_status_connected)
		return status;

	/*
	 * Audio capability: honor an explicit user override; otherwise
	 * ask the monitor's EDID.
	 */
	if (intel_dp->force_audio != HDMI_AUDIO_AUTO) {
		intel_dp->has_audio = (intel_dp->force_audio == HDMI_AUDIO_ON);
	} else {
		edid = intel_dp_get_edid(connector, intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			/* raw_edid aliased the buffer we are about to free */
			connector->display_info.raw_edid = NULL;
			free(edid, DRM_MEM_KMS);
		}
	}

	return connector_status_connected;
}

/*
 * intel_dp_get_modes - .get_modes hook; returns the number of probed modes.
 *
 * Prefers EDID modes.  For eDP, additionally caches the preferred EDID mode
 * as panel_fixed_mode, and when EDID yields nothing falls back to the fixed
 * panel mode from the VBT.
 */
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	/* We should parse the EDID data and find out if it has an audio sink
	 */

	ret = intel_dp_get_edid_modes(connector, intel_dp->adapter);
	if (ret) {
		/*
		 * EDID gave us modes; remember the preferred one as the
		 * fixed eDP panel mode if we don't have one yet.
		 */
		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
			struct drm_display_mode *newmode;
			list_for_each_entry(newmode, &connector->probed_modes,
					    head) {
				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
					intel_dp->panel_fixed_mode =
						drm_mode_duplicate(dev, newmode);
					break;
				}
			}
		}
		return ret;
	}

	/* if eDP has no EDID, try to use fixed panel mode from VBT */
	if (is_edp(intel_dp)) {
		/* initialize panel mode from VBT if available for eDP */
		if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode) {
			struct drm_display_mode *mode;
			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}
	return 0;
}

/*
 * intel_dp_detect_audio - re-read EDID to decide whether the sink has audio.
 *
 * Used when the force_audio property flips back to "auto".  Returns false
 * if no EDID could be read.
 */
static bool
intel_dp_detect_audio(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;
	bool has_audio = false;

	edid = intel_dp_get_edid(connector, intel_dp->adapter);
	if (edid) {
		has_audio = drm_detect_monitor_audio(edid);

		/* raw_edid aliased the buffer we are about to free */
		connector->display_info.raw_edid = NULL;
		free(edid, DRM_MEM_KMS);
	}

	return has_audio;
}

/*
 * intel_dp_set_property - .set_property hook for force_audio and
 * broadcast_rgb.  A property change that actually alters state triggers a
 * full modeset on the attached CRTC (the "done" path); no-op changes return
 * 0 without touching hardware.  Returns -EINVAL for unknown properties.
 */
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ret = drm_connector_property_set_value(connector, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == HDMI_AUDIO_AUTO)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = (i == HDMI_AUDIO_ON);

		/* Only modeset when the effective audio state changed. */
		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	/* Re-run the modeset so the new property takes effect immediately. */
	if (intel_dp->base.base.crtc) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->fb);
	}

	return 0;
}

/*
 * intel_dp_destroy - .destroy hook; tears down the connector and, if the
 * device has an eDP on DP-D, the panel backlight state.
 */
static void
intel_dp_destroy(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;

	if (intel_dpd_is_edp(dev))
		intel_panel_destroy_backlight(dev);

#if 0
	drm_sysfs_connector_remove(connector);
#endif
	drm_connector_cleanup(connector);
	free(connector, DRM_MEM_KMS);
}

/*
 * intel_dp_encoder_destroy - encoder .destroy hook.
 *
 * Detaches the DDC/AUX iic children (adapter before bus), then for eDP
 * cancels and drains the delayed panel-VDD-off task before forcing VDD off
 * synchronously, so no task can run against freed memory.
 */
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct drm_device *dev;
	struct intel_dp *intel_dp;

	intel_dp = enc_to_intel_dp(encoder);
	dev = encoder->dev;

	if (intel_dp->dp_iic_bus != NULL) {
		/* Delete the adapter child first, then the bus itself. */
		if (intel_dp->adapter != NULL) {
			device_delete_child(intel_dp->dp_iic_bus,
			    intel_dp->adapter);
		}
		device_delete_child(dev->device, intel_dp->dp_iic_bus);
	}
	drm_encoder_cleanup(encoder);
	if (is_edp(intel_dp)) {
		struct drm_i915_private *dev_priv = intel_dp->base.base.dev->dev_private;

		/* Stop the delayed VDD-off task before freeing intel_dp. */
		taskqueue_cancel_timeout(dev_priv->tq,
		    &intel_dp->panel_vdd_task, NULL);
		taskqueue_drain_timeout(dev_priv->tq,
		    &intel_dp->panel_vdd_task);
		ironlake_panel_vdd_off_sync(intel_dp);
	}
	free(intel_dp, DRM_MEM_KMS);
}

/* Encoder helper vtable: modeset pipeline hooks for the DP encoder. */
static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
	.dpms = intel_dp_dpms,
	.mode_fixup = intel_dp_mode_fixup,
	.prepare = intel_dp_prepare,
	.mode_set = intel_dp_mode_set,
	.commit = intel_dp_commit,
};

/* Connector vtable: detection, property handling and teardown. */
static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.destroy = intel_dp_destroy,
};

/* Connector helper vtable: mode enumeration/validation and encoder lookup. */
static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};

/* Encoder vtable: only destruction is DP-specific. */
static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.destroy = intel_dp_encoder_destroy,
};

/*
 * intel_dp_hot_plug - hot-plug IRQ callback; revalidates link training
 * state after a plug/unplug event.
 */
static void
intel_dp_hot_plug(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	intel_dp_check_link_status(intel_dp);
}

/* Return which DP Port should be selected for Transcoder DP control */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;

	/* Find the DP/eDP encoder currently driving this CRTC, if any. */
	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
			return intel_dp->output_reg;
	}

	/* No DP encoder on this CRTC. */
	return -1;
}

/* check the VBT to see whether the eDP is on DP-D port */
bool intel_dpd_is_edp(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	/* Scan the VBT child-device table for an eDP entry on port DP-D. */
	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPD &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

/*
 * intel_dp_add_properties - attach the user-visible force-audio and
 * broadcast-RGB properties handled by intel_dp_set_property().
 */
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);
}

/*
 * intel_dp_init - create and register the encoder/connector pair for one
 * DP output register (DP_A..DP_D / PCH_DP_B..PCH_DP_D).
 *
 * For eDP additionally: initializes the delayed panel-VDD-off task, reads
 * the panel power-sequencing delays (merging register values with the VBT),
 * caches the DPCD, and sets up the backlight.  On a failed DPCD read the
 * output is assumed to be a ghost and everything is torn down again.
 */
void
intel_dp_init(struct drm_device *dev, int output_reg)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_connector *connector;
	struct intel_dp *intel_dp;
	struct intel_encoder *intel_encoder;
	struct intel_connector *intel_connector;
	const char *name = NULL;
	int type;

	intel_dp = malloc(sizeof(struct intel_dp), DRM_MEM_KMS,
	    M_WAITOK | M_ZERO);

	intel_dp->output_reg = output_reg;
	/* -1: DPMS state unknown until the first dpms call. */
	intel_dp->dpms_mode = -1;

	intel_connector = malloc(sizeof(struct intel_connector), DRM_MEM_KMS,
	    M_WAITOK | M_ZERO);
	intel_encoder = &intel_dp->base;

	/* On PCH-split parts the eDP panel may sit on the PCH DP-D port. */
	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
		if (intel_dpd_is_edp(dev))
			intel_dp->is_pch_edp = true;

	/* DP_A (CPU) or a VBT-flagged PCH DP-D is eDP; everything else DP. */
	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	}

	connector = &intel_connector->base;
	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	/* Per-port cloning restrictions. */
	if (output_reg == DP_B || output_reg == PCH_DP_B)
		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
	else if (output_reg == DP_C || output_reg == PCH_DP_C)
		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
	else if (output_reg == DP_D || output_reg == PCH_DP_D)
		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

	if (is_edp(intel_dp)) {
		/* eDP overrides the clone mask and needs the VDD-off task. */
		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
		TIMEOUT_TASK_INIT(dev_priv->tq, &intel_dp->panel_vdd_task, 0,
		    ironlake_panel_vdd_work, intel_dp);
	}

	/* DP outputs can be driven by any of pipes A, B, C. */
	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
	    DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
#if 0
	drm_sysfs_connector_add(connector);
#endif

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_A:
		name = "DPDDC-A";
		break;
	case DP_B:
	case PCH_DP_B:
		dev_priv->hotplug_supported_mask |=
			HDMIB_HOTPLUG_INT_STATUS;
		name = "DPDDC-B";
		break;
	case DP_C:
	case PCH_DP_C:
		dev_priv->hotplug_supported_mask |=
			HDMIC_HOTPLUG_INT_STATUS;
		name = "DPDDC-C";
		break;
	case DP_D:
	case PCH_DP_D:
		dev_priv->hotplug_supported_mask |=
			HDMID_HOTPLUG_INT_STATUS;
		name = "DPDDC-D";
		break;
	}

	/* Cache some DPCD data in the eDP case */
	if (is_edp(intel_dp)) {
		bool ret;
		struct edp_power_seq cur, vbt;
		u32 pp_on, pp_off, pp_div;

		pp_on = I915_READ(PCH_PP_ON_DELAYS);
		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
		pp_div = I915_READ(PCH_PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		/* t11_t12 is scaled up so all cur.* share the same unit. */
		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		vbt = dev_priv->edp.pps;

		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);

/*
 * Take the more conservative (larger) of the register and VBT values and
 * round up when converting to the driver's delay unit.  NOTE(review): the
 * /10 suggests the raw values are in 100us units and the result in ms —
 * confirm against the PCH panel-power-sequencing documentation.
 */
#define get_delay(field)	((max(cur.field, vbt.field) + 9) / 10)

		intel_dp->panel_power_up_delay = get_delay(t1_t3);
		intel_dp->backlight_on_delay = get_delay(t8);
		intel_dp->backlight_off_delay = get_delay(t9);
		intel_dp->panel_power_down_delay = get_delay(t10);
		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

		/* DPCD reads need panel VDD up on eDP. */
		ironlake_edp_panel_vdd_on(intel_dp);
		ret = intel_dp_get_dpcd(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (ret) {
			/* DPCD 1.1+: cache the no-AUX-handshake capability. */
			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
				dev_priv->no_aux_handshake =
					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
		} else {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}
	}

	intel_dp_i2c_init(intel_dp, intel_connector, name);

	intel_encoder->hot_plug = intel_dp_hot_plug;

	if (is_edp(intel_dp)) {
		dev_priv->int_edp_connector = connector;
		intel_panel_setup_backlight(dev);
	}

	intel_dp_add_properties(intel_dp, connector);

	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd. Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}
}