Lines matching refs: intel_dp (limited to /freebsd-12-stable/sys/dev/drm2/i915/)

43  * @intel_dp: DP struct
48 static bool is_edp(struct intel_dp *intel_dp)
50 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
57 * @intel_dp: DP struct
63 static bool is_pch_edp(struct intel_dp *intel_dp)
65 return intel_dp->is_pch_edp;
70 * @intel_dp: DP struct
74 static bool is_cpu_edp(struct intel_dp *intel_dp)
76 return is_edp(intel_dp) && !is_pch_edp(intel_dp);
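
Taken together, the matches at lines 43-76 define the three output predicates: every eDP panel is either driven straight from the CPU (DP-A) or routed through the PCH, and is_cpu_edp() is simply "eDP but not PCH eDP". A minimal standalone sketch of that relationship; the struct is reduced to two flags here, whereas the real is_edp() derives its answer from the attached digital port (only hinted at by the matches above):

    #include <stdbool.h>
    #include <stdio.h>

    /* Reduced stand-in for struct intel_dp: only the two facts the
     * predicates above depend on. */
    struct intel_dp_sketch {
            bool is_edp;     /* embedded DisplayPort panel            */
            bool is_pch_edp; /* wired through the PCH rather than DP-A */
    };

    static bool is_edp(const struct intel_dp_sketch *dp)     { return dp->is_edp; }
    static bool is_pch_edp(const struct intel_dp_sketch *dp) { return dp->is_pch_edp; }

    /* Mirrors line 76 above: CPU eDP is "eDP that is not PCH eDP". */
    static bool is_cpu_edp(const struct intel_dp_sketch *dp)
    {
            return is_edp(dp) && !is_pch_edp(dp);
    }

    int main(void)
    {
            struct intel_dp_sketch panel = { .is_edp = true, .is_pch_edp = false };

            printf("edp=%d pch_edp=%d cpu_edp=%d\n",
                   is_edp(&panel), is_pch_edp(&panel), is_cpu_edp(&panel));
            return 0;
    }
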
79 static struct drm_device *intel_dp_to_dev(struct intel_dp *intel_dp)
81 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
86 static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
100 struct intel_dp *intel_dp;
105 intel_dp = enc_to_intel_dp(encoder);
107 return is_pch_edp(intel_dp);
110 static void intel_dp_link_down(struct intel_dp *intel_dp);
116 struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
118 *lane_num = intel_dp->lane_count;
119 *link_bw = drm_dp_bw_code_to_link_rate(intel_dp->link_bw);
126 struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
127 struct intel_connector *intel_connector = intel_dp->attached_connector;
136 intel_dp_max_link_bw(struct intel_dp *intel_dp)
138 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
190 intel_dp_adjust_dithering(struct intel_dp *intel_dp,
194 int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
195 int max_lanes = drm_dp_max_lane_count(intel_dp->dpcd);
220 struct intel_dp *intel_dp = intel_attached_dp(connector);
224 if (is_edp(intel_dp) && fixed_mode) {
232 if (!intel_dp_adjust_dithering(intel_dp, mode, false))
301 static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
303 struct drm_device *dev = intel_dp_to_dev(intel_dp);
309 static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
311 struct drm_device *dev = intel_dp_to_dev(intel_dp);
318 intel_dp_check_edp(struct intel_dp *intel_dp)
320 struct drm_device *dev = intel_dp_to_dev(intel_dp);
323 if (!is_edp(intel_dp))
325 if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
334 intel_dp_aux_ch(struct intel_dp *intel_dp,
338 uint32_t output_reg = intel_dp->output_reg;
339 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
373 intel_dp_check_edp(intel_dp);
381 if (is_cpu_edp(intel_dp)) {
487 intel_dp_aux_native_write(struct intel_dp *intel_dp,
495 intel_dp_check_edp(intel_dp);
505 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
520 intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
523 return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
528 intel_dp_aux_native_read(struct intel_dp *intel_dp,
538 intel_dp_check_edp(intel_dp);
548 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
571 struct intel_dp *intel_dp = algo_data->priv;
580 intel_dp_check_edp(intel_dp);
612 ret = intel_dp_aux_ch(intel_dp,
662 intel_dp_i2c_init(struct intel_dp *intel_dp,
669 ironlake_edp_panel_vdd_on(intel_dp);
671 intel_dp_i2c_aux_ch, intel_dp, &intel_dp->dp_iic_bus,
672 &intel_dp->adapter);
673 ironlake_edp_panel_vdd_off(intel_dp, false);
683 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
684 struct intel_connector *intel_connector = intel_dp->attached_connector;
686 int max_lane_count = drm_dp_max_lane_count(intel_dp->dpcd);
687 int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
691 if (is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
706 if (!intel_dp_adjust_dithering(intel_dp, adjusted_mode, true))
717 intel_dp->link_bw = bws[clock];
718 intel_dp->lane_count = lane_count;
719 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
722 intel_dp->link_bw, intel_dp->lane_count,
773 struct intel_dp *intel_dp;
786 intel_dp = enc_to_intel_dp(&intel_encoder->base);
791 lane_count = intel_dp->lane_count;
838 void intel_dp_init_link_config(struct intel_dp *intel_dp)
840 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
841 intel_dp->link_configuration[0] = intel_dp->link_bw;
842 intel_dp->link_configuration[1] = intel_dp->lane_count;
843 intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
847 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
848 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
849 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
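
The matches at lines 838-849 fill intel_dp->link_configuration, a byte buffer laid out like the sink's DPCD link-configuration registers; the whole buffer is later pushed in one AUX write starting at DP_LINK_BW_SET (line 1804 below). A standalone sketch of that layout, with the DPCD bit values defined locally from the standard DisplayPort register definitions rather than taken from the matches above:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Standard DPCD values, defined locally for the sketch. */
    #define DP_LINK_CONFIGURATION_SIZE      9
    #define DP_SET_ANSI_8B10B               (1 << 0)
    #define DP_LANE_COUNT_ENHANCED_FRAME_EN (1 << 7)

    struct link_cfg_sketch {
            uint8_t link_bw;            /* e.g. 0x0a for 2.7 GHz        */
            uint8_t lane_count;         /* 1, 2 or 4                    */
            int     enhanced_framing;
            uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
    };

    /* Mirrors intel_dp_init_link_config() at lines 838-849:
     * byte 0 -> DP_LINK_BW_SET, byte 1 -> DP_LANE_COUNT_SET,
     * byte 8 -> DP_MAIN_LINK_CHANNEL_CODING_SET (ANSI 8b/10b). */
    static void init_link_config(struct link_cfg_sketch *cfg)
    {
            memset(cfg->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
            cfg->link_configuration[0] = cfg->link_bw;
            cfg->link_configuration[1] = cfg->lane_count;
            cfg->link_configuration[8] = DP_SET_ANSI_8B10B;

            if (cfg->enhanced_framing)
                    cfg->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
    }

    int main(void)
    {
            struct link_cfg_sketch cfg = { .link_bw = 0x0a, .lane_count = 4,
                                           .enhanced_framing = 1 };

            init_link_config(&cfg);
            printf("BW_SET=0x%02x LANE_COUNT_SET=0x%02x CODING_SET=0x%02x\n",
                   cfg.link_configuration[0], cfg.link_configuration[1],
                   cfg.link_configuration[8]);
            return 0;
    }
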
859 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
883 intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
886 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
888 switch (intel_dp->lane_count) {
890 intel_dp->DP |= DP_PORT_WIDTH_1;
893 intel_dp->DP |= DP_PORT_WIDTH_2;
896 intel_dp->DP |= DP_PORT_WIDTH_4;
899 if (intel_dp->has_audio) {
902 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
906 intel_dp_init_link_config(intel_dp);
910 if (is_cpu_edp(intel_dp) && IS_GEN7(dev) && !IS_VALLEYVIEW(dev)) {
912 intel_dp->DP |= DP_SYNC_HS_HIGH;
914 intel_dp->DP |= DP_SYNC_VS_HIGH;
915 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
917 if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
918 intel_dp->DP |= DP_ENHANCED_FRAMING;
920 intel_dp->DP |= intel_crtc->pipe << 29;
924 intel_dp->DP |= DP_PLL_FREQ_160MHZ;
926 intel_dp->DP |= DP_PLL_FREQ_270MHZ;
927 } else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
928 intel_dp->DP |= intel_dp->color_range;
931 intel_dp->DP |= DP_SYNC_HS_HIGH;
933 intel_dp->DP |= DP_SYNC_VS_HIGH;
934 intel_dp->DP |= DP_LINK_TRAIN_OFF;
936 if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
937 intel_dp->DP |= DP_ENHANCED_FRAMING;
940 intel_dp->DP |= DP_PIPEB_SELECT;
942 if (is_cpu_edp(intel_dp)) {
945 intel_dp->DP |= DP_PLL_FREQ_160MHZ;
947 intel_dp->DP |= DP_PLL_FREQ_270MHZ;
950 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
963 static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
967 struct drm_device *dev = intel_dp_to_dev(intel_dp);
982 static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
985 ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
988 static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
991 ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
994 static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
997 ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
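
Lines 963-997 show three thin wrappers around ironlake_wait_panel_status(), differing only in the mask/value pair they wait for. A small model of that pattern; the status-register read is stubbed and the IDLE_* constants below are invented for illustration (their real definitions are not part of the matches above):

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative mask/value pairs; not the driver's real definitions. */
    #define IDLE_ON_MASK     0x80000000u
    #define IDLE_ON_VALUE    0x80000000u
    #define IDLE_OFF_MASK    0x80000000u
    #define IDLE_OFF_VALUE   0x00000000u
    #define IDLE_CYCLE_MASK  0x80000008u
    #define IDLE_CYCLE_VALUE 0x00000000u

    /* Stub for reading the panel power status register. */
    static uint32_t read_pp_status(void) { return 0; }

    /* Models ironlake_wait_panel_status(): poll until the masked status
     * matches the expected value (the real code also logs and times out). */
    static void wait_panel_status(uint32_t mask, uint32_t value)
    {
            int timeout = 5000;

            while ((read_pp_status() & mask) != value && --timeout)
                    ;
            printf("panel status: mask=0x%08x value=0x%08x %s\n",
                   mask, value, timeout ? "reached" : "timed out");
    }

    /* The three wrappers matched at lines 982-997 above. */
    static void wait_panel_on(void)          { wait_panel_status(IDLE_ON_MASK, IDLE_ON_VALUE); }
    static void wait_panel_off(void)         { wait_panel_status(IDLE_OFF_MASK, IDLE_OFF_VALUE); }
    static void wait_panel_power_cycle(void) { wait_panel_status(IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE); }

    int main(void)
    {
            wait_panel_on();
            wait_panel_off();
            wait_panel_power_cycle();
            return 0;
    }
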
1014 void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
1016 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1020 if (!is_edp(intel_dp))
1024 WARN(intel_dp->want_panel_vdd,
1027 intel_dp->want_panel_vdd = true;
1029 if (ironlake_edp_have_panel_vdd(intel_dp)) {
1034 if (!ironlake_edp_have_panel_power(intel_dp))
1035 ironlake_wait_panel_power_cycle(intel_dp);
1047 if (!ironlake_edp_have_panel_power(intel_dp)) {
1049 DRM_MSLEEP(intel_dp->panel_power_up_delay);
1053 static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
1055 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1059 if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
1069 DRM_MSLEEP(intel_dp->panel_power_down_delay);
1075 struct intel_dp *intel_dp = arg;
1076 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1079 ironlake_panel_vdd_off_sync(intel_dp);
1083 void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
1085 if (!is_edp(intel_dp))
1088 DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
1089 WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");
1091 intel_dp->want_panel_vdd = false;
1094 ironlake_panel_vdd_off_sync(intel_dp);
1101 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
1105 &intel_dp->panel_vdd_work,
1106 msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
1110 void ironlake_edp_panel_on(struct intel_dp *intel_dp)
1112 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1116 if (!is_edp(intel_dp))
1121 if (ironlake_edp_have_panel_power(intel_dp)) {
1126 ironlake_wait_panel_power_cycle(intel_dp);
1143 ironlake_wait_panel_on(intel_dp);
1152 void ironlake_edp_panel_off(struct intel_dp *intel_dp)
1154 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1158 if (!is_edp(intel_dp))
1163 WARN(!intel_dp->want_panel_vdd, "Need VDD to turn off panel\n");
1172 intel_dp->want_panel_vdd = false;
1174 ironlake_wait_panel_off(intel_dp);
1177 void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
1179 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
1185 if (!is_edp(intel_dp))
1195 DRM_MSLEEP(intel_dp->backlight_on_delay);
1204 void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
1206 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1210 if (!is_edp(intel_dp))
1220 DRM_MSLEEP(intel_dp->backlight_off_delay);
1223 static void ironlake_edp_pll_on(struct intel_dp *intel_dp)
1225 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
1239 /* We don't adjust intel_dp->DP while tearing down the link, to
1242 intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
1243 intel_dp->DP |= DP_PLL_ENABLE;
1244 I915_WRITE(DP_A, intel_dp->DP);
1249 static void ironlake_edp_pll_off(struct intel_dp *intel_dp)
1251 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
1266 * intel_dp->DP because link_down must not change that (otherwise link
1275 void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
1280 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1284 ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
1294 ret = intel_dp_aux_native_write_1(intel_dp,
1307 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1310 u32 tmp = I915_READ(intel_dp->output_reg);
1315 if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
1317 } else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
1324 switch (intel_dp->output_reg) {
1347 intel_dp->output_reg);
1355 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1359 ironlake_edp_panel_vdd_on(intel_dp);
1360 ironlake_edp_backlight_off(intel_dp);
1361 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
1362 ironlake_edp_panel_off(intel_dp);
1365 if (!is_cpu_edp(intel_dp))
1366 intel_dp_link_down(intel_dp);
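
Lines 1355-1375 give the power-down ordering: VDD is forced on so the panel can be switched off safely, the backlight goes off before the panel, and a CPU eDP link is only taken down in the post-disable step, after the pipe. A hedged sketch of that sequence with the hardware and AUX calls stubbed out as prints:

    #include <stdbool.h>
    #include <stdio.h>

    /* Stubs for the steps matched at lines 1355-1375 above. */
    static void edp_panel_vdd_on(void)  { puts("force eDP VDD on"); }
    static void edp_backlight_off(void) { puts("backlight off"); }
    static void set_sink_dpms(void)     { puts("sink DPMS write over AUX"); }
    static void edp_panel_off(void)     { puts("panel power off"); }
    static void dp_link_down(void)      { puts("link down"); }
    static void edp_pll_off(void)       { puts("eDP PLL off"); }

    /* Ordering as matched at lines 1359-1366 (disable) and 1373-1375
     * (post-disable): VDD stays up while the panel is switched off, and
     * a CPU eDP link is only torn down after the pipe, in post-disable. */
    static void disable_dp(bool cpu_edp)
    {
            edp_panel_vdd_on();
            edp_backlight_off();
            set_sink_dpms();
            edp_panel_off();

            if (!cpu_edp)
                    dp_link_down();
    }

    static void post_disable_dp(bool cpu_edp)
    {
            if (cpu_edp) {
                    dp_link_down();
                    edp_pll_off();
            }
    }

    int main(void)
    {
            disable_dp(true);
            post_disable_dp(true);
            return 0;
    }
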
1371 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1373 if (is_cpu_edp(intel_dp)) {
1374 intel_dp_link_down(intel_dp);
1375 ironlake_edp_pll_off(intel_dp);
1381 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1384 uint32_t dp_reg = I915_READ(intel_dp->output_reg);
1389 ironlake_edp_panel_vdd_on(intel_dp);
1390 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
1391 intel_dp_start_link_train(intel_dp);
1392 ironlake_edp_panel_on(intel_dp);
1393 ironlake_edp_panel_vdd_off(intel_dp, true);
1394 intel_dp_complete_link_train(intel_dp);
1395 ironlake_edp_backlight_on(intel_dp);
1400 struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);
1402 if (is_cpu_edp(intel_dp))
1403 ironlake_edp_pll_on(intel_dp);
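
Lines 1381-1403 give the mirror-image power-up ordering: the eDP PLL first (CPU eDP only, in pre-enable), then VDD and the AUX traffic for sink DPMS and clock-recovery training, panel power, channel equalization, and the backlight last. A sketch of that sequence, again with the hardware calls stubbed:

    #include <stdbool.h>
    #include <stdio.h>

    /* Stubs for the calls matched at lines 1381-1403 above. */
    static void edp_pll_on(void)          { puts("eDP PLL on"); }
    static void edp_panel_vdd_on(void)    { puts("force eDP VDD on"); }
    static void edp_panel_vdd_off(void)   { puts("release eDP VDD"); }
    static void sink_dpms_on(void)        { puts("sink DPMS on (AUX)"); }
    static void start_link_train(void)    { puts("link training: clock recovery"); }
    static void complete_link_train(void) { puts("link training: channel EQ"); }
    static void edp_panel_on(void)        { puts("panel power on"); }
    static void edp_backlight_on(void)    { puts("backlight on"); }

    /* Ordering taken from intel_pre_enable_dp()/intel_enable_dp() as
     * matched at lines 1381-1403: PLL first for CPU eDP, then VDD plus
     * AUX traffic, then panel power, and the backlight only after
     * training has completed. */
    static void enable_dp(bool cpu_edp)
    {
            if (cpu_edp)
                    edp_pll_on();   /* pre-enable step, line 1403 */

            edp_panel_vdd_on();
            sink_dpms_on();
            start_link_train();
            edp_panel_on();
            edp_panel_vdd_off();
            complete_link_train();
            edp_backlight_on();
    }

    int main(void)
    {
            enable_dp(true);
            return 0;
    }
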
1411 intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
1421 ret = intel_dp_aux_native_read(intel_dp, address, recv,
1436 intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1438 return intel_dp_aux_native_read_retry(intel_dp,
1462 intel_dp_voltage_max(struct intel_dp *intel_dp)
1464 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1466 if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
1468 else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
1475 intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
1477 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1491 } else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) {
1517 intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1525 for (lane = 0; lane < intel_dp->lane_count; lane++) {
1535 voltage_max = intel_dp_voltage_max(intel_dp);
1539 preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
1544 intel_dp->train_set[lane] = v | p;
1679 intel_dp_set_link_train(struct intel_dp *intel_dp,
1683 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
1729 (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
1768 I915_WRITE(intel_dp->output_reg, dp_reg_value);
1769 POSTING_READ(intel_dp->output_reg);
1771 intel_dp_aux_native_write_1(intel_dp,
1777 ret = intel_dp_aux_native_write(intel_dp,
1779 intel_dp->train_set,
1780 intel_dp->lane_count);
1781 if (ret != intel_dp->lane_count)
1790 intel_dp_start_link_train(struct intel_dp *intel_dp)
1792 struct drm_encoder *encoder = &dp_to_dig_port(intel_dp)->base.base;
1798 uint32_t DP = intel_dp->DP;
1804 intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
1805 intel_dp->link_configuration,
1810 memset(intel_dp->train_set, 0, 4);
1816 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1822 intel_dp->train_set[0]);
1824 } else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) {
1825 signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
1827 } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
1828 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
1831 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
1838 if (!intel_dp_set_link_train(intel_dp, DP,
1843 drm_dp_link_train_clock_recovery_delay(intel_dp->dpcd);
1844 if (!intel_dp_get_link_status(intel_dp, link_status)) {
1849 if (drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) {
1856 for (i = 0; i < intel_dp->lane_count; i++)
1857 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1859 if (i == intel_dp->lane_count) {
1865 memset(intel_dp->train_set, 0, 4);
1871 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1879 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1881 /* Compute new intel_dp->train_set as requested by target */
1882 intel_get_adjust_train(intel_dp, link_status);
1885 intel_dp->DP = DP;
1889 intel_dp_complete_link_train(struct intel_dp *intel_dp)
1891 struct drm_device *dev = intel_dp_to_dev(intel_dp);
1894 uint32_t DP = intel_dp->DP;
1901 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1907 intel_dp_link_down(intel_dp);
1912 signal_levels = intel_dp_signal_levels_hsw(intel_dp->train_set[0]);
1914 } else if (IS_GEN7(dev) && is_cpu_edp(intel_dp) && !IS_VALLEYVIEW(dev)) {
1915 signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
1917 } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
1918 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
1921 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
1926 if (!intel_dp_set_link_train(intel_dp, DP,
1931 drm_dp_link_train_channel_eq_delay(intel_dp->dpcd);
1932 if (!intel_dp_get_link_status(intel_dp, link_status))
1936 if (!drm_dp_clock_recovery_ok(link_status, intel_dp->lane_count)) {
1937 intel_dp_start_link_train(intel_dp);
1942 if (drm_dp_channel_eq_ok(link_status, intel_dp->lane_count)) {
1949 intel_dp_link_down(intel_dp);
1950 intel_dp_start_link_train(intel_dp);
1956 /* Compute new intel_dp->train_set as requested by target */
1957 intel_get_adjust_train(intel_dp, link_status);
1964 intel_dp_set_link_train(intel_dp, DP, DP_TRAINING_PATTERN_DISABLE);
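
Lines 1790-1964 cover both training phases. A compact model of the control flow they imply: clock recovery adjusts the per-lane voltage swing and pre-emphasis from the sink's feedback until drm_dp_clock_recovery_ok() passes, then channel equalization repeats the adjust loop and restarts from scratch if clock recovery is lost. The AUX status reads are replaced by canned answers here and the retry limits are chosen arbitrarily; the driver's exact fallback conditions differ.

    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_TRIES 5

    /* Stand-ins for the sink status checks normally read over AUX
     * (drm_dp_clock_recovery_ok() / drm_dp_channel_eq_ok() above). */
    static bool clock_recovery_ok(int attempt) { return attempt >= 2; }
    static bool channel_eq_ok(int attempt)     { return attempt >= 1; }

    /* Stand-in for intel_get_adjust_train(): the driver recomputes
     * intel_dp->train_set[] from the per-lane adjust requests in
     * link_status (lines 1517-1544). */
    static void adjust_train_set(void) { puts("  adjust voltage/pre-emphasis"); }

    static bool train_clock_recovery(void)
    {
            puts("set training pattern 1");
            for (int i = 0; i < MAX_TRIES; i++) {
                    /* real code: wait the DPCD clock-recovery delay,
                     * then read the link status over AUX */
                    if (clock_recovery_ok(i)) {
                            puts("clock recovery done");
                            return true;
                    }
                    adjust_train_set();
            }
            return false;
    }

    static bool train_channel_eq(void)
    {
            puts("set training pattern 2");
            for (int tries = 0; tries < MAX_TRIES; tries++) {
                    /* real code: wait the channel-eq delay, re-read the
                     * status, and restart from pattern 1 if clock
                     * recovery was lost */
                    if (channel_eq_ok(tries)) {
                            puts("channel EQ done, disable training pattern");
                            return true;
                    }
                    adjust_train_set();
            }
            return false;
    }

    int main(void)
    {
            if (train_clock_recovery())
                    train_channel_eq();
            return 0;
    }
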
1968 intel_dp_link_down(struct intel_dp *intel_dp)
1970 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
1973 uint32_t DP = intel_dp->DP;
1993 if (WARN_ON((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0))
1998 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
2000 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
2003 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
2005 POSTING_READ(intel_dp->output_reg);
2010 I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
2022 I915_WRITE(intel_dp->output_reg, DP);
2036 POSTING_READ(intel_dp->output_reg);
2043 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
2044 POSTING_READ(intel_dp->output_reg);
2045 DRM_MSLEEP(intel_dp->panel_power_down_delay);
2049 intel_dp_get_dpcd(struct intel_dp *intel_dp)
2051 if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
2052 sizeof(intel_dp->dpcd)) == 0)
2055 if (intel_dp->dpcd[DP_DPCD_REV] == 0)
2058 if (!(intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
2062 if (intel_dp->dpcd[DP_DPCD_REV] == 0x10)
2065 if (intel_dp_aux_native_read_retry(intel_dp, DP_DOWNSTREAM_PORT_0,
2066 intel_dp->downstream_ports,
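
The cluster at lines 2049-2066 is the DPCD probe: read the receiver capability block starting at address 0x000, bail out if the read fails or DP_DPCD_REV is zero, and only fetch DP_DOWNSTREAM_PORT_0 when the sink reports a downstream port and its DPCD revision is new enough to describe one. A hedged standalone model of that decision sequence; the AUX read is faked with canned bytes, the DPCD offsets are the standard ones, and the early return for revision 0x10 is inferred from line 2062 rather than shown:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Standard DPCD offsets used by the matches above. */
    #define DP_DPCD_REV               0x000
    #define DP_DOWNSTREAMPORT_PRESENT 0x005
    #define DP_DWN_STRM_PORT_PRESENT  (1 << 0)

    #define DPCD_SIZE 15

    /* Fake AUX read: pretends to be a DP 1.1 sink with no downstream port. */
    static bool aux_read(uint16_t addr, uint8_t *buf, int len)
    {
            static const uint8_t fake[DPCD_SIZE] = { [DP_DPCD_REV] = 0x11 };

            if (addr + len > DPCD_SIZE)
                    return false;
            memcpy(buf, fake + addr, len);
            return true;
    }

    /* Models the probe order at lines 2049-2066. */
    static bool get_dpcd(uint8_t dpcd[DPCD_SIZE])
    {
            if (!aux_read(0x000, dpcd, DPCD_SIZE))
                    return false;   /* AUX transfer failed              */

            if (dpcd[DP_DPCD_REV] == 0)
                    return false;   /* no DPCD behind the port          */

            if (!(dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_PRESENT))
                    return true;    /* plain DP sink, nothing more to do */

            if (dpcd[DP_DPCD_REV] == 0x10)
                    return true;    /* DPCD 1.0: no per-port info       */

            /* Otherwise the driver reads DP_DOWNSTREAM_PORT_0 here. */
            return true;
    }

    int main(void)
    {
            uint8_t dpcd[DPCD_SIZE];

            if (get_dpcd(dpcd))
                    printf("dpcd probe ok, rev 0x%02x\n", dpcd[DP_DPCD_REV]);
            else
                    printf("dpcd probe failed\n");
            return 0;
    }
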
2074 intel_dp_probe_oui(struct intel_dp *intel_dp)
2078 if (!(intel_dp->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
2081 ironlake_edp_panel_vdd_on(intel_dp);
2083 if (intel_dp_aux_native_read_retry(intel_dp, DP_SINK_OUI, buf, 3))
2087 if (intel_dp_aux_native_read_retry(intel_dp, DP_BRANCH_OUI, buf, 3))
2091 ironlake_edp_panel_vdd_off(intel_dp, false);
2095 intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
2099 ret = intel_dp_aux_native_read_retry(intel_dp,
2109 intel_dp_handle_test_request(struct intel_dp *intel_dp)
2112 intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_NAK);
2125 intel_dp_check_link_status(struct intel_dp *intel_dp)
2127 struct intel_encoder *intel_encoder = &dp_to_dig_port(intel_dp)->base;
2138 if (!intel_dp_get_link_status(intel_dp, link_status)) {
2139 intel_dp_link_down(intel_dp);
2144 if (!intel_dp_get_dpcd(intel_dp)) {
2145 intel_dp_link_down(intel_dp);
2150 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
2151 intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
2153 intel_dp_aux_native_write_1(intel_dp,
2158 intel_dp_handle_test_request(intel_dp);
2163 if (!drm_dp_channel_eq_ok(link_status, intel_dp->lane_count)) {
2166 intel_dp_start_link_train(intel_dp);
2167 intel_dp_complete_link_train(intel_dp);
2173 intel_dp_detect_dpcd(struct intel_dp *intel_dp)
2175 uint8_t *dpcd = intel_dp->dpcd;
2179 if (!intel_dp_get_dpcd(intel_dp))
2187 hpd = !!(intel_dp->downstream_ports[0] & DP_DS_PORT_HPD);
2190 if (!intel_dp_aux_native_read_retry(intel_dp, DP_SINK_COUNT,
2198 if (drm_probe_ddc(intel_dp->adapter))
2202 type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
2212 ironlake_dp_detect(struct intel_dp *intel_dp)
2214 struct drm_device *dev = intel_dp_to_dev(intel_dp);
2218 if (is_edp(intel_dp)) {
2225 return intel_dp_detect_dpcd(intel_dp);
2229 g4x_dp_detect(struct intel_dp *intel_dp)
2231 struct drm_device *dev = intel_dp_to_dev(intel_dp);
2235 switch (intel_dp->output_reg) {
2252 return intel_dp_detect_dpcd(intel_dp);
2309 struct intel_dp *intel_dp = intel_attached_dp(connector);
2310 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
2315 char dpcd_hex_dump[sizeof(intel_dp->dpcd) * 3];
2317 intel_dp->has_audio = false;
2320 status = ironlake_dp_detect(intel_dp);
2322 status = g4x_dp_detect(intel_dp);
2324 hex_dump_to_buffer(intel_dp->dpcd, sizeof(intel_dp->dpcd),
2331 intel_dp_probe_oui(intel_dp);
2333 if (intel_dp->force_audio != HDMI_AUDIO_AUTO) {
2334 intel_dp->has_audio = (intel_dp->force_audio == HDMI_AUDIO_ON);
2336 edid = intel_dp_get_edid(connector, intel_dp->adapter);
2338 intel_dp->has_audio = drm_detect_monitor_audio(edid);
2350 struct intel_dp *intel_dp = intel_attached_dp(connector);
2358 ret = intel_dp_get_edid_modes(connector, intel_dp->adapter);
2363 if (is_edp(intel_dp) && intel_connector->panel.fixed_mode) {
2378 struct intel_dp *intel_dp = intel_attached_dp(connector);
2382 edid = intel_dp_get_edid(connector, intel_dp->adapter);
2399 struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
2410 if (i == intel_dp->force_audio)
2413 intel_dp->force_audio = i;
2420 if (has_audio == intel_dp->has_audio)
2423 intel_dp->has_audio = has_audio;
2428 if (val == !!intel_dp->color_range)
2431 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
2435 if (is_edp(intel_dp) &&
2466 struct intel_dp *intel_dp = intel_attached_dp(connector);
2471 if (is_edp(intel_dp))
2481 struct intel_dp *intel_dp = &intel_dig_port->dp;
2484 if (intel_dp->dp_iic_bus != NULL) {
2485 if (intel_dp->adapter != NULL) {
2486 device_delete_child(intel_dp->dp_iic_bus,
2487 intel_dp->adapter);
2489 device_delete_child(dev->dev, intel_dp->dp_iic_bus);
2492 if (is_edp(intel_dp)) {
2496 &intel_dp->panel_vdd_work, NULL);
2498 &intel_dp->panel_vdd_work);
2499 ironlake_panel_vdd_off_sync(intel_dp);
2531 struct intel_dp *intel_dp = enc_to_intel_dp(&intel_encoder->base);
2533 intel_dp_check_link_status(intel_dp);
2542 struct intel_dp *intel_dp;
2545 intel_dp = enc_to_intel_dp(&intel_encoder->base);
2549 return intel_dp->output_reg;
2576 intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
2583 if (is_edp(intel_dp)) {
2595 struct intel_dp *intel_dp,
2660 intel_dp->panel_power_up_delay = get_delay(t1_t3);
2661 intel_dp->backlight_on_delay = get_delay(t8);
2662 intel_dp->backlight_off_delay = get_delay(t9);
2663 intel_dp->panel_power_down_delay = get_delay(t10);
2664 intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
2668 intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
2669 intel_dp->panel_power_cycle_delay);
2672 intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
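
Lines 2660-2672 map the panel power sequencer timings (T1/T3, T8, T9, T10, T11/T12) onto the driver's delay fields via get_delay(). A sketch of that mapping; treating the raw values as 100 µs units rounded up to whole milliseconds is an assumption here, only the field-to-delay pairing is visible in the matches above:

    #include <stdio.h>

    /* Panel power sequencer timings as read from hardware/VBT, assumed
     * here to be stored in 100 us units. */
    struct pps_delays_100us {
            unsigned t1_t3;   /* panel power on -> ready   */
            unsigned t8;      /* panel on -> backlight on  */
            unsigned t9;      /* backlight off             */
            unsigned t10;     /* panel power off           */
            unsigned t11_t12; /* power cycle               */
    };

    /* Round a 100 us count up to whole milliseconds (assumed behaviour
     * of the driver's get_delay() macro). */
    static unsigned get_delay(unsigned hundred_us)
    {
            return (hundred_us + 9) / 10;
    }

    int main(void)
    {
            struct pps_delays_100us seq = { 2100, 500, 500, 5000, 6000 };

            /* Mirrors the assignments at lines 2660-2664 above. */
            unsigned panel_power_up_delay    = get_delay(seq.t1_t3);
            unsigned backlight_on_delay      = get_delay(seq.t8);
            unsigned backlight_off_delay     = get_delay(seq.t9);
            unsigned panel_power_down_delay  = get_delay(seq.t10);
            unsigned panel_power_cycle_delay = get_delay(seq.t11_t12);

            printf("panel power up %u ms, down %u ms, cycle %u ms\n",
                   panel_power_up_delay, panel_power_down_delay,
                   panel_power_cycle_delay);
            printf("backlight on %u ms, off %u ms\n",
                   backlight_on_delay, backlight_off_delay);
            return 0;
    }
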
2680 struct intel_dp *intel_dp,
2701 if (is_cpu_edp(intel_dp))
2722 struct intel_dp *intel_dp = &intel_dig_port->dp;
2733 intel_dp->DP = I915_READ(intel_dp->output_reg);
2734 intel_dp->attached_connector = intel_connector;
2738 intel_dp->is_pch_edp = true;
2747 } else if (port == PORT_A || is_pch_edp(intel_dp)) {
2765 TIMEOUT_TASK_INIT(dev_priv->wq, &intel_dp->panel_vdd_work, 0,
2766 ironlake_panel_vdd_work, intel_dp);
2798 if (is_edp(intel_dp))
2799 intel_dp_init_panel_power_sequencer(dev, intel_dp, &power_seq);
2801 intel_dp_i2c_init(intel_dp, intel_connector, name);
2804 if (is_edp(intel_dp)) {
2810 ironlake_edp_panel_vdd_on(intel_dp);
2811 ret = intel_dp_get_dpcd(intel_dp);
2812 ironlake_edp_panel_vdd_off(intel_dp, false);
2815 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
2817 intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
2828 intel_dp_init_panel_power_sequencer_registers(dev, intel_dp,
2831 ironlake_edp_panel_vdd_on(intel_dp);
2832 edid = drm_get_edid(connector, intel_dp->adapter);
2864 ironlake_edp_panel_vdd_off(intel_dp, false);
2867 if (is_edp(intel_dp)) {
2872 intel_dp_add_properties(intel_dp, connector);