Lines Matching refs:intel_dp
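(The fragments below appear to come from the i915 display driver's intel_dp.c; each entry is the matching source line, prefixed with its line number in that file.)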

62 #include "intel_dp.h"
115 * @intel_dp: DP struct
122 bool intel_dp_is_edp(struct intel_dp *intel_dp)
124 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
129 bool intel_dp_as_sdp_supported(struct intel_dp *intel_dp)
131 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
134 drm_dp_as_sdp_supported(&intel_dp->aux, intel_dp->dpcd);
137 static void intel_dp_unset_edid(struct intel_dp *intel_dp);
169 static int max_dprx_rate(struct intel_dp *intel_dp)
171 if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
172 return drm_dp_tunnel_max_dprx_rate(intel_dp->tunnel);
174 return drm_dp_bw_code_to_link_rate(intel_dp->dpcd[DP_MAX_LINK_RATE]);
177 static int max_dprx_lane_count(struct intel_dp *intel_dp)
179 if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
180 return drm_dp_tunnel_max_dprx_lane_count(intel_dp->tunnel);
182 return drm_dp_max_lane_count(intel_dp->dpcd);
185 static void intel_dp_set_default_sink_rates(struct intel_dp *intel_dp)
187 intel_dp->sink_rates[0] = 162000;
188 intel_dp->num_sink_rates = 1;
192 static void intel_dp_set_dpcd_sink_rates(struct intel_dp *intel_dp)
200 if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_CAN_DO_MAX_LINK_RATE_3_24_GBPS)) {
204 memcpy(intel_dp->sink_rates, quirk_rates, sizeof(quirk_rates));
205 intel_dp->num_sink_rates = ARRAY_SIZE(quirk_rates);
213 max_rate = max_dprx_rate(intel_dp);
214 max_lttpr_rate = drm_dp_lttpr_max_link_rate(intel_dp->lttpr_common_caps);
221 intel_dp->sink_rates[i] = dp_rates[i];
228 if (drm_dp_128b132b_supported(intel_dp->dpcd)) {
231 BUILD_BUG_ON(ARRAY_SIZE(intel_dp->sink_rates) < ARRAY_SIZE(dp_rates) + 3);
233 drm_dp_dpcd_readb(&intel_dp->aux,
236 if (drm_dp_lttpr_count(intel_dp->lttpr_common_caps)) {
238 if (intel_dp->lttpr_common_caps[0] >= 0x20 &&
239 intel_dp->lttpr_common_caps[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
243 uhbr_rates &= intel_dp->lttpr_common_caps[DP_PHY_REPEATER_128B132B_RATES -
252 intel_dp->sink_rates[i++] = 1000000;
254 intel_dp->sink_rates[i++] = 1350000;
256 intel_dp->sink_rates[i++] = 2000000;
259 intel_dp->num_sink_rates = i;
262 static void intel_dp_set_sink_rates(struct intel_dp *intel_dp)
264 struct intel_connector *connector = intel_dp->attached_connector;
265 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
268 intel_dp_set_dpcd_sink_rates(intel_dp);
270 if (intel_dp->num_sink_rates)
273 drm_err(&dp_to_i915(intel_dp)->drm,
278 intel_dp_set_default_sink_rates(intel_dp);
281 static void intel_dp_set_default_max_sink_lane_count(struct intel_dp *intel_dp)
283 intel_dp->max_sink_lane_count = 1;
286 static void intel_dp_set_max_sink_lane_count(struct intel_dp *intel_dp)
288 struct intel_connector *connector = intel_dp->attached_connector;
289 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
292 intel_dp->max_sink_lane_count = max_dprx_lane_count(intel_dp);
294 switch (intel_dp->max_sink_lane_count) {
301 drm_err(&dp_to_i915(intel_dp)->drm,
305 intel_dp->max_sink_lane_count);
307 intel_dp_set_default_max_sink_lane_count(intel_dp);
325 static int intel_dp_common_len_rate_limit(const struct intel_dp *intel_dp,
328 return intel_dp_rate_limit_len(intel_dp->common_rates,
329 intel_dp->num_common_rates, max_rate);
332 static int intel_dp_common_rate(struct intel_dp *intel_dp, int index)
334 if (drm_WARN_ON(&dp_to_i915(intel_dp)->drm,
335 index < 0 || index >= intel_dp->num_common_rates))
338 return intel_dp->common_rates[index];
342 int intel_dp_max_common_rate(struct intel_dp *intel_dp)
344 return intel_dp_common_rate(intel_dp, intel_dp->num_common_rates - 1);
359 int intel_dp_max_common_lane_count(struct intel_dp *intel_dp)
361 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
363 int sink_max = intel_dp->max_sink_lane_count;
365 int lttpr_max = drm_dp_lttpr_max_lane_count(intel_dp->lttpr_common_caps);
373 int intel_dp_max_lane_count(struct intel_dp *intel_dp)
375 switch (intel_dp->max_link_lane_count) {
379 return intel_dp->max_link_lane_count;
381 MISSING_CASE(intel_dp->max_link_lane_count);
418 * @intel_dp: Intel DP object
423 * account any BW limitations by a DP tunnel attached to @intel_dp.
427 int intel_dp_max_link_data_rate(struct intel_dp *intel_dp,
432 if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
434 drm_dp_tunnel_available_bw(intel_dp->tunnel));
439 bool intel_dp_has_bigjoiner(struct intel_dp *intel_dp)
441 struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
450 static int dg2_max_source_rate(struct intel_dp *intel_dp)
452 return intel_dp_is_edp(intel_dp) ? 810000 : 1350000;
455 static int icl_max_source_rate(struct intel_dp *intel_dp)
457 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
459 if (intel_encoder_is_combo(encoder) && !intel_dp_is_edp(intel_dp))
465 static int ehl_max_source_rate(struct intel_dp *intel_dp)
467 if (intel_dp_is_edp(intel_dp))
473 static int mtl_max_source_rate(struct intel_dp *intel_dp)
475 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
483 static int vbt_max_link_rate(struct intel_dp *intel_dp)
485 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
490 if (intel_dp_is_edp(intel_dp)) {
491 struct intel_connector *connector = intel_dp->attached_connector;
504 intel_dp_set_source_rates(struct intel_dp *intel_dp)
527 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
534 intel_dp->source_rates || intel_dp->num_source_rates);
539 max_rate = mtl_max_source_rate(intel_dp);
544 max_rate = dg2_max_source_rate(intel_dp);
549 max_rate = ehl_max_source_rate(intel_dp);
551 max_rate = icl_max_source_rate(intel_dp);
567 vbt_max_rate = vbt_max_link_rate(intel_dp);
576 intel_dp->source_rates = source_rates;
577 intel_dp->num_source_rates = size;
615 static void intel_dp_set_common_rates(struct intel_dp *intel_dp)
617 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
620 !intel_dp->num_source_rates || !intel_dp->num_sink_rates);
622 intel_dp->num_common_rates = intersect_rates(intel_dp->source_rates,
623 intel_dp->num_source_rates,
624 intel_dp->sink_rates,
625 intel_dp->num_sink_rates,
626 intel_dp->common_rates);
629 if (drm_WARN_ON(&i915->drm, intel_dp->num_common_rates == 0)) {
630 intel_dp->common_rates[0] = 162000;
631 intel_dp->num_common_rates = 1;
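
The intersect_rates() helper called at lines 622-626 is not itself among the matches; as a minimal sketch of the idea those calls rely on, assuming both rate arrays are kept sorted in ascending order (hypothetical helper, not the driver's implementation):

	static int sketch_intersect_rates(const int *source_rates, int source_len,
					  const int *sink_rates, int sink_len,
					  int *common_rates)
	{
		int i = 0, j = 0, k = 0;

		/* two-pointer walk over two ascending-sorted rate arrays */
		while (i < source_len && j < sink_len) {
			if (source_rates[i] == sink_rates[j]) {
				common_rates[k++] = source_rates[i];
				i++;
				j++;
			} else if (source_rates[i] < sink_rates[j]) {
				i++;
			} else {
				j++;
			}
		}

		return k; /* number of common rates found */
	}
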
635 static bool intel_dp_link_params_valid(struct intel_dp *intel_dp, int link_rate,
644 link_rate > intel_dp->max_link_rate)
648 lane_count > intel_dp_max_lane_count(intel_dp))
654 static bool intel_dp_can_link_train_fallback_for_edp(struct intel_dp *intel_dp,
660 intel_panel_preferred_fixed_mode(intel_dp->attached_connector);
664 max_rate = intel_dp_max_link_data_rate(intel_dp, link_rate, lane_count);
671 int intel_dp_get_link_train_fallback_values(struct intel_dp *intel_dp,
674 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
681 if (intel_dp->is_mst) {
686 if (intel_dp_is_edp(intel_dp) && !intel_dp->use_max_params) {
689 intel_dp->use_max_params = true;
693 index = intel_dp_rate_index(intel_dp->common_rates,
694 intel_dp->num_common_rates,
697 if (intel_dp_is_edp(intel_dp) &&
698 !intel_dp_can_link_train_fallback_for_edp(intel_dp,
699 intel_dp_common_rate(intel_dp, index - 1),
705 intel_dp->max_link_rate = intel_dp_common_rate(intel_dp, index - 1);
706 intel_dp->max_link_lane_count = lane_count;
708 if (intel_dp_is_edp(intel_dp) &&
709 !intel_dp_can_link_train_fallback_for_edp(intel_dp,
710 intel_dp_max_common_rate(intel_dp),
716 intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
717 intel_dp->max_link_lane_count = lane_count >> 1;
940 static bool source_can_output(struct intel_dp *intel_dp,
943 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
968 dfp_can_convert_from_rgb(struct intel_dp *intel_dp,
971 if (!drm_dp_is_branch(intel_dp->dpcd))
975 return intel_dp->dfp.rgb_to_ycbcr;
978 return intel_dp->dfp.rgb_to_ycbcr &&
979 intel_dp->dfp.ycbcr_444_to_420;
985 dfp_can_convert_from_ycbcr444(struct intel_dp *intel_dp,
988 if (!drm_dp_is_branch(intel_dp->dpcd))
992 return intel_dp->dfp.ycbcr_444_to_420;
998 dfp_can_convert(struct intel_dp *intel_dp,
1004 return dfp_can_convert_from_rgb(intel_dp, sink_format);
1006 return dfp_can_convert_from_ycbcr444(intel_dp, sink_format);
1019 struct intel_dp *intel_dp = intel_attached_dp(connector);
1020 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1022 intel_dp->force_dsc_output_format;
1025 if (source_can_output(intel_dp, force_dsc_output_format) &&
1026 (!drm_dp_is_branch(intel_dp->dpcd) ||
1028 dfp_can_convert(intel_dp, force_dsc_output_format, sink_format)))
1035 dfp_can_convert_from_rgb(intel_dp, sink_format))
1039 dfp_can_convert_from_ycbcr444(intel_dp, sink_format))
1045 drm_WARN_ON(&i915->drm, !source_can_output(intel_dp, output_format));
1115 static int intel_dp_max_tmds_clock(struct intel_dp *intel_dp)
1117 struct intel_connector *connector = intel_dp->attached_connector;
1119 int max_tmds_clock = intel_dp->dfp.max_tmds_clock;
1129 intel_dp_tmds_clock_valid(struct intel_dp *intel_dp,
1141 min_tmds_clock = intel_dp->dfp.min_tmds_clock;
1142 max_tmds_clock = intel_dp_max_tmds_clock(intel_dp);
1158 struct intel_dp *intel_dp = intel_attached_dp(connector);
1164 if (intel_dp->dfp.pcon_max_frl_bw) {
1171 max_frl_bw = intel_dp->dfp.pcon_max_frl_bw;
1182 if (intel_dp->dfp.max_dotclock &&
1183 target_clock > intel_dp->dfp.max_dotclock)
1189 status = intel_dp_tmds_clock_valid(intel_dp, target_clock,
1198 status = intel_dp_tmds_clock_valid(intel_dp, target_clock,
1207 bool intel_dp_need_bigjoiner(struct intel_dp *intel_dp,
1211 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1213 if (!intel_dp_has_bigjoiner(intel_dp))
1225 struct intel_dp *intel_dp = intel_attached_dp(connector);
1247 if (intel_dp_is_edp(intel_dp) && fixed_mode) {
1255 if (intel_dp_need_bigjoiner(intel_dp, connector,
1266 max_link_clock = intel_dp_max_link_rate(intel_dp);
1267 max_lanes = intel_dp_max_lane_count(intel_dp);
1269 max_rate = intel_dp_max_link_data_rate(intel_dp, max_link_clock, max_lanes);
1291 if (intel_dp_is_edp(intel_dp)) {
1356 static void intel_dp_print_rates(struct intel_dp *intel_dp)
1358 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1365 intel_dp->source_rates, intel_dp->num_source_rates);
1369 intel_dp->sink_rates, intel_dp->num_sink_rates);
1373 intel_dp->common_rates, intel_dp->num_common_rates);
1378 intel_dp_max_link_rate(struct intel_dp *intel_dp)
1382 len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->max_link_rate);
1384 return intel_dp_common_rate(intel_dp, len - 1);
1387 int intel_dp_rate_select(struct intel_dp *intel_dp, int rate)
1389 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1390 int i = intel_dp_rate_index(intel_dp->sink_rates,
1391 intel_dp->num_sink_rates, rate);
1399 void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock,
1403 if (intel_dp->use_rate_select) {
1406 intel_dp_rate_select(intel_dp, port_clock);
1413 bool intel_dp_has_hdmi_sink(struct intel_dp *intel_dp)
1415 struct intel_connector *connector = intel_dp->attached_connector;
1420 static bool intel_dp_source_supports_fec(struct intel_dp *intel_dp,
1423 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1424 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1436 bool intel_dp_supports_fec(struct intel_dp *intel_dp,
1440 return intel_dp_source_supports_fec(intel_dp, pipe_config) &&
1455 static int intel_dp_hdmi_compute_bpc(struct intel_dp *intel_dp,
1478 intel_dp_has_hdmi_sink(intel_dp)) &&
1479 intel_dp_tmds_clock_valid(intel_dp, clock, bpc, crtc_state->sink_format,
1487 static int intel_dp_max_bpp(struct intel_dp *intel_dp,
1491 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1492 struct intel_connector *intel_connector = intel_dp->attached_connector;
1497 if (intel_dp->dfp.max_bpc)
1498 bpc = min_t(int, bpc, intel_dp->dfp.max_bpc);
1500 if (intel_dp->dfp.min_tmds_clock) {
1503 max_hdmi_bpc = intel_dp_hdmi_compute_bpc(intel_dp, crtc_state, bpc,
1512 if (intel_dp_is_edp(intel_dp)) {
1529 intel_dp_adjust_compliance_config(struct intel_dp *intel_dp,
1533 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1536 if (intel_dp->compliance.test_data.bpc != 0) {
1537 int bpp = 3 * intel_dp->compliance.test_data.bpc;
1546 if (intel_dp->compliance.test_type == DP_TEST_LINK_TRAINING) {
1552 if (intel_dp_link_params_valid(intel_dp, intel_dp->compliance.test_link_rate,
1553 intel_dp->compliance.test_lane_count)) {
1554 index = intel_dp_rate_index(intel_dp->common_rates,
1555 intel_dp->num_common_rates,
1556 intel_dp->compliance.test_link_rate);
1559 intel_dp->compliance.test_link_rate;
1561 intel_dp->compliance.test_lane_count;
1593 intel_dp_compute_link_config_wide(struct intel_dp *intel_dp,
1608 for (i = 0; i < intel_dp->num_common_rates; i++) {
1609 link_rate = intel_dp_common_rate(intel_dp, i);
1617 link_avail = intel_dp_max_link_data_rate(intel_dp,
1793 static int dsc_compute_link_config(struct intel_dp *intel_dp,
1803 for (i = 0; i < intel_dp->num_common_rates; i++) {
1804 link_rate = intel_dp_common_rate(intel_dp, i);
1885 static int dsc_src_max_compressed_bpp(struct intel_dp *intel_dp)
1887 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1904 icl_dsc_compute_link_config(struct intel_dp *intel_dp,
1923 ret = dsc_compute_link_config(intel_dp,
1945 xelpd_dsc_compute_link_config(struct intel_dp *intel_dp,
1955 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1972 if (intel_dp->force_dsc_fractional_bpp_en &&
1975 ret = dsc_compute_link_config(intel_dp,
1982 if (intel_dp->force_dsc_fractional_bpp_en &&
1992 static int dsc_compute_compressed_bpp(struct intel_dp *intel_dp,
2000 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2010 dsc_src_max_bpp = dsc_src_max_compressed_bpp(intel_dp);
2023 return xelpd_dsc_compute_link_config(intel_dp, connector, pipe_config, limits,
2025 return icl_dsc_compute_link_config(intel_dp, pipe_config, limits,
2055 int intel_dp_force_dsc_pipe_bpp(struct intel_dp *intel_dp,
2059 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2062 if (!intel_dp->force_dsc_bpc)
2065 forced_bpp = intel_dp->force_dsc_bpc * 3;
2068 drm_dbg_kms(&i915->drm, "Input DSC BPC forced to %d\n", intel_dp->force_dsc_bpc);
2073 intel_dp->force_dsc_bpc);
2078 static int intel_dp_dsc_compute_pipe_bpp(struct intel_dp *intel_dp,
2084 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2094 forced_bpp = intel_dp_force_dsc_pipe_bpp(intel_dp, conn_state, limits);
2097 ret = dsc_compute_compressed_bpp(intel_dp, connector, pipe_config,
2126 ret = dsc_compute_compressed_bpp(intel_dp, connector, pipe_config,
2137 static int intel_edp_dsc_compute_pipe_bpp(struct intel_dp *intel_dp,
2142 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2149 forced_bpp = intel_dp_force_dsc_pipe_bpp(intel_dp, conn_state, limits);
2172 dsc_src_max_bpp = dsc_src_max_compressed_bpp(intel_dp);
2190 int intel_dp_dsc_compute_config(struct intel_dp *intel_dp,
2197 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
2206 (!intel_dp_is_edp(intel_dp) &&
2207 intel_dp_supports_fec(intel_dp, connector, pipe_config));
2222 if (intel_dp_is_edp(intel_dp))
2223 ret = intel_edp_dsc_compute_pipe_bpp(intel_dp, pipe_config,
2226 ret = intel_dp_dsc_compute_pipe_bpp(intel_dp, pipe_config,
2236 if (intel_dp_is_edp(intel_dp)) {
2291 * @intel_dp: intel DP
2302 intel_dp_compute_config_link_bpp_limits(struct intel_dp *intel_dp,
2311 const struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
2350 intel_dp_compute_config_limits(struct intel_dp *intel_dp,
2356 limits->min_rate = intel_dp_common_rate(intel_dp, 0);
2357 limits->max_rate = intel_dp_max_link_rate(intel_dp);
2363 limits->max_lane_count = intel_dp_max_lane_count(intel_dp);
2366 limits->pipe.max_bpp = intel_dp_max_bpp(intel_dp, crtc_state,
2369 if (intel_dp->use_max_params) {
2382 intel_dp_adjust_compliance_config(intel_dp, crtc_state, limits);
2384 return intel_dp_compute_config_link_bpp_limits(intel_dp,
2423 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2429 !intel_dp_supports_fec(intel_dp, connector, pipe_config))
2432 if (intel_dp_need_bigjoiner(intel_dp, connector,
2439 dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en ||
2440 !intel_dp_compute_config_limits(intel_dp, pipe_config,
2450 ret = intel_dp_compute_link_config_wide(intel_dp, pipe_config,
2459 str_yes_no(intel_dp->force_dsc_en));
2461 if (!intel_dp_compute_config_limits(intel_dp, pipe_config,
2467 ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
2479 intel_dp_max_link_data_rate(intel_dp,
2620 static void intel_dp_compute_as_sdp(struct intel_dp *intel_dp,
2628 !intel_dp_as_sdp_supported(intel_dp))
2643 static void intel_dp_compute_vsc_sdp(struct intel_dp *intel_dp,
2649 if ((!intel_dp->colorimetry_support ||
2692 intel_dp_compute_hdr_metadata_infoframe_sdp(struct intel_dp *intel_dp,
2697 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
2812 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2813 struct intel_connector *connector = intel_dp->attached_connector;
2878 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2882 intel_dp_queue_modeset_retry_work(intel_dp->attached_connector);
2891 if (connector->mst_port == intel_dp)
2904 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2906 struct intel_connector *connector = intel_dp->attached_connector;
2913 if (intel_dp_is_edp(intel_dp) && fixed_mode) {
2942 if ((intel_dp_is_edp(intel_dp) && fixed_mode) ||
2953 drm_dp_enhanced_frame_cap(intel_dp->dpcd);
2961 if (intel_dp->mso_link_count) {
2962 int n = intel_dp->mso_link_count;
2963 int overlap = intel_dp->mso_pixel_overlap;
2998 intel_dp_compute_as_sdp(intel_dp, pipe_config);
2999 intel_psr_compute_config(intel_dp, pipe_config, conn_state);
3001 intel_dp_compute_vsc_sdp(intel_dp, pipe_config, conn_state);
3002 intel_dp_compute_hdr_metadata_infoframe_sdp(intel_dp, pipe_config, conn_state);
3004 return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector,
3008 void intel_dp_set_link_params(struct intel_dp *intel_dp,
3011 memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
3012 intel_dp->link_trained = false;
3013 intel_dp->link_rate = link_rate;
3014 intel_dp->lane_count = lane_count;
3017 static void intel_dp_reset_max_link_params(struct intel_dp *intel_dp)
3019 intel_dp->max_link_lane_count = intel_dp_max_common_lane_count(intel_dp);
3020 intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
3027 struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(conn_state->best_encoder));
3028 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3030 if (!intel_dp_is_edp(intel_dp))
3036 intel_pps_backlight_on(intel_dp);
3042 struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(old_conn_state->best_encoder));
3043 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3045 if (!intel_dp_is_edp(intel_dp))
3050 intel_pps_backlight_off(intel_dp);
3054 static bool downstream_hpd_needs_d0(struct intel_dp *intel_dp)
3064 return intel_dp->dpcd[DP_DPCD_REV] == 0x11 &&
3065 drm_dp_is_branch(intel_dp->dpcd) &&
3066 intel_dp->downstream_ports[0] & DP_DS_PORT_HPD;
3243 intel_edp_init_source_oui(struct intel_dp *intel_dp, bool careful)
3245 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3254 if (drm_dp_dpcd_read(&intel_dp->aux, DP_SOURCE_OUI, buf, sizeof(buf)) < 0)
3261 if (drm_dp_dpcd_write(&intel_dp->aux, DP_SOURCE_OUI, oui, sizeof(oui)) < 0)
3264 intel_dp->last_oui_write = jiffies;
3267 void intel_dp_wait_source_oui(struct intel_dp *intel_dp)
3269 struct intel_connector *connector = intel_dp->attached_connector;
3270 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3276 wait_remaining_ms_from_jiffies(intel_dp->last_oui_write,
3281 void intel_dp_set_power(struct intel_dp *intel_dp, u8 mode)
3283 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
3288 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
3292 if (downstream_hpd_needs_d0(intel_dp))
3295 ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
3297 struct intel_lspcon *lspcon = dp_to_lspcon(intel_dp);
3299 lspcon_resume(dp_to_dig_port(intel_dp));
3302 if (intel_dp_is_edp(intel_dp))
3303 intel_edp_init_source_oui(intel_dp, false);
3310 ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
3327 intel_dp_get_dpcd(struct intel_dp *intel_dp);
3340 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3347 if (crtc_state && intel_dp->dpcd[DP_DPCD_REV] == 0) {
3348 intel_dp_get_dpcd(intel_dp);
3352 intel_dp_tunnel_resume(intel_dp, crtc_state, dpcd_updated);
3355 intel_dp_reset_max_link_params(intel_dp);
3362 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3369 if (intel_dp_rate_index(intel_dp->source_rates, intel_dp->num_source_rates,
3391 if (CAN_PANEL_REPLAY(intel_dp)) {
3402 static void intel_dp_get_pcon_dsc_cap(struct intel_dp *intel_dp)
3404 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3408 memset(intel_dp->pcon_dsc_dpcd, 0, sizeof(intel_dp->pcon_dsc_dpcd));
3410 if (drm_dp_dpcd_read(&intel_dp->aux, DP_PCON_DSC_ENCODER,
3411 intel_dp->pcon_dsc_dpcd,
3412 sizeof(intel_dp->pcon_dsc_dpcd)) < 0)
3417 (int)sizeof(intel_dp->pcon_dsc_dpcd), intel_dp->pcon_dsc_dpcd);
3452 static int intel_dp_hdmi_sink_max_frl(struct intel_dp *intel_dp)
3454 struct intel_connector *intel_connector = intel_dp->attached_connector;
3475 intel_dp_pcon_is_frl_trained(struct intel_dp *intel_dp,
3478 if (drm_dp_pcon_hdmi_link_active(&intel_dp->aux) &&
3479 drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, frl_trained_mask) == DP_PCON_HDMI_MODE_FRL &&
3486 static int intel_dp_pcon_start_frl_training(struct intel_dp *intel_dp)
3491 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3496 max_pcon_frl_bw = intel_dp->dfp.pcon_max_frl_bw;
3499 max_edid_frl_bw = intel_dp_hdmi_sink_max_frl(intel_dp);
3510 if (intel_dp_pcon_is_frl_trained(intel_dp, max_frl_bw_mask, &frl_trained_mask))
3513 ret = drm_dp_pcon_frl_prepare(&intel_dp->aux, false);
3517 wait_for(is_active = drm_dp_pcon_is_frl_ready(&intel_dp->aux) == true, TIMEOUT_FRL_READY_MS);
3522 ret = drm_dp_pcon_frl_configure_1(&intel_dp->aux, max_frl_bw,
3526 ret = drm_dp_pcon_frl_configure_2(&intel_dp->aux, max_frl_bw_mask,
3530 ret = drm_dp_pcon_frl_enable(&intel_dp->aux);
3538 intel_dp_pcon_is_frl_trained(intel_dp, max_frl_bw_mask, &frl_trained_mask),
3546 intel_dp->frl.trained_rate_gbps = intel_dp_pcon_get_frl_mask(frl_trained_mask);
3547 intel_dp->frl.is_trained = true;
3548 drm_dbg(&i915->drm, "FRL trained with : %d Gbps\n", intel_dp->frl.trained_rate_gbps);
3553 static bool intel_dp_is_hdmi_2_1_sink(struct intel_dp *intel_dp)
3555 if (drm_dp_is_branch(intel_dp->dpcd) &&
3556 intel_dp_has_hdmi_sink(intel_dp) &&
3557 intel_dp_hdmi_sink_max_frl(intel_dp) > 0)
3564 int intel_dp_pcon_set_tmds_mode(struct intel_dp *intel_dp)
3572 ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf);
3578 ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf);
3585 void intel_dp_check_frl_training(struct intel_dp *intel_dp)
3587 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
3594 if (!(intel_dp->downstream_ports[2] & DP_PCON_SOURCE_CTL_MODE) ||
3595 !intel_dp_is_hdmi_2_1_sink(intel_dp) ||
3596 intel_dp->frl.is_trained)
3599 if (intel_dp_pcon_start_frl_training(intel_dp) < 0) {
3603 ret = intel_dp_pcon_set_tmds_mode(intel_dp);
3604 mode = drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, NULL);
3622 intel_dp_pcon_dsc_enc_slices(struct intel_dp *intel_dp,
3625 struct intel_connector *intel_connector = intel_dp->attached_connector;
3629 int pcon_max_slices = drm_dp_pcon_dsc_max_slices(intel_dp->pcon_dsc_dpcd);
3630 int pcon_max_slice_width = drm_dp_pcon_dsc_max_slice_width(intel_dp->pcon_dsc_dpcd);
3638 intel_dp_pcon_dsc_enc_bpp(struct intel_dp *intel_dp,
3642 struct intel_connector *intel_connector = intel_dp->attached_connector;
3646 int pcon_fractional_bpp = drm_dp_pcon_dsc_bpp_incr(intel_dp->pcon_dsc_dpcd);
3656 intel_dp_pcon_dsc_configure(struct intel_dp *intel_dp,
3665 struct intel_connector *intel_connector = intel_dp->attached_connector;
3666 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3670 if (!intel_dp_is_hdmi_2_1_sink(intel_dp))
3678 if (!drm_dp_pcon_enc_is_dsc_1_2(intel_dp->pcon_dsc_dpcd) ||
3686 num_slices = intel_dp_pcon_dsc_enc_slices(intel_dp, crtc_state);
3693 bits_per_pixel = intel_dp_pcon_dsc_enc_bpp(intel_dp, crtc_state,
3705 ret = drm_dp_pcon_pps_override_param(&intel_dp->aux, pps_param);
3710 void intel_dp_configure_protocol_converter(struct intel_dp *intel_dp,
3713 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3718 if (intel_dp->dpcd[DP_DPCD_REV] < 0x13)
3721 if (!drm_dp_is_branch(intel_dp->dpcd))
3724 tmp = intel_dp_has_hdmi_sink(intel_dp) ? DP_HDMI_DVI_OUTPUT_CONFIG : 0;
3726 if (drm_dp_dpcd_writeb(&intel_dp->aux,
3729 str_enable_disable(intel_dp_has_hdmi_sink(intel_dp)));
3761 if (drm_dp_dpcd_writeb(&intel_dp->aux,
3765 str_enable_disable(intel_dp->dfp.ycbcr_444_to_420));
3769 if (drm_dp_pcon_convert_rgb_to_ycbcr(&intel_dp->aux, tmp) < 0)
3775 bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp)
3779 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST,
3841 struct intel_dp *intel_dp = intel_attached_dp(connector);
3843 int n = intel_dp->mso_link_count;
3844 int overlap = intel_dp->mso_pixel_overlap;
3866 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3867 struct intel_connector *connector = intel_dp->attached_connector;
3890 static void intel_edp_mso_init(struct intel_dp *intel_dp)
3892 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3893 struct intel_connector *connector = intel_dp->attached_connector;
3897 if (intel_dp->edp_dpcd[0] < DP_EDP_14)
3900 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_EDP_MSO_LINK_CAPABILITIES, &mso) != 1) {
3907 if (mso % 2 || mso > drm_dp_max_lane_count(intel_dp->dpcd)) {
3914 mso, drm_dp_max_lane_count(intel_dp->dpcd) / mso,
3922 intel_dp->mso_link_count = mso;
3923 intel_dp->mso_pixel_overlap = mso ? info->mso_pixel_overlap : 0;
3927 intel_edp_init_dpcd(struct intel_dp *intel_dp, struct intel_connector *connector)
3930 to_i915(dp_to_dig_port(intel_dp)->base.base.dev);
3933 drm_WARN_ON(&dev_priv->drm, intel_dp->dpcd[DP_DPCD_REV] != 0);
3935 if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd) != 0)
3938 drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
3939 drm_dp_is_branch(intel_dp->dpcd));
3950 if (drm_dp_dpcd_read(&intel_dp->aux, DP_EDP_DPCD_REV,
3951 intel_dp->edp_dpcd, sizeof(intel_dp->edp_dpcd)) ==
3952 sizeof(intel_dp->edp_dpcd)) {
3954 (int)sizeof(intel_dp->edp_dpcd),
3955 intel_dp->edp_dpcd);
3957 intel_dp->use_max_params = intel_dp->edp_dpcd[0] < DP_EDP_14;
3961 * This has to be called after intel_dp->edp_dpcd is filled, PSR checks
3962 * for SET_POWER_CAPABLE bit in intel_dp->edp_dpcd[1]
3964 intel_psr_init_dpcd(intel_dp);
3967 intel_dp->num_sink_rates = 0;
3970 if (intel_dp->edp_dpcd[0] >= DP_EDP_14) {
3974 drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
3989 intel_dp->sink_rates[i] = (val * 200) / 10;
3991 intel_dp->num_sink_rates = i;
3998 if (intel_dp->num_sink_rates)
3999 intel_dp->use_rate_select = true;
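
As a worked example of the conversion at line 3989 above: DP_SUPPORTED_LINK_RATES entries are 16-bit values in units of 200 kHz, so an entry of 8100 (which corresponds to RBR) yields (8100 * 200) / 10 = 162000, matching the 162000 default sink rate seen at lines 187 and 630 of this listing.
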
4001 intel_dp_set_sink_rates(intel_dp);
4002 intel_dp_set_max_sink_lane_count(intel_dp);
4006 intel_edp_get_dsc_sink_cap(intel_dp->edp_dpcd[0],
4013 intel_edp_init_source_oui(intel_dp, true);
4019 intel_dp_has_sink_count(struct intel_dp *intel_dp)
4021 if (!intel_dp->attached_connector)
4024 return drm_dp_read_sink_count_cap(&intel_dp->attached_connector->base,
4025 intel_dp->dpcd,
4026 &intel_dp->desc);
4029 void intel_dp_update_sink_caps(struct intel_dp *intel_dp)
4031 intel_dp_set_sink_rates(intel_dp);
4032 intel_dp_set_max_sink_lane_count(intel_dp);
4033 intel_dp_set_common_rates(intel_dp);
4037 intel_dp_get_dpcd(struct intel_dp *intel_dp)
4041 if (intel_dp_init_lttpr_and_dprx_caps(intel_dp) < 0)
4048 if (!intel_dp_is_edp(intel_dp)) {
4049 drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
4050 drm_dp_is_branch(intel_dp->dpcd));
4052 intel_dp_update_sink_caps(intel_dp);
4055 if (intel_dp_has_sink_count(intel_dp)) {
4056 ret = drm_dp_read_sink_count(&intel_dp->aux);
4062 * a member variable in intel_dp will track any changes
4065 intel_dp->sink_count = ret;
4074 if (!intel_dp->sink_count)
4078 return drm_dp_read_downstream_info(&intel_dp->aux, intel_dp->dpcd,
4079 intel_dp->downstream_ports) == 0;
4093 intel_dp_mst_mode_choose(struct intel_dp *intel_dp,
4096 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4101 if (!intel_dp_mst_source_support(intel_dp))
4105 !(intel_dp->dpcd[DP_MAIN_LINK_CHANNEL_CODING] & DP_CAP_ANSI_128B132B))
4112 intel_dp_mst_detect(struct intel_dp *intel_dp)
4114 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4115 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
4119 sink_mst_mode = drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);
4121 mst_detect = intel_dp_mst_mode_choose(intel_dp, sink_mst_mode);
4126 str_yes_no(intel_dp_mst_source_support(intel_dp)),
4135 intel_dp_mst_configure(struct intel_dp *intel_dp)
4137 if (!intel_dp_mst_source_support(intel_dp))
4140 intel_dp->is_mst = intel_dp->mst_detect != DRM_DP_SST;
4142 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr, intel_dp->is_mst);
4145 intel_dp->mst_detect = DRM_DP_SST;
4149 intel_dp_mst_disconnect(struct intel_dp *intel_dp)
4151 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4153 if (!intel_dp->is_mst)
4157 intel_dp->is_mst, intel_dp->mst_mgr.mst_state);
4158 intel_dp->is_mst = false;
4159 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr, intel_dp->is_mst);
4163 intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *esi)
4165 return drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI, esi, 4) == 4;
4168 static bool intel_dp_ack_sink_irq_esi(struct intel_dp *intel_dp, u8 esi[4])
4173 if (drm_dp_dpcd_write(&intel_dp->aux, DP_SINK_COUNT_ESI + 1,
4623 static u8 intel_dp_autotest_link_training(struct intel_dp *intel_dp)
4625 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4633 status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LANE_COUNT,
4642 status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LINK_RATE,
4651 if (!intel_dp_link_params_valid(intel_dp, test_link_rate,
4655 intel_dp->compliance.test_lane_count = test_lane_count;
4656 intel_dp->compliance.test_link_rate = test_link_rate;
4661 static u8 intel_dp_autotest_video_pattern(struct intel_dp *intel_dp)
4663 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4670 status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_PATTERN,
4679 status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_H_WIDTH_HI,
4686 status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_V_HEIGHT_HI,
4693 status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_MISC0,
4705 intel_dp->compliance.test_data.bpc = 6;
4708 intel_dp->compliance.test_data.bpc = 8;
4714 intel_dp->compliance.test_data.video_pattern = test_pattern;
4715 intel_dp->compliance.test_data.hdisplay = be16_to_cpu(h_width);
4716 intel_dp->compliance.test_data.vdisplay = be16_to_cpu(v_height);
4718 intel_dp->compliance.test_active = true;
4723 static u8 intel_dp_autotest_edid(struct intel_dp *intel_dp)
4725 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4727 struct intel_connector *intel_connector = intel_dp->attached_connector;
4732 intel_dp->aux.i2c_defer_count > 6) {
4740 if (intel_dp->aux.i2c_nack_count > 0 ||
4741 intel_dp->aux.i2c_defer_count > 0)
4744 intel_dp->aux.i2c_nack_count,
4745 intel_dp->aux.i2c_defer_count);
4746 intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_FAILSAFE;
4754 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_EDID_CHECKSUM,
4760 intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_PREFERRED;
4764 intel_dp->compliance.test_active = true;
4769 static void intel_dp_phy_pattern_update(struct intel_dp *intel_dp,
4773 to_i915(dp_to_dig_port(intel_dp)->base.base.dev);
4775 &intel_dp->compliance.test_data.phytest;
4777 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
4852 static void intel_dp_process_phy_request(struct intel_dp *intel_dp,
4855 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4857 &intel_dp->compliance.test_data.phytest;
4860 if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
4867 intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX,
4870 intel_dp_set_signal_levels(intel_dp, crtc_state, DP_PHY_DPRX);
4872 intel_dp_phy_pattern_update(intel_dp, crtc_state);
4874 drm_dp_dpcd_write(&intel_dp->aux, DP_TRAINING_LANE0_SET,
4875 intel_dp->train_set, crtc_state->lane_count);
4877 drm_dp_set_phy_test_pattern(&intel_dp->aux, data,
4878 intel_dp->dpcd[DP_DPCD_REV]);
4881 static u8 intel_dp_autotest_phy_pattern(struct intel_dp *intel_dp)
4883 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4885 &intel_dp->compliance.test_data.phytest;
4887 if (drm_dp_get_phy_test_pattern(&intel_dp->aux, data)) {
4893 intel_dp->compliance.test_active = true;
4898 static void intel_dp_handle_test_request(struct intel_dp *intel_dp)
4900 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4905 status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_REQUEST, &request);
4915 response = intel_dp_autotest_link_training(intel_dp);
4919 response = intel_dp_autotest_video_pattern(intel_dp);
4923 response = intel_dp_autotest_edid(intel_dp);
4927 response = intel_dp_autotest_phy_pattern(intel_dp);
4936 intel_dp->compliance.test_type = request;
4939 status = drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_RESPONSE, response);
4945 static bool intel_dp_link_ok(struct intel_dp *intel_dp,
4948 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
4950 bool uhbr = intel_dp->link_rate >= 1000000;
4955 intel_dp->lane_count);
4957 ok = drm_dp_channel_eq_ok(link_status, intel_dp->lane_count);
4962 intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
4972 intel_dp_mst_hpd_irq(struct intel_dp *intel_dp, u8 *esi, u8 *ack)
4976 drm_dp_mst_hpd_irq_handle_event(&intel_dp->mst_mgr, esi, ack, &handled);
4979 intel_hdcp_handle_cp_irq(intel_dp->attached_connector);
4984 static bool intel_dp_mst_link_status(struct intel_dp *intel_dp)
4986 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
4991 if (drm_dp_dpcd_read(&intel_dp->aux, DP_LANE0_1_STATUS_ESI, link_status,
4999 return intel_dp_link_ok(intel_dp, link_status);
5004 * @intel_dp: Intel DP struct
5017 intel_dp_check_mst_status(struct intel_dp *intel_dp)
5019 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5023 drm_WARN_ON_ONCE(&i915->drm, intel_dp->active_mst_links < 0);
5029 if (!intel_dp_get_sink_irq_esi(intel_dp, esi)) {
5039 if (intel_dp->active_mst_links > 0 && link_ok &&
5041 if (!intel_dp_mst_link_status(intel_dp))
5046 intel_dp_mst_hpd_irq(intel_dp, esi, ack);
5050 &intel_dp->aux))
5058 if (!intel_dp_ack_sink_irq_esi(intel_dp, ack))
5062 drm_dp_mst_hpd_irq_send_new_request(&intel_dp->mst_mgr);
5069 intel_dp_handle_hdmi_link_status_change(struct intel_dp *intel_dp)
5074 is_active = drm_dp_pcon_hdmi_link_active(&intel_dp->aux);
5075 if (intel_dp->frl.is_trained && !is_active) {
5076 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, &buf) < 0)
5080 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf) < 0)
5083 drm_dp_pcon_hdmi_frl_link_error_count(&intel_dp->aux, &intel_dp->attached_connector->base);
5085 intel_dp->frl.is_trained = false;
5088 intel_dp_check_frl_training(intel_dp);
5093 intel_dp_needs_link_retrain(struct intel_dp *intel_dp)
5097 if (!intel_dp->link_trained)
5108 if (intel_psr_enabled(intel_dp))
5111 if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
5116 * Validate the cached values of intel_dp->link_rate and
5117 * intel_dp->lane_count before attempting to retrain.
5123 if (!intel_dp_link_params_valid(intel_dp, intel_dp->link_rate,
5124 intel_dp->lane_count))
5128 return !intel_dp_link_ok(intel_dp, link_status);
5131 static bool intel_dp_has_connector(struct intel_dp *intel_dp,
5134 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5142 encoder = &dp_to_dig_port(intel_dp)->base;
5148 encoder = &intel_dp->mst_encoders[pipe]->base;
5156 int intel_dp_get_active_pipes(struct intel_dp *intel_dp,
5160 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5174 if (!intel_dp_has_connector(intel_dp, conn_state))
5204 static bool intel_dp_is_connected(struct intel_dp *intel_dp)
5206 struct intel_connector *connector = intel_dp->attached_connector;
5209 intel_dp->is_mst;
5216 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
5221 if (!intel_dp_is_connected(intel_dp))
5229 if (!intel_dp_needs_link_retrain(intel_dp))
5232 ret = intel_dp_get_active_pipes(intel_dp, ctx, &pipe_mask);
5239 if (!intel_dp_needs_link_retrain(intel_dp))
5266 intel_dp_check_frl_training(intel_dp);
5267 intel_dp_pcon_dsc_configure(intel_dp, crtc_state);
5268 intel_dp_start_link_train(intel_dp, crtc_state);
5269 intel_dp_stop_link_train(intel_dp, crtc_state);
5289 static int intel_dp_prep_phy_test(struct intel_dp *intel_dp,
5293 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5307 if (!intel_dp_has_connector(intel_dp, conn_state))
5340 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
5350 ret = intel_dp_prep_phy_test(intel_dp, ctx, &pipe_mask);
5370 intel_dp_process_phy_request(intel_dp, crtc_state);
5401 static void intel_dp_check_device_service_irq(struct intel_dp *intel_dp)
5403 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5406 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
5409 if (drm_dp_dpcd_readb(&intel_dp->aux,
5413 drm_dp_dpcd_writeb(&intel_dp->aux, DP_DEVICE_SERVICE_IRQ_VECTOR, val);
5416 intel_dp_handle_test_request(intel_dp);
5419 intel_hdcp_handle_cp_irq(intel_dp->attached_connector);
5425 static bool intel_dp_check_link_service_irq(struct intel_dp *intel_dp)
5427 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5431 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
5434 if (drm_dp_dpcd_readb(&intel_dp->aux,
5440 &intel_dp->aux))
5443 if (drm_dp_dpcd_writeb(&intel_dp->aux,
5448 intel_dp_handle_hdmi_link_status_change(intel_dp);
5467 intel_dp_short_pulse(struct intel_dp *intel_dp)
5469 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
5470 u8 old_sink_count = intel_dp->sink_count;
5478 memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));
5486 ret = intel_dp_get_dpcd(intel_dp);
5488 if ((old_sink_count != intel_dp->sink_count) || !ret) {
5493 intel_dp_check_device_service_irq(intel_dp);
5494 reprobe_needed = intel_dp_check_link_service_irq(intel_dp);
5497 drm_dp_cec_irq(&intel_dp->aux);
5500 if (intel_dp_needs_link_retrain(intel_dp))
5503 intel_psr_short_pulse(intel_dp);
5505 switch (intel_dp->compliance.test_type) {
5529 intel_dp_detect_dpcd(struct intel_dp *intel_dp)
5531 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5532 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
5533 u8 *dpcd = intel_dp->dpcd;
5536 if (drm_WARN_ON(&i915->drm, intel_dp_is_edp(intel_dp)))
5541 if (!intel_dp_get_dpcd(intel_dp))
5544 intel_dp->mst_detect = intel_dp_mst_detect(intel_dp);
5551 if (intel_dp_has_sink_count(intel_dp) &&
5552 intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {
5553 return intel_dp->sink_count ?
5557 if (intel_dp->mst_detect == DRM_DP_MST)
5561 if (drm_probe_ddc(&intel_dp->aux.ddc))
5565 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
5566 type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
5571 type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
5584 edp_detect(struct intel_dp *intel_dp)
5653 intel_dp_get_edid(struct intel_dp *intel_dp)
5655 struct intel_connector *connector = intel_dp->attached_connector;
5667 return drm_edid_read_ddc(&connector->base, &intel_dp->aux.ddc);
5671 intel_dp_update_dfp(struct intel_dp *intel_dp,
5674 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5675 struct intel_connector *connector = intel_dp->attached_connector;
5677 intel_dp->dfp.max_bpc =
5678 drm_dp_downstream_max_bpc(intel_dp->dpcd,
5679 intel_dp->downstream_ports, drm_edid);
5681 intel_dp->dfp.max_dotclock =
5682 drm_dp_downstream_max_dotclock(intel_dp->dpcd,
5683 intel_dp->downstream_ports);
5685 intel_dp->dfp.min_tmds_clock =
5686 drm_dp_downstream_min_tmds_clock(intel_dp->dpcd,
5687 intel_dp->downstream_ports,
5689 intel_dp->dfp.max_tmds_clock =
5690 drm_dp_downstream_max_tmds_clock(intel_dp->dpcd,
5691 intel_dp->downstream_ports,
5694 intel_dp->dfp.pcon_max_frl_bw =
5695 drm_dp_get_pcon_max_frl_bw(intel_dp->dpcd,
5696 intel_dp->downstream_ports);
5701 intel_dp->dfp.max_bpc,
5702 intel_dp->dfp.max_dotclock,
5703 intel_dp->dfp.min_tmds_clock,
5704 intel_dp->dfp.max_tmds_clock,
5705 intel_dp->dfp.pcon_max_frl_bw);
5707 intel_dp_get_pcon_dsc_cap(intel_dp);
5711 intel_dp_can_ycbcr420(struct intel_dp *intel_dp)
5713 if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420) &&
5714 (!drm_dp_is_branch(intel_dp->dpcd) || intel_dp->dfp.ycbcr420_passthrough))
5717 if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_RGB) &&
5718 dfp_can_convert_from_rgb(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420))
5721 if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR444) &&
5722 dfp_can_convert_from_ycbcr444(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420))
5729 intel_dp_update_420(struct intel_dp *intel_dp)
5731 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5732 struct intel_connector *connector = intel_dp->attached_connector;
5734 intel_dp->dfp.ycbcr420_passthrough =
5735 drm_dp_downstream_420_passthrough(intel_dp->dpcd,
5736 intel_dp->downstream_ports);
5738 intel_dp->dfp.ycbcr_444_to_420 =
5739 dp_to_dig_port(intel_dp)->lspcon.active ||
5740 drm_dp_downstream_444_to_420_conversion(intel_dp->dpcd,
5741 intel_dp->downstream_ports);
5742 intel_dp->dfp.rgb_to_ycbcr =
5743 drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
5744 intel_dp->downstream_ports,
5747 connector->base.ycbcr_420_allowed = intel_dp_can_ycbcr420(intel_dp);
5752 str_yes_no(intel_dp->dfp.rgb_to_ycbcr),
5754 str_yes_no(intel_dp->dfp.ycbcr_444_to_420));
5758 intel_dp_set_edid(struct intel_dp *intel_dp)
5760 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5761 struct intel_connector *connector = intel_dp->attached_connector;
5765 intel_dp_unset_edid(intel_dp);
5766 drm_edid = intel_dp_get_edid(intel_dp);
5777 intel_dp_update_dfp(intel_dp, drm_edid);
5778 intel_dp_update_420(intel_dp);
5780 drm_dp_cec_attach(&intel_dp->aux,
5785 intel_dp_unset_edid(struct intel_dp *intel_dp)
5787 struct intel_connector *connector = intel_dp->attached_connector;
5789 drm_dp_cec_unset_edid(&intel_dp->aux);
5793 intel_dp->dfp.max_bpc = 0;
5794 intel_dp->dfp.max_dotclock = 0;
5795 intel_dp->dfp.min_tmds_clock = 0;
5796 intel_dp->dfp.max_tmds_clock = 0;
5798 intel_dp->dfp.pcon_max_frl_bw = 0;
5800 intel_dp->dfp.ycbcr_444_to_420 = false;
5808 intel_dp_detect_dsc_caps(struct intel_dp *intel_dp, struct intel_connector *connector)
5810 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
5816 if (intel_dp_is_edp(intel_dp))
5817 intel_edp_get_dsc_sink_cap(intel_dp->edp_dpcd[0],
5820 intel_dp_get_dsc_sink_cap(intel_dp->dpcd[DP_DPCD_REV],
5832 struct intel_dp *intel_dp = intel_attached_dp(intel_connector);
5833 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
5850 if (intel_dp_is_edp(intel_dp))
5851 status = edp_detect(intel_dp);
5853 status = intel_dp_detect_dpcd(intel_dp);
5858 memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));
5860 intel_dp->psr.sink_panel_replay_support = false;
5862 intel_dp_mst_disconnect(intel_dp);
5864 intel_dp_tunnel_disconnect(intel_dp);
5869 ret = intel_dp_tunnel_detect(intel_dp, ctx);
5876 if (!intel_dp_is_edp(intel_dp))
5877 intel_psr_init_dpcd(intel_dp);
5879 intel_dp_detect_dsc_caps(intel_dp, intel_connector);
5881 intel_dp_mst_configure(intel_dp);
5887 if (intel_dp->reset_link_params || intel_dp->is_mst) {
5888 intel_dp_reset_max_link_params(intel_dp);
5889 intel_dp->reset_link_params = false;
5892 intel_dp_print_rates(intel_dp);
5894 if (intel_dp->is_mst) {
5908 if (!intel_dp_is_edp(intel_dp)) {
5919 intel_dp->aux.i2c_nack_count = 0;
5920 intel_dp->aux.i2c_defer_count = 0;
5922 intel_dp_set_edid(intel_dp);
5923 if (intel_dp_is_edp(intel_dp) ||
5927 intel_dp_check_device_service_irq(intel_dp);
5930 if (status != connector_status_connected && !intel_dp->is_mst)
5931 intel_dp_unset_edid(intel_dp);
5933 if (!intel_dp_is_edp(intel_dp))
5936 intel_dp->dpcd,
5937 intel_dp->downstream_ports);
5944 struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
5945 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
5955 intel_dp_unset_edid(intel_dp);
5960 intel_dp_set_edid(intel_dp);
5979 struct intel_dp *intel_dp = intel_attached_dp(intel_connector);
5983 intel_dp->dpcd,
5984 intel_dp->downstream_ports);
5998 struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
5999 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
6008 intel_dp->aux.name, connector->kdev->kobj.name);
6010 intel_dp->aux.dev = connector->kdev;
6011 ret = drm_dp_aux_register(&intel_dp->aux);
6013 drm_dp_cec_register_connector(&intel_dp->aux, connector);
6034 struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
6036 drm_dp_cec_unregister_connector(&intel_dp->aux);
6037 drm_dp_aux_unregister(&intel_dp->aux);
6057 struct intel_dp *intel_dp = &dig_port->dp;
6061 intel_dp_tunnel_destroy(intel_dp);
6063 intel_pps_vdd_off_sync(intel_dp);
6069 intel_pps_wait_power_cycle(intel_dp);
6071 intel_dp_aux_fini(intel_dp);
6076 struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);
6078 intel_pps_vdd_off_sync(intel_dp);
6080 intel_dp_tunnel_suspend(intel_dp);
6085 struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);
6087 intel_pps_wait_power_cycle(intel_dp);
6205 struct intel_dp *intel_dp = enc_to_intel_dp(intel_conn->encoder);
6212 if (intel_dp_mst_source_support(intel_dp)) {
6213 ret = drm_dp_mst_root_conn_atomic_check(conn_state, &intel_dp->mst_mgr);
6222 intel_dp,
6289 struct intel_dp *intel_dp = &dig_port->dp;
6293 (long_hpd || !intel_pps_have_panel_power_or_vdd(intel_dp))) {
6322 intel_dp_read_dprx_caps(intel_dp, dpcd);
6325 intel_dp->reset_link_params = true;
6329 if (intel_dp->is_mst) {
6330 if (!intel_dp_check_mst_status(intel_dp))
6332 } else if (!intel_dp_short_pulse(intel_dp)) {
6387 intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
6390 enum port port = dp_to_dig_port(intel_dp)->base.port;
6392 if (!intel_dp_is_edp(intel_dp))
6405 if (intel_bios_encoder_is_lspcon(dp_to_dig_port(intel_dp)->base.devdata)) {
6412 if (has_gamut_metadata_dip(&dp_to_dig_port(intel_dp)->base))
6420 intel_edp_add_properties(struct intel_dp *intel_dp)
6422 struct intel_connector *connector = intel_dp->attached_connector;
6435 static void intel_edp_backlight_setup(struct intel_dp *intel_dp,
6438 struct drm_i915_private *i915 = dp_to_i915(intel_dp);
6447 pipe = vlv_active_pipe(intel_dp);
6450 pipe = intel_dp->pps.pps_pipe;
6459 static bool intel_edp_init_connector(struct intel_dp *intel_dp,
6462 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
6465 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
6469 if (!intel_dp_is_edp(intel_dp))
6490 if (!intel_pps_init(intel_dp)) {
6512 has_dpcd = intel_edp_init_dpcd(intel_dp, intel_connector);
6551 if (DISPLAY_VER(dev_priv) == 9 && drm_dp_is_branch(intel_dp->dpcd) &&
6552 (intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_TYPE_MASK) ==
6588 intel_edp_mso_init(intel_dp);
6609 intel_edp_backlight_setup(intel_dp, intel_connector);
6611 intel_edp_add_properties(intel_dp);
6613 intel_pps_init_late(intel_dp);
6618 intel_pps_vdd_off_sync(intel_dp);
6659 struct intel_dp *intel_dp = &dig_port->dp;
6675 intel_dp->reset_link_params = true;
6676 intel_dp->pps.pps_pipe = INVALID_PIPE;
6677 intel_dp->pps.active_pipe = INVALID_PIPE;
6680 intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
6681 intel_dp->attached_connector = intel_connector;
6701 intel_dp_set_default_sink_rates(intel_dp);
6702 intel_dp_set_default_max_sink_lane_count(intel_dp);
6705 intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
6707 intel_dp_aux_init(intel_dp);
6708 intel_connector->dp.dsc_decompression_aux = &intel_dp->aux;
6716 type, &intel_dp->aux.ddc);
6733 if (!intel_edp_init_connector(intel_dp, intel_connector)) {
6734 intel_dp_aux_fini(intel_dp);
6738 intel_dp_set_source_rates(intel_dp);
6739 intel_dp_set_common_rates(intel_dp);
6740 intel_dp_reset_max_link_params(intel_dp);
6746 intel_dp_add_properties(intel_dp, connector);
6748 if (is_hdcp_supported(dev_priv, port) && !intel_dp_is_edp(intel_dp)) {
6755 intel_dp->colorimetry_support =
6756 intel_dp_get_colorimetry_status(intel_dp);
6758 intel_dp->frl.is_trained = false;
6759 intel_dp->frl.trained_rate_gbps = 0;
6761 intel_psr_init(intel_dp);
6780 struct intel_dp *intel_dp;
6785 intel_dp = enc_to_intel_dp(encoder);
6787 if (!intel_dp_mst_source_support(intel_dp))
6790 if (intel_dp->is_mst)
6791 drm_dp_mst_topology_mgr_suspend(&intel_dp->mst_mgr);
6803 struct intel_dp *intel_dp;
6809 intel_dp = enc_to_intel_dp(encoder);
6811 if (!intel_dp_mst_source_support(intel_dp))
6814 ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst_mgr,
6817 intel_dp->is_mst = false;
6818 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,