Lines matching refs: dev_priv

151 bool intel_display_power_well_is_enabled(struct drm_i915_private *dev_priv,
156 power_well = lookup_power_well(dev_priv, power_well_id);
158 return intel_power_well_is_enabled(dev_priv, power_well);
187 static void hsw_power_well_post_enable(struct drm_i915_private *dev_priv,
191 intel_vga_reset_io_mem(dev_priv);
194 gen8_irq_power_well_post_enable(dev_priv, irq_pipe_mask);
197 static void hsw_power_well_pre_disable(struct drm_i915_private *dev_priv,
201 gen8_irq_power_well_pre_disable(dev_priv, irq_pipe_mask);
222 aux_ch_to_digital_port(struct drm_i915_private *dev_priv,
227 for_each_intel_encoder(&dev_priv->drm, encoder) {
259 static void hsw_wait_for_power_well_enable(struct drm_i915_private *dev_priv,
272 if (IS_DG2(dev_priv) && power_well->desc->fixed_enable_delay) {
278 if (intel_de_wait_for_set(dev_priv, regs->driver,
280 drm_dbg_kms(&dev_priv->drm, "%s power well enable timeout\n",
283 drm_WARN_ON(&dev_priv->drm, !timeout_expected);
288 static u32 hsw_power_well_requesters(struct drm_i915_private *dev_priv,
295 ret = intel_de_read(dev_priv, regs->bios) & req_mask ? 1 : 0;
296 ret |= intel_de_read(dev_priv, regs->driver) & req_mask ? 2 : 0;
298 ret |= intel_de_read(dev_priv, regs->kvmr) & req_mask ? 4 : 0;
299 ret |= intel_de_read(dev_priv, regs->debug) & req_mask ? 8 : 0;
304 static void hsw_wait_for_power_well_disable(struct drm_i915_private *dev_priv,
321 wait_for((disabled = !(intel_de_read(dev_priv, regs->driver) &
323 (reqs = hsw_power_well_requesters(dev_priv, regs, pw_idx)), 1);
327 drm_dbg_kms(&dev_priv->drm,
333 static void gen9_wait_for_power_well_fuses(struct drm_i915_private *dev_priv,
337 drm_WARN_ON(&dev_priv->drm,
338 intel_de_wait_for_set(dev_priv, SKL_FUSE_STATUS,
342 static void hsw_power_well_enable(struct drm_i915_private *dev_priv,
351 pg = DISPLAY_VER(dev_priv) >= 11 ? ICL_PW_CTL_IDX_TO_PG(pw_idx) :
355 if (IS_ALDERLAKE_P(dev_priv) && pg == SKL_PG1)
356 intel_de_rmw(dev_priv, GEN8_CHICKEN_DCPR_1, 0, DISABLE_FLR_SRC);
366 gen9_wait_for_power_well_fuses(dev_priv, SKL_PG0);
369 intel_de_rmw(dev_priv, regs->driver, 0, HSW_PWR_WELL_CTL_REQ(pw_idx));
371 hsw_wait_for_power_well_enable(dev_priv, power_well, false);
376 pg = DISPLAY_VER(dev_priv) >= 11 ? ICL_PW_CTL_IDX_TO_PG(pw_idx) :
378 gen9_wait_for_power_well_fuses(dev_priv, pg);
381 hsw_power_well_post_enable(dev_priv,
386 static void hsw_power_well_disable(struct drm_i915_private *dev_priv,
392 hsw_power_well_pre_disable(dev_priv,
395 intel_de_rmw(dev_priv, regs->driver, HSW_PWR_WELL_CTL_REQ(pw_idx), 0);
396 hsw_wait_for_power_well_disable(dev_priv, power_well);
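
The hsw_power_well_enable()/hsw_power_well_disable() lines above (342-396), together with their wait helpers (259-331), all implement one request/state handshake: set or clear HSW_PWR_WELL_CTL_REQ(pw_idx) in the driver control register, poll for the matching state bit, and on a stuck disable report which agents (BIOS, driver, KVMr, debug) still request the well. Below is a minimal, self-contained C sketch of that pattern; the register variables, bit layout and hw_update() model are assumptions made for illustration, not the i915 accessors or the real HSW_PWR_WELL_CTL layout.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical bit layout: one request and one state bit per well index. */
#define PW_REQ(idx)	(1u << ((idx) * 2 + 1))
#define PW_STATE(idx)	(1u << ((idx) * 2))

/* Stand-ins for the BIOS/DRIVER/KVMR/DEBUG power well control registers. */
static uint32_t ctl_bios, ctl_driver, ctl_kvmr, ctl_debug;

/* Toy hardware model: the well is up while any agent requests it. */
static void hw_update(unsigned int idx)
{
	if ((ctl_bios | ctl_driver | ctl_kvmr | ctl_debug) & PW_REQ(idx))
		ctl_driver |= PW_STATE(idx);
	else
		ctl_driver &= ~PW_STATE(idx);
}

/* Same idea as hsw_power_well_requesters(): who still asks for the well. */
static unsigned int requesters(unsigned int idx)
{
	return (ctl_bios   & PW_REQ(idx) ? 1 : 0) |
	       (ctl_driver & PW_REQ(idx) ? 2 : 0) |
	       (ctl_kvmr   & PW_REQ(idx) ? 4 : 0) |
	       (ctl_debug  & PW_REQ(idx) ? 8 : 0);
}

static bool power_well_enable(unsigned int idx)
{
	ctl_driver |= PW_REQ(idx);		/* set the request bit */

	for (int tries = 0; tries < 10; tries++) {
		hw_update(idx);
		if (ctl_driver & PW_STATE(idx))	/* poll the state bit */
			return true;
	}
	printf("power well %u enable timeout\n", idx);
	return false;
}

static bool power_well_disable(unsigned int idx)
{
	ctl_driver &= ~PW_REQ(idx);		/* drop the driver request */

	for (int tries = 0; tries < 10; tries++) {
		hw_update(idx);
		if (!(ctl_driver & PW_STATE(idx)))
			return true;
	}
	printf("power well %u disable timeout, still requested by 0x%x\n",
	       idx, requesters(idx));
	return false;
}

int main(void)
{
	ctl_bios |= PW_REQ(1);	/* pretend the BIOS never released the well */
	power_well_enable(1);
	power_well_disable(1);	/* times out and reports the BIOS request */
	return 0;
}

In the driver the polling is done with intel_de_wait_for_set()/wait_for() against the real control registers, and the requester bitmask feeds the diagnostic at line 327.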
407 icl_combo_phy_aux_power_well_enable(struct drm_i915_private *dev_priv,
413 drm_WARN_ON(&dev_priv->drm, !IS_ICELAKE(dev_priv));
415 intel_de_rmw(dev_priv, regs->driver, 0, HSW_PWR_WELL_CTL_REQ(pw_idx));
421 intel_de_rmw(dev_priv, ICL_PORT_CL_DW12(ICL_AUX_PW_TO_PHY(pw_idx)),
424 hsw_wait_for_power_well_enable(dev_priv, power_well, false);
428 !intel_aux_ch_is_edp(dev_priv, ICL_AUX_PW_TO_CH(pw_idx)))
429 intel_de_rmw(dev_priv, ICL_PORT_TX_DW6_AUX(ICL_AUX_PW_TO_PHY(pw_idx)),
434 icl_combo_phy_aux_power_well_disable(struct drm_i915_private *dev_priv,
440 drm_WARN_ON(&dev_priv->drm, !IS_ICELAKE(dev_priv));
446 intel_de_rmw(dev_priv, ICL_PORT_CL_DW12(ICL_AUX_PW_TO_PHY(pw_idx)),
449 intel_de_rmw(dev_priv, regs->driver, HSW_PWR_WELL_CTL_REQ(pw_idx), 0);
451 hsw_wait_for_power_well_disable(dev_priv, power_well);
456 static void icl_tc_port_assert_ref_held(struct drm_i915_private *dev_priv,
460 if (drm_WARN_ON(&dev_priv->drm, !dig_port))
463 if (DISPLAY_VER(dev_priv) == 11 && intel_tc_cold_requires_aux_pw(dig_port))
466 drm_WARN_ON(&dev_priv->drm, !intel_tc_port_ref_held(dig_port));
471 static void icl_tc_port_assert_ref_held(struct drm_i915_private *dev_priv,
503 icl_tc_phy_aux_power_well_enable(struct drm_i915_private *dev_priv,
507 struct intel_digital_port *dig_port = aux_ch_to_digital_port(dev_priv, aux_ch);
512 icl_tc_port_assert_ref_held(dev_priv, power_well, dig_port);
514 intel_de_rmw(dev_priv, DP_AUX_CH_CTL(aux_ch),
517 intel_de_rmw(dev_priv, regs->driver,
527 if (DISPLAY_VER(dev_priv) == 11 && intel_tc_cold_requires_aux_pw(dig_port))
528 icl_tc_cold_exit(dev_priv);
530 hsw_wait_for_power_well_enable(dev_priv, power_well, timeout_expected);
532 if (DISPLAY_VER(dev_priv) >= 12 && !is_tbt) {
537 if (wait_for(intel_dkl_phy_read(dev_priv, DKL_CMN_UC_DW_27(tc_port)) &
539 drm_warn(&dev_priv->drm,
545 icl_aux_power_well_enable(struct drm_i915_private *dev_priv,
548 enum phy phy = icl_aux_pw_to_phy(dev_priv, power_well);
550 if (intel_phy_is_tc(dev_priv, phy))
551 return icl_tc_phy_aux_power_well_enable(dev_priv, power_well);
552 else if (IS_ICELAKE(dev_priv))
553 return icl_combo_phy_aux_power_well_enable(dev_priv,
556 return hsw_power_well_enable(dev_priv, power_well);
560 icl_aux_power_well_disable(struct drm_i915_private *dev_priv,
563 enum phy phy = icl_aux_pw_to_phy(dev_priv, power_well);
565 if (intel_phy_is_tc(dev_priv, phy))
566 return hsw_power_well_disable(dev_priv, power_well);
567 else if (IS_ICELAKE(dev_priv))
568 return icl_combo_phy_aux_power_well_disable(dev_priv,
571 return hsw_power_well_disable(dev_priv, power_well);
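
icl_aux_power_well_enable()/_disable() (545-571) are thin dispatchers over the sequences above: AUX wells sitting on a Type-C PHY take the TC path, Icelake combo-PHY AUX wells take the combo path with its extra steps, and everything else falls back to the plain HSW handshake; on the disable side the TC case reuses hsw_power_well_disable() directly. A compact sketch of that selection follows; the enum, struct and handler names are hypothetical stand-ins for intel_phy_is_tc(), IS_ICELAKE() and the per-path implementations.

enum phy_kind { PHY_COMBO, PHY_TC };

struct aux_well {
	enum phy_kind kind;	/* what icl_aux_pw_to_phy() + intel_phy_is_tc() decide */
	int on_icelake;		/* what IS_ICELAKE() decides */
};

static void tc_aux_enable(struct aux_well *w)     { (void)w; /* TC/DKL handshake   */ }
static void combo_aux_enable(struct aux_well *w)  { (void)w; /* ICL combo-PHY path */ }
static void combo_aux_disable(struct aux_well *w) { (void)w; }
static void hsw_enable(struct aux_well *w)        { (void)w; /* generic handshake  */ }
static void hsw_disable(struct aux_well *w)       { (void)w; }

static void aux_power_well_enable(struct aux_well *w)
{
	if (w->kind == PHY_TC)
		tc_aux_enable(w);
	else if (w->on_icelake)
		combo_aux_enable(w);
	else
		hsw_enable(w);
}

static void aux_power_well_disable(struct aux_well *w)
{
	if (w->kind == PHY_TC)
		hsw_disable(w);		/* TC wells need no special disable path */
	else if (w->on_icelake)
		combo_aux_disable(w);
	else
		hsw_disable(w);
}

int main(void)
{
	struct aux_well tc_well = { .kind = PHY_TC, .on_icelake = 0 };

	aux_power_well_enable(&tc_well);
	aux_power_well_disable(&tc_well);
	return 0;
}

xelpdp_aux_power_well_enable() (1802-1812) later repeats the TC ref-held assertion but drives the request bit through XELPDP_DP_AUX_CH_CTL instead.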
579 static bool hsw_power_well_enabled(struct drm_i915_private *dev_priv,
589 val = intel_de_read(dev_priv, regs->driver);
597 if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv) &&
599 val |= intel_de_read(dev_priv, regs->bios);
604 static void assert_can_enable_dc9(struct drm_i915_private *dev_priv)
606 drm_WARN_ONCE(&dev_priv->drm,
607 (intel_de_read(dev_priv, DC_STATE_EN) & DC_STATE_EN_DC9),
609 drm_WARN_ONCE(&dev_priv->drm,
610 intel_de_read(dev_priv, DC_STATE_EN) &
613 drm_WARN_ONCE(&dev_priv->drm,
614 intel_de_read(dev_priv, HSW_PWR_WELL_CTL2) &
617 drm_WARN_ONCE(&dev_priv->drm, intel_irqs_enabled(dev_priv),
629 static void assert_can_disable_dc9(struct drm_i915_private *dev_priv)
631 drm_WARN_ONCE(&dev_priv->drm, intel_irqs_enabled(dev_priv),
633 drm_WARN_ONCE(&dev_priv->drm,
634 intel_de_read(dev_priv, DC_STATE_EN) &
647 static void gen9_write_dc_state(struct drm_i915_private *dev_priv,
654 intel_de_write(dev_priv, DC_STATE_EN, state);
662 v = intel_de_read(dev_priv, DC_STATE_EN);
665 intel_de_write(dev_priv, DC_STATE_EN, state);
675 drm_err(&dev_priv->drm,
681 drm_dbg_kms(&dev_priv->drm,
686 static u32 gen9_dc_mask(struct drm_i915_private *dev_priv)
692 if (DISPLAY_VER(dev_priv) >= 12)
695 else if (DISPLAY_VER(dev_priv) == 11)
697 else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv))
723 * @dev_priv: i915 device instance
744 void gen9_set_dc_state(struct drm_i915_private *dev_priv, u32 state)
746 struct i915_power_domains *power_domains = &dev_priv->display.power.domains;
750 if (!HAS_DISPLAY(dev_priv))
753 if (drm_WARN_ON_ONCE(&dev_priv->drm,
757 val = intel_de_read(dev_priv, DC_STATE_EN);
758 mask = gen9_dc_mask(dev_priv);
759 drm_dbg_kms(&dev_priv->drm, "Setting DC state from %02x to %02x\n",
764 drm_err(&dev_priv->drm, "DC state mismatch (0x%x -> 0x%x)\n",
770 gen9_write_dc_state(dev_priv, val);
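
gen9_set_dc_state() (744-770) masks the requested state against what the platform allows (gen9_dc_mask()), keeps the DC_STATE_EN bits outside that mask, and complains when the cached dc_state no longer matches the hardware; gen9_write_dc_state() (647-681) then writes DC_STATE_EN, reads it back, and rewrites it until the readback matches, logging an error if it never does. The snippet below sketches only that write-and-verify loop; flaky_write(), the toy state encoding and the retry limit are assumptions used to exercise the rewrite path, not driver behaviour.

#include <stdint.h>
#include <stdio.h>

static uint32_t dc_state_en;	/* stand-in for the DC_STATE_EN register */

/* Toy model of a write that occasionally fails to land. */
static void flaky_write(uint32_t val)
{
	static int dropped;

	if (!dropped++)
		return;		/* pretend the very first write is lost */
	dc_state_en = val;
}

static int write_dc_state(uint32_t state)
{
	int rewrites;

	flaky_write(state);

	for (rewrites = 0; rewrites < 100; rewrites++) {
		if (dc_state_en == state)
			break;
		flaky_write(state);	/* rewrite until the readback matches */
	}

	if (dc_state_en != state) {
		fprintf(stderr, "writing DC state failed, now 0x%x\n",
			(unsigned int)dc_state_en);
		return -1;
	}

	if (rewrites)
		printf("rewrote DC state to 0x%x, attempts: %d\n",
		       (unsigned int)state, rewrites);
	return 0;
}

int main(void)
{
	return write_dc_state(0x2 /* e.g. "up to DC5" in this toy encoding */);
}

tgl_enable_dc3co()/tgl_disable_dc3co() and the DC5/DC6/DC9 helpers further down all funnel through gen9_set_dc_state() with different target states.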
775 static void tgl_enable_dc3co(struct drm_i915_private *dev_priv)
777 drm_dbg_kms(&dev_priv->drm, "Enabling DC3CO\n");
778 gen9_set_dc_state(dev_priv, DC_STATE_EN_DC3CO);
781 static void tgl_disable_dc3co(struct drm_i915_private *dev_priv)
783 drm_dbg_kms(&dev_priv->drm, "Disabling DC3CO\n");
784 intel_de_rmw(dev_priv, DC_STATE_EN, DC_STATE_DC3CO_STATUS, 0);
785 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
792 static void assert_can_enable_dc5(struct drm_i915_private *dev_priv)
797 if (DISPLAY_VER(dev_priv) == 12)
802 drm_WARN_ONCE(&dev_priv->drm,
803 intel_display_power_well_is_enabled(dev_priv, high_pg),
806 drm_WARN_ONCE(&dev_priv->drm,
807 (intel_de_read(dev_priv, DC_STATE_EN) &
810 assert_rpm_wakelock_held(&dev_priv->runtime_pm);
812 assert_dmc_loaded(dev_priv);
815 void gen9_enable_dc5(struct drm_i915_private *dev_priv)
817 assert_can_enable_dc5(dev_priv);
819 drm_dbg_kms(&dev_priv->drm, "Enabling DC5\n");
822 if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv))
823 intel_de_rmw(dev_priv, GEN8_CHICKEN_DCPR_1,
826 intel_dmc_wl_enable(&dev_priv->display);
828 gen9_set_dc_state(dev_priv, DC_STATE_EN_UPTO_DC5);
831 static void assert_can_enable_dc6(struct drm_i915_private *dev_priv)
833 drm_WARN_ONCE(&dev_priv->drm,
834 (intel_de_read(dev_priv, UTIL_PIN_CTL) &
838 drm_WARN_ONCE(&dev_priv->drm,
839 (intel_de_read(dev_priv, DC_STATE_EN) &
843 assert_dmc_loaded(dev_priv);
846 void skl_enable_dc6(struct drm_i915_private *dev_priv)
848 assert_can_enable_dc6(dev_priv);
850 drm_dbg_kms(&dev_priv->drm, "Enabling DC6\n");
853 if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv))
854 intel_de_rmw(dev_priv, GEN8_CHICKEN_DCPR_1,
857 intel_dmc_wl_enable(&dev_priv->display);
859 gen9_set_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6);
862 void bxt_enable_dc9(struct drm_i915_private *dev_priv)
864 assert_can_enable_dc9(dev_priv);
866 drm_dbg_kms(&dev_priv->drm, "Enabling DC9\n");
872 if (!HAS_PCH_SPLIT(dev_priv))
873 intel_pps_reset_all(dev_priv);
874 gen9_set_dc_state(dev_priv, DC_STATE_EN_DC9);
877 void bxt_disable_dc9(struct drm_i915_private *dev_priv)
879 assert_can_disable_dc9(dev_priv);
881 drm_dbg_kms(&dev_priv->drm, "Disabling DC9\n");
883 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
885 intel_pps_unlock_regs_wa(dev_priv);
888 static void hsw_power_well_sync_hw(struct drm_i915_private *dev_priv,
894 u32 bios_req = intel_de_read(dev_priv, regs->bios);
898 u32 drv_req = intel_de_read(dev_priv, regs->driver);
901 intel_de_write(dev_priv, regs->driver, drv_req | mask);
902 intel_de_write(dev_priv, regs->bios, bios_req & ~mask);
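
hsw_power_well_sync_hw() (888-902) handles the boot-time hand-off: if the BIOS control register still carries a request for the well, the driver first mirrors that request into its own control register and only then clears the BIOS one, so the well never drops while ownership changes. A minimal sketch, assuming plain variables in place of regs->bios and regs->driver:

#include <stdint.h>
#include <stdio.h>

static uint32_t ctl_bios = 0x2;	/* BIOS left its request bit set at boot */
static uint32_t ctl_driver;

static void power_well_sync_hw(uint32_t req_mask)
{
	if (ctl_bios & req_mask) {
		if (!(ctl_driver & req_mask))
			ctl_driver |= req_mask;	/* take over the request...      */
		ctl_bios &= ~req_mask;		/* ...then release the BIOS one  */
	}
}

int main(void)
{
	power_well_sync_hw(0x2);
	printf("bios=0x%x driver=0x%x\n",	/* expect bios=0x0 driver=0x2 */
	       (unsigned int)ctl_bios, (unsigned int)ctl_driver);
	return 0;
}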
906 static void bxt_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
909 bxt_dpio_phy_init(dev_priv, i915_power_well_instance(power_well)->bxt.phy);
912 static void bxt_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
915 bxt_dpio_phy_uninit(dev_priv, i915_power_well_instance(power_well)->bxt.phy);
918 static bool bxt_dpio_cmn_power_well_enabled(struct drm_i915_private *dev_priv,
921 return bxt_dpio_phy_is_enabled(dev_priv, i915_power_well_instance(power_well)->bxt.phy);
924 static void bxt_verify_dpio_phy_power_wells(struct drm_i915_private *dev_priv)
928 power_well = lookup_power_well(dev_priv, BXT_DISP_PW_DPIO_CMN_A);
930 bxt_dpio_phy_verify_state(dev_priv, i915_power_well_instance(power_well)->bxt.phy);
932 power_well = lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
934 bxt_dpio_phy_verify_state(dev_priv, i915_power_well_instance(power_well)->bxt.phy);
936 if (IS_GEMINILAKE(dev_priv)) {
937 power_well = lookup_power_well(dev_priv,
940 bxt_dpio_phy_verify_state(dev_priv,
945 static bool gen9_dc_off_power_well_enabled(struct drm_i915_private *dev_priv,
948 return ((intel_de_read(dev_priv, DC_STATE_EN) & DC_STATE_EN_DC3CO) == 0 &&
949 (intel_de_read(dev_priv, DC_STATE_EN) & DC_STATE_EN_UPTO_DC5_DC6_MASK) == 0);
952 static void gen9_assert_dbuf_enabled(struct drm_i915_private *dev_priv)
954 u8 hw_enabled_dbuf_slices = intel_enabled_dbuf_slices_mask(dev_priv);
955 u8 enabled_dbuf_slices = dev_priv->display.dbuf.enabled_slices;
957 drm_WARN(&dev_priv->drm,
964 void gen9_disable_dc_states(struct drm_i915_private *dev_priv)
966 struct i915_power_domains *power_domains = &dev_priv->display.power.domains;
970 tgl_disable_dc3co(dev_priv);
974 gen9_set_dc_state(dev_priv, DC_STATE_DISABLE);
976 if (!HAS_DISPLAY(dev_priv))
979 intel_dmc_wl_disable(&dev_priv->display);
981 intel_cdclk_get_cdclk(dev_priv, &cdclk_config);
983 drm_WARN_ON(&dev_priv->drm,
984 intel_cdclk_clock_changed(&dev_priv->display.cdclk.hw,
987 gen9_assert_dbuf_enabled(dev_priv);
989 if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv))
990 bxt_verify_dpio_phy_power_wells(dev_priv);
992 if (DISPLAY_VER(dev_priv) >= 11)
998 intel_combo_phy_init(dev_priv);
1001 static void gen9_dc_off_power_well_enable(struct drm_i915_private *dev_priv,
1004 gen9_disable_dc_states(dev_priv);
1007 static void gen9_dc_off_power_well_disable(struct drm_i915_private *dev_priv,
1010 struct i915_power_domains *power_domains = &dev_priv->display.power.domains;
1012 if (!intel_dmc_has_payload(dev_priv))
1017 tgl_enable_dc3co(dev_priv);
1020 skl_enable_dc6(dev_priv);
1023 gen9_enable_dc5(dev_priv);
1028 static void i9xx_power_well_sync_hw_noop(struct drm_i915_private *dev_priv,
1033 static void i9xx_always_on_power_well_noop(struct drm_i915_private *dev_priv,
1038 static bool i9xx_always_on_power_well_enabled(struct drm_i915_private *dev_priv,
1044 static void i830_pipes_power_well_enable(struct drm_i915_private *dev_priv,
1047 if ((intel_de_read(dev_priv, TRANSCONF(PIPE_A)) & TRANSCONF_ENABLE) == 0)
1048 i830_enable_pipe(dev_priv, PIPE_A);
1049 if ((intel_de_read(dev_priv, TRANSCONF(PIPE_B)) & TRANSCONF_ENABLE) == 0)
1050 i830_enable_pipe(dev_priv, PIPE_B);
1053 static void i830_pipes_power_well_disable(struct drm_i915_private *dev_priv,
1056 i830_disable_pipe(dev_priv, PIPE_B);
1057 i830_disable_pipe(dev_priv, PIPE_A);
1060 static bool i830_pipes_power_well_enabled(struct drm_i915_private *dev_priv,
1063 return intel_de_read(dev_priv, TRANSCONF(PIPE_A)) & TRANSCONF_ENABLE &&
1064 intel_de_read(dev_priv, TRANSCONF(PIPE_B)) & TRANSCONF_ENABLE;
1067 static void i830_pipes_power_well_sync_hw(struct drm_i915_private *dev_priv,
1071 i830_pipes_power_well_enable(dev_priv, power_well);
1073 i830_pipes_power_well_disable(dev_priv, power_well);
1076 static void vlv_set_power_well(struct drm_i915_private *dev_priv,
1088 vlv_punit_get(dev_priv);
1091 ((vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_STATUS) & mask) == state)
1096 ctrl = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL);
1099 vlv_punit_write(dev_priv, PUNIT_REG_PWRGT_CTRL, ctrl);
1102 drm_err(&dev_priv->drm,
1105 vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL));
1110 vlv_punit_put(dev_priv);
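
vlv_set_power_well() (1076-1110) never touches a display register: under the Punit sideband (vlv_punit_get()/vlv_punit_put()) it checks PUNIT_REG_PWRGT_STATUS, programs the requested state into PUNIT_REG_PWRGT_CTRL, and polls the status register until the Punit reports the same state or the wait times out. The sketch below models that sequence with plain variables; the punit_* helpers, the 2-bit field layout and the instant punit_tick() firmware model are illustrative assumptions, not the real sideband interface.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t pwrgt_ctrl, pwrgt_status;

static void punit_get(void) { /* would take the Punit sideband lock */ }
static void punit_put(void) { /* would release it */ }

/* Toy firmware: the status register simply follows the control register. */
static void punit_tick(void)
{
	pwrgt_status = pwrgt_ctrl;
}

static bool set_power_well(unsigned int idx, bool enable)
{
	uint32_t mask = 0x3u << (idx * 2);	/* hypothetical 2-bit field per well */
	uint32_t state = enable ? 0 : mask;	/* toy encoding: 0 means powered on  */
	bool ok = true;

	punit_get();

	if ((pwrgt_status & mask) == state)	/* already in the requested state */
		goto out;

	pwrgt_ctrl = (pwrgt_ctrl & ~mask) | state;	/* request the new state */

	for (int tries = 0; ; tries++) {
		punit_tick();
		if ((pwrgt_status & mask) == state)	/* Punit acknowledged */
			break;
		if (tries >= 100) {
			fprintf(stderr, "timeout setting power well %u\n", idx);
			ok = false;
			goto out;
		}
	}
out:
	punit_put();
	return ok;
}

int main(void)
{
	return set_power_well(3, false) ? 0 : 1;
}

chv_set_pipe_power_well() (1667-1699) follows the same Punit pattern, only against PUNIT_REG_DSPSSPM.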
1113 static void vlv_power_well_enable(struct drm_i915_private *dev_priv,
1116 vlv_set_power_well(dev_priv, power_well, true);
1119 static void vlv_power_well_disable(struct drm_i915_private *dev_priv,
1122 vlv_set_power_well(dev_priv, power_well, false);
1125 static bool vlv_power_well_enabled(struct drm_i915_private *dev_priv,
1137 vlv_punit_get(dev_priv);
1139 state = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_STATUS) & mask;
1144 drm_WARN_ON(&dev_priv->drm, state != PUNIT_PWRGT_PWR_ON(pw_idx) &&
1153 ctrl = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL) & mask;
1154 drm_WARN_ON(&dev_priv->drm, ctrl != state);
1156 vlv_punit_put(dev_priv);
1161 static void vlv_init_display_clock_gating(struct drm_i915_private *dev_priv)
1169 intel_de_rmw(dev_priv, DSPCLK_GATE_D(dev_priv),
1175 intel_de_write(dev_priv, MI_ARB_VLV,
1177 intel_de_write(dev_priv, CBR1_VLV, 0);
1179 drm_WARN_ON(&dev_priv->drm, RUNTIME_INFO(dev_priv)->rawclk_freq == 0);
1180 intel_de_write(dev_priv, RAWCLK_FREQ_VLV,
1181 DIV_ROUND_CLOSEST(RUNTIME_INFO(dev_priv)->rawclk_freq,
1185 static void vlv_display_power_well_init(struct drm_i915_private *dev_priv)
1198 for_each_pipe(dev_priv, pipe) {
1199 u32 val = intel_de_read(dev_priv, DPLL(pipe));
1205 intel_de_write(dev_priv, DPLL(pipe), val);
1208 vlv_init_display_clock_gating(dev_priv);
1210 spin_lock_irq(&dev_priv->irq_lock);
1211 valleyview_enable_display_irqs(dev_priv);
1212 spin_unlock_irq(&dev_priv->irq_lock);
1218 if (dev_priv->display.power.domains.initializing)
1221 intel_hpd_init(dev_priv);
1222 intel_hpd_poll_disable(dev_priv);
1225 for_each_intel_encoder(&dev_priv->drm, encoder) {
1230 intel_vga_redisable_power_on(dev_priv);
1232 intel_pps_unlock_regs_wa(dev_priv);
1235 static void vlv_display_power_well_deinit(struct drm_i915_private *dev_priv)
1237 spin_lock_irq(&dev_priv->irq_lock);
1238 valleyview_disable_display_irqs(dev_priv);
1239 spin_unlock_irq(&dev_priv->irq_lock);
1242 intel_synchronize_irq(dev_priv);
1244 intel_pps_reset_all(dev_priv);
1247 if (!dev_priv->drm.dev->power.is_suspended)
1248 intel_hpd_poll_enable(dev_priv);
1251 static void vlv_display_power_well_enable(struct drm_i915_private *dev_priv,
1254 vlv_set_power_well(dev_priv, power_well, true);
1256 vlv_display_power_well_init(dev_priv);
1259 static void vlv_display_power_well_disable(struct drm_i915_private *dev_priv,
1262 vlv_display_power_well_deinit(dev_priv);
1264 vlv_set_power_well(dev_priv, power_well, false);
1267 static void vlv_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
1273 vlv_set_power_well(dev_priv, power_well, true);
1286 intel_de_rmw(dev_priv, DPIO_CTL, 0, DPIO_CMNRST);
1289 static void vlv_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
1294 for_each_pipe(dev_priv, pipe)
1295 assert_pll_disabled(dev_priv, pipe);
1298 intel_de_rmw(dev_priv, DPIO_CTL, DPIO_CMNRST, 0);
1300 vlv_set_power_well(dev_priv, power_well, false);
1305 static void assert_chv_phy_status(struct drm_i915_private *dev_priv)
1308 lookup_power_well(dev_priv, VLV_DISP_PW_DPIO_CMN_BC);
1310 lookup_power_well(dev_priv, CHV_DISP_PW_DPIO_CMN_D);
1311 u32 phy_control = dev_priv->display.power.chv_phy_control;
1322 if (!dev_priv->display.power.chv_phy_assert[DPIO_PHY0])
1330 if (!dev_priv->display.power.chv_phy_assert[DPIO_PHY1])
1335 if (intel_power_well_is_enabled(dev_priv, cmn_bc)) {
1358 (intel_de_read(dev_priv, DPLL(PIPE_B)) & DPLL_VCO_ENABLE) == 0)
1376 if (intel_power_well_is_enabled(dev_priv, cmn_d)) {
1401 if (intel_de_wait(dev_priv, DISPLAY_PHY_STATUS,
1403 drm_err(&dev_priv->drm,
1405 intel_de_read(dev_priv, DISPLAY_PHY_STATUS) & phy_status_mask,
1406 phy_status, dev_priv->display.power.chv_phy_control);
1411 static void chv_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
1418 drm_WARN_ON_ONCE(&dev_priv->drm,
1429 vlv_set_power_well(dev_priv, power_well, true);
1432 if (intel_de_wait_for_set(dev_priv, DISPLAY_PHY_STATUS,
1434 drm_err(&dev_priv->drm, "Display PHY %d is not power up\n",
1437 vlv_dpio_get(dev_priv);
1440 tmp = vlv_dpio_read(dev_priv, phy, CHV_CMN_DW28);
1443 vlv_dpio_write(dev_priv, phy, CHV_CMN_DW28, tmp);
1446 tmp = vlv_dpio_read(dev_priv, phy, CHV_CMN_DW6_CH1);
1448 vlv_dpio_write(dev_priv, phy, CHV_CMN_DW6_CH1, tmp);
1455 tmp = vlv_dpio_read(dev_priv, phy, CHV_CMN_DW30);
1457 vlv_dpio_write(dev_priv, phy, CHV_CMN_DW30, tmp);
1460 vlv_dpio_put(dev_priv);
1462 dev_priv->display.power.chv_phy_control |= PHY_COM_LANE_RESET_DEASSERT(phy);
1463 intel_de_write(dev_priv, DISPLAY_PHY_CONTROL,
1464 dev_priv->display.power.chv_phy_control);
1466 drm_dbg_kms(&dev_priv->drm,
1468 phy, dev_priv->display.power.chv_phy_control);
1470 assert_chv_phy_status(dev_priv);
1473 static void chv_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
1479 drm_WARN_ON_ONCE(&dev_priv->drm,
1485 assert_pll_disabled(dev_priv, PIPE_A);
1486 assert_pll_disabled(dev_priv, PIPE_B);
1489 assert_pll_disabled(dev_priv, PIPE_C);
1492 dev_priv->display.power.chv_phy_control &= ~PHY_COM_LANE_RESET_DEASSERT(phy);
1493 intel_de_write(dev_priv, DISPLAY_PHY_CONTROL,
1494 dev_priv->display.power.chv_phy_control);
1496 vlv_set_power_well(dev_priv, power_well, false);
1498 drm_dbg_kms(&dev_priv->drm,
1500 phy, dev_priv->display.power.chv_phy_control);
1503 dev_priv->display.power.chv_phy_assert[phy] = true;
1505 assert_chv_phy_status(dev_priv);
1508 static void assert_chv_phy_powergate(struct drm_i915_private *dev_priv, enum dpio_phy phy,
1520 if (!dev_priv->display.power.chv_phy_assert[phy])
1528 vlv_dpio_get(dev_priv);
1529 val = vlv_dpio_read(dev_priv, phy, reg);
1530 vlv_dpio_put(dev_priv);
1563 drm_WARN(&dev_priv->drm, actual != expected,
1572 bool chv_phy_powergate_ch(struct drm_i915_private *dev_priv, enum dpio_phy phy,
1575 struct i915_power_domains *power_domains = &dev_priv->display.power.domains;
1580 was_override = dev_priv->display.power.chv_phy_control & PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1586 dev_priv->display.power.chv_phy_control |= PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1588 dev_priv->display.power.chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1590 intel_de_write(dev_priv, DISPLAY_PHY_CONTROL,
1591 dev_priv->display.power.chv_phy_control);
1593 drm_dbg_kms(&dev_priv->drm,
1595 phy, ch, dev_priv->display.power.chv_phy_control);
1597 assert_chv_phy_status(dev_priv);
1608 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1609 struct i915_power_domains *power_domains = &dev_priv->display.power.domains;
1615 dev_priv->display.power.chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD(0xf, phy, ch);
1616 dev_priv->display.power.chv_phy_control |= PHY_CH_POWER_DOWN_OVRD(mask, phy, ch);
1619 dev_priv->display.power.chv_phy_control |= PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1621 dev_priv->display.power.chv_phy_control &= ~PHY_CH_POWER_DOWN_OVRD_EN(phy, ch);
1623 intel_de_write(dev_priv, DISPLAY_PHY_CONTROL,
1624 dev_priv->display.power.chv_phy_control);
1626 drm_dbg_kms(&dev_priv->drm,
1628 phy, ch, mask, dev_priv->display.power.chv_phy_control);
1630 assert_chv_phy_status(dev_priv);
1632 assert_chv_phy_powergate(dev_priv, phy, ch, override, mask);
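
chv_phy_powergate_ch() (1572-1597) and the lane variant below it (1608-1632) never read DISPLAY_PHY_CONTROL back from hardware; under the power-domains lock they update the cached dev_priv->display.power.chv_phy_control copy, flip the per-channel override-enable and per-lane override bits in that copy, write the whole cached value to the register, and then re-check the PHY status. A self-contained sketch of that shadow-register bookkeeping, with made-up bit positions standing in for PHY_CH_POWER_DOWN_OVRD()/PHY_CH_POWER_DOWN_OVRD_EN():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Toy bit layout: one override-enable bit and a 4-bit lane mask per channel. */
#define OVRD_EN(phy, ch)           (1u << ((phy) * 8 + (ch)))
#define OVRD_LANES(mask, phy, ch)  ((uint32_t)(mask) << ((phy) * 8 + (ch) * 4 + 2))

static uint32_t phy_control_cache;	/* software shadow of the control register */
static uint32_t phy_control_reg;	/* the "hardware" register                 */

static void phy_control_flush(void)
{
	phy_control_reg = phy_control_cache;	/* always a full-register write */
}

static bool phy_powergate_ch(int phy, int ch, bool override)
{
	bool was_override = phy_control_cache & OVRD_EN(phy, ch);

	if (override)
		phy_control_cache |= OVRD_EN(phy, ch);
	else
		phy_control_cache &= ~OVRD_EN(phy, ch);

	phy_control_flush();
	return was_override;
}

static void phy_powergate_lanes(int phy, int ch, bool override, unsigned int mask)
{
	phy_control_cache &= ~OVRD_LANES(0xf, phy, ch);	/* replace the lane mask */
	phy_control_cache |= OVRD_LANES(mask, phy, ch);

	if (override)
		phy_control_cache |= OVRD_EN(phy, ch);
	else
		phy_control_cache &= ~OVRD_EN(phy, ch);

	phy_control_flush();
}

int main(void)
{
	phy_powergate_ch(0, 1, true);
	phy_powergate_lanes(0, 1, false, 0x3);
	printf("DISPLAY_PHY_CONTROL (toy) = 0x%08x\n",
	       (unsigned int)phy_control_reg);
	return 0;
}

The driver keeps the authoritative value in software and always writes the full register; assert_chv_phy_status() (1305-1406) later checks that same cached value against DISPLAY_PHY_STATUS.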
1637 static bool chv_pipe_power_well_enabled(struct drm_i915_private *dev_priv,
1644 vlv_punit_get(dev_priv);
1646 state = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) & DP_SSS_MASK(pipe);
1651 drm_WARN_ON(&dev_priv->drm, state != DP_SSS_PWR_ON(pipe) &&
1659 ctrl = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) & DP_SSC_MASK(pipe);
1660 drm_WARN_ON(&dev_priv->drm, ctrl << 16 != state);
1662 vlv_punit_put(dev_priv);
1667 static void chv_set_pipe_power_well(struct drm_i915_private *dev_priv,
1677 vlv_punit_get(dev_priv);
1680 ((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) & DP_SSS_MASK(pipe)) == state)
1685 ctrl = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
1688 vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, ctrl);
1691 drm_err(&dev_priv->drm,
1694 vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM));
1699 vlv_punit_put(dev_priv);
1702 static void chv_pipe_power_well_sync_hw(struct drm_i915_private *dev_priv,
1705 intel_de_write(dev_priv, DISPLAY_PHY_CONTROL,
1706 dev_priv->display.power.chv_phy_control);
1709 static void chv_pipe_power_well_enable(struct drm_i915_private *dev_priv,
1712 chv_set_pipe_power_well(dev_priv, power_well, true);
1714 vlv_display_power_well_init(dev_priv);
1717 static void chv_pipe_power_well_disable(struct drm_i915_private *dev_priv,
1720 vlv_display_power_well_deinit(dev_priv);
1722 chv_set_pipe_power_well(dev_priv, power_well, false);
1792 tgl_tc_cold_off_power_well_is_enabled(struct drm_i915_private *dev_priv,
1802 static void xelpdp_aux_power_well_enable(struct drm_i915_private *dev_priv,
1806 enum phy phy = icl_aux_pw_to_phy(dev_priv, power_well);
1808 if (intel_phy_is_tc(dev_priv, phy))
1809 icl_tc_port_assert_ref_held(dev_priv, power_well,
1810 aux_ch_to_digital_port(dev_priv, aux_ch));
1812 intel_de_rmw(dev_priv, XELPDP_DP_AUX_CH_CTL(dev_priv, aux_ch),
1825 static void xelpdp_aux_power_well_disable(struct drm_i915_private *dev_priv,
1830 intel_de_rmw(dev_priv, XELPDP_DP_AUX_CH_CTL(dev_priv, aux_ch),
1836 static bool xelpdp_aux_power_well_enabled(struct drm_i915_private *dev_priv,
1841 return intel_de_read(dev_priv, XELPDP_DP_AUX_CH_CTL(dev_priv, aux_ch)) &
1845 static void xe2lpd_pica_power_well_enable(struct drm_i915_private *dev_priv,
1848 intel_de_write(dev_priv, XE2LPD_PICA_PW_CTL,
1851 if (intel_de_wait_for_set(dev_priv, XE2LPD_PICA_PW_CTL,
1853 drm_dbg_kms(&dev_priv->drm, "pica power well enable timeout\n");
1855 drm_WARN(&dev_priv->drm, 1, "Power well PICA timeout when enabled");
1859 static void xe2lpd_pica_power_well_disable(struct drm_i915_private *dev_priv,
1862 intel_de_write(dev_priv, XE2LPD_PICA_PW_CTL, 0);
1864 if (intel_de_wait_for_clear(dev_priv, XE2LPD_PICA_PW_CTL,
1866 drm_dbg_kms(&dev_priv->drm, "pica power well disable timeout\n");
1868 drm_WARN(&dev_priv->drm, 1, "Power well PICA timeout when disabled");
1872 static bool xe2lpd_pica_power_well_enabled(struct drm_i915_private *dev_priv,
1875 return intel_de_read(dev_priv, XE2LPD_PICA_PW_CTL) &