Lines matching refs: dev_priv (i915 display CDCLK code, intel_cdclk.c)

120 void intel_cdclk_get_cdclk(struct drm_i915_private *dev_priv,
123 dev_priv->display.funcs.cdclk->get_cdclk(dev_priv, cdclk_config);
126 static void intel_cdclk_set_cdclk(struct drm_i915_private *dev_priv,
130 dev_priv->display.funcs.cdclk->set_cdclk(dev_priv, cdclk_config, pipe);
133 static int intel_cdclk_modeset_calc_cdclk(struct drm_i915_private *dev_priv,
136 return dev_priv->display.funcs.cdclk->modeset_calc_cdclk(cdclk_config);
139 static u8 intel_cdclk_calc_voltage_level(struct drm_i915_private *dev_priv,
142 return dev_priv->display.funcs.cdclk->calc_voltage_level(cdclk);
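
(The wrappers above dispatch through the per-platform vtable that intel_init_cdclk_hooks(), near the end of this listing, installs in dev_priv->display.funcs.cdclk. A minimal sketch of that ops-table pattern follows; the member names are taken from the call sites above, but the exact parameter types beyond what those fragments show are assumptions made for illustration.)

	/* Sketch of the per-platform CDCLK ops table implied by the
	 * wrappers above; parameter types are partly assumed. */
	struct intel_cdclk_funcs {
		void (*get_cdclk)(struct drm_i915_private *dev_priv,
				  struct intel_cdclk_config *cdclk_config);
		void (*set_cdclk)(struct drm_i915_private *dev_priv,
				  const struct intel_cdclk_config *cdclk_config,
				  enum pipe pipe);
		int (*modeset_calc_cdclk)(struct intel_cdclk_state *cdclk_state);
		u8 (*calc_voltage_level)(int cdclk);
	};
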
145 static void fixed_133mhz_get_cdclk(struct drm_i915_private *dev_priv,
151 static void fixed_200mhz_get_cdclk(struct drm_i915_private *dev_priv,
157 static void fixed_266mhz_get_cdclk(struct drm_i915_private *dev_priv,
163 static void fixed_333mhz_get_cdclk(struct drm_i915_private *dev_priv,
169 static void fixed_400mhz_get_cdclk(struct drm_i915_private *dev_priv,
175 static void fixed_450mhz_get_cdclk(struct drm_i915_private *dev_priv,
181 static void i85x_get_cdclk(struct drm_i915_private *dev_priv,
184 struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
223 static void i915gm_get_cdclk(struct drm_i915_private *dev_priv,
226 struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
247 static void i945gm_get_cdclk(struct drm_i915_private *dev_priv,
250 struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
271 static unsigned int intel_hpll_vco(struct drm_i915_private *dev_priv)
315 if (IS_GM45(dev_priv))
317 else if (IS_G45(dev_priv))
319 else if (IS_I965GM(dev_priv))
321 else if (IS_PINEVIEW(dev_priv))
323 else if (IS_G33(dev_priv))
328 tmp = intel_de_read(dev_priv,
329 IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv) ? HPLLVCO_MOBILE : HPLLVCO);
333 drm_err(&dev_priv->drm, "Bad HPLL VCO (HPLLVCO=0x%02x)\n",
336 drm_dbg_kms(&dev_priv->drm, "HPLL VCO %u kHz\n", vco);
341 static void g33_get_cdclk(struct drm_i915_private *dev_priv,
344 struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
353 cdclk_config->vco = intel_hpll_vco(dev_priv);
384 drm_err(&dev_priv->drm,
390 static void pnv_get_cdclk(struct drm_i915_private *dev_priv,
393 struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
412 drm_err(&dev_priv->drm,
424 static void i965gm_get_cdclk(struct drm_i915_private *dev_priv,
427 struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
435 cdclk_config->vco = intel_hpll_vco(dev_priv);
463 drm_err(&dev_priv->drm,
469 static void gm45_get_cdclk(struct drm_i915_private *dev_priv,
472 struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
476 cdclk_config->vco = intel_hpll_vco(dev_priv);
492 drm_err(&dev_priv->drm,
500 static void hsw_get_cdclk(struct drm_i915_private *dev_priv,
503 u32 lcpll = intel_de_read(dev_priv, LCPLL_CTL);
508 else if (intel_de_read(dev_priv, FUSE_STRAP) & HSW_CDCLK_LIMIT)
512 else if (IS_HASWELL_ULT(dev_priv))
518 static int vlv_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
520 int freq_320 = (dev_priv->hpll_freq << 1) % 320000 != 0 ?
528 if (IS_VALLEYVIEW(dev_priv) && min_cdclk > freq_320)
538 static u8 vlv_calc_voltage_level(struct drm_i915_private *dev_priv, int cdclk)
540 if (IS_VALLEYVIEW(dev_priv)) {
553 return DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1, cdclk) - 1;
557 static void vlv_get_cdclk(struct drm_i915_private *dev_priv,
562 vlv_iosf_sb_get(dev_priv,
565 cdclk_config->vco = vlv_get_hpll_vco(dev_priv);
566 cdclk_config->cdclk = vlv_get_cck_clock(dev_priv, "cdclk",
570 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
572 vlv_iosf_sb_put(dev_priv,
575 if (IS_VALLEYVIEW(dev_priv))
583 static void vlv_program_pfi_credits(struct drm_i915_private *dev_priv)
587 if (IS_CHERRYVIEW(dev_priv))
592 if (dev_priv->display.cdclk.hw.cdclk >= dev_priv->czclk_freq) {
594 if (IS_CHERRYVIEW(dev_priv))
606 intel_de_write(dev_priv, GCI_CONTROL,
609 intel_de_write(dev_priv, GCI_CONTROL,
616 drm_WARN_ON(&dev_priv->drm,
617 intel_de_read(dev_priv, GCI_CONTROL) & PFI_CREDIT_RESEND);
620 static void vlv_set_cdclk(struct drm_i915_private *dev_priv,
646 wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
648 vlv_iosf_sb_get(dev_priv,
653 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
656 vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
657 if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
660 drm_err(&dev_priv->drm,
667 divider = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1,
671 val = vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL);
674 vlv_cck_write(dev_priv, CCK_DISPLAY_CLOCK_CONTROL, val);
676 if (wait_for((vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL) &
679 drm_err(&dev_priv->drm,
684 val = vlv_bunit_read(dev_priv, BUNIT_REG_BISOC);
695 vlv_bunit_write(dev_priv, BUNIT_REG_BISOC, val);
697 vlv_iosf_sb_put(dev_priv,
702 intel_update_cdclk(dev_priv);
704 vlv_program_pfi_credits(dev_priv);
706 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
709 static void chv_set_cdclk(struct drm_i915_private *dev_priv,
734 wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
736 vlv_punit_get(dev_priv);
737 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
740 vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
741 if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
744 drm_err(&dev_priv->drm,
748 vlv_punit_put(dev_priv);
750 intel_update_cdclk(dev_priv);
752 vlv_program_pfi_credits(dev_priv);
754 intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
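
(vlv_set_cdclk() and chv_set_cdclk() above share the same Punit request/ack handshake: write the requested CDCLK field to PUNIT_REG_DSPSSPM, then poll until the Punit reflects it back. A hedged sketch of that shape follows; the helper name, the req/ack mask parameters and the 50 ms timeout are assumptions for illustration, only the register name and the wait_for()/drm_err() pattern come from the fragments.)

	/* Sketch of the Punit frequency request/ack handshake used by the
	 * VLV/CHV set_cdclk paths; mask/cmd parameters are hypothetical. */
	static void example_vlv_punit_freq_request(struct drm_i915_private *dev_priv,
						   u32 req_mask, u32 ack_mask, u32 cmd)
	{
		u32 val;

		/* request the new frequency from the Punit */
		val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
		val &= ~req_mask;
		val |= cmd;
		vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);

		/* poll until the Punit acknowledges the requested value */
		if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
			      ack_mask) == cmd, 50))
			drm_err(&dev_priv->drm,
				"timed out waiting for Punit CDCLK change ack\n");
	}
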
784 static void bdw_get_cdclk(struct drm_i915_private *dev_priv,
787 u32 lcpll = intel_de_read(dev_priv, LCPLL_CTL);
792 else if (intel_de_read(dev_priv, FUSE_STRAP) & HSW_CDCLK_LIMIT)
828 static void bdw_set_cdclk(struct drm_i915_private *dev_priv,
835 if (drm_WARN(&dev_priv->drm,
836 (intel_de_read(dev_priv, LCPLL_CTL) &
844 ret = snb_pcode_write(&dev_priv->uncore, BDW_PCODE_DISPLAY_FREQ_CHANGE_REQ, 0x0);
846 drm_err(&dev_priv->drm,
851 intel_de_rmw(dev_priv, LCPLL_CTL,
858 if (wait_for_us(intel_de_read(dev_priv, LCPLL_CTL) &
860 drm_err(&dev_priv->drm, "Switching to FCLK failed\n");
862 intel_de_rmw(dev_priv, LCPLL_CTL,
865 intel_de_rmw(dev_priv, LCPLL_CTL,
868 if (wait_for_us((intel_de_read(dev_priv, LCPLL_CTL) &
870 drm_err(&dev_priv->drm, "Switching back to LCPLL failed\n");
872 snb_pcode_write(&dev_priv->uncore, HSW_PCODE_DE_WRITE_FREQ_REQ,
875 intel_de_write(dev_priv, CDCLK_FREQ,
878 intel_update_cdclk(dev_priv);
916 static void skl_dpll0_update(struct drm_i915_private *dev_priv,
924 val = intel_de_read(dev_priv, LCPLL1_CTL);
928 if (drm_WARN_ON(&dev_priv->drm, (val & LCPLL_PLL_LOCK) == 0))
931 val = intel_de_read(dev_priv, DPLL_CTRL1);
933 if (drm_WARN_ON(&dev_priv->drm,
957 static void skl_get_cdclk(struct drm_i915_private *dev_priv,
962 skl_dpll0_update(dev_priv, cdclk_config);
969 cdctl = intel_de_read(dev_priv, CDCLK_CTL);
1034 static u32 skl_dpll0_link_rate(struct drm_i915_private *dev_priv, int vco)
1036 drm_WARN_ON(&dev_priv->drm, vco != 8100000 && vco != 8640000);
1053 static void skl_dpll0_enable(struct drm_i915_private *dev_priv, int vco)
1055 intel_de_rmw(dev_priv, DPLL_CTRL1,
1060 skl_dpll0_link_rate(dev_priv, vco));
1061 intel_de_posting_read(dev_priv, DPLL_CTRL1);
1063 intel_de_rmw(dev_priv, LCPLL1_CTL,
1066 if (intel_de_wait_for_set(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 5))
1067 drm_err(&dev_priv->drm, "DPLL0 not locked\n");
1069 dev_priv->display.cdclk.hw.vco = vco;
1072 skl_set_preferred_cdclk_vco(dev_priv, vco);
1075 static void skl_dpll0_disable(struct drm_i915_private *dev_priv)
1077 intel_de_rmw(dev_priv, LCPLL1_CTL,
1080 if (intel_de_wait_for_clear(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 1))
1081 drm_err(&dev_priv->drm, "Couldn't disable DPLL0\n");
1083 dev_priv->display.cdclk.hw.vco = 0;
1086 static u32 skl_cdclk_freq_sel(struct drm_i915_private *dev_priv,
1091 drm_WARN_ON(&dev_priv->drm,
1092 cdclk != dev_priv->display.cdclk.hw.bypass);
1093 drm_WARN_ON(&dev_priv->drm, vco != 0);
1109 static void skl_set_cdclk(struct drm_i915_private *dev_priv,
1126 drm_WARN_ON_ONCE(&dev_priv->drm,
1127 IS_SKYLAKE(dev_priv) && vco == 8640000);
1129 ret = skl_pcode_request(&dev_priv->uncore, SKL_PCODE_CDCLK_CONTROL,
1134 drm_err(&dev_priv->drm,
1139 freq_select = skl_cdclk_freq_sel(dev_priv, cdclk, vco);
1141 if (dev_priv->display.cdclk.hw.vco != 0 &&
1142 dev_priv->display.cdclk.hw.vco != vco)
1143 skl_dpll0_disable(dev_priv);
1145 cdclk_ctl = intel_de_read(dev_priv, CDCLK_CTL);
1147 if (dev_priv->display.cdclk.hw.vco != vco) {
1151 intel_de_write(dev_priv, CDCLK_CTL, cdclk_ctl);
1156 intel_de_write(dev_priv, CDCLK_CTL, cdclk_ctl);
1157 intel_de_posting_read(dev_priv, CDCLK_CTL);
1159 if (dev_priv->display.cdclk.hw.vco != vco)
1160 skl_dpll0_enable(dev_priv, vco);
1164 intel_de_write(dev_priv, CDCLK_CTL, cdclk_ctl);
1167 intel_de_write(dev_priv, CDCLK_CTL, cdclk_ctl);
1171 intel_de_write(dev_priv, CDCLK_CTL, cdclk_ctl);
1172 intel_de_posting_read(dev_priv, CDCLK_CTL);
1175 snb_pcode_write(&dev_priv->uncore, SKL_PCODE_CDCLK_CONTROL,
1178 intel_update_cdclk(dev_priv);
1181 static void skl_sanitize_cdclk(struct drm_i915_private *dev_priv)
1190 if ((intel_de_read(dev_priv, SWF_ILK(0x18)) & 0x00FFFFFF) == 0)
1193 intel_update_cdclk(dev_priv);
1194 intel_cdclk_dump_config(dev_priv, &dev_priv->display.cdclk.hw, "Current CDCLK");
1197 if (dev_priv->display.cdclk.hw.vco == 0 ||
1198 dev_priv->display.cdclk.hw.cdclk == dev_priv->display.cdclk.hw.bypass)
1207 cdctl = intel_de_read(dev_priv, CDCLK_CTL);
1209 skl_cdclk_decimal(dev_priv->display.cdclk.hw.cdclk);
1215 drm_dbg_kms(&dev_priv->drm, "Sanitizing cdclk programmed by pre-os\n");
1218 dev_priv->display.cdclk.hw.cdclk = 0;
1220 dev_priv->display.cdclk.hw.vco = ~0;
1223 static void skl_cdclk_init_hw(struct drm_i915_private *dev_priv)
1227 skl_sanitize_cdclk(dev_priv);
1229 if (dev_priv->display.cdclk.hw.cdclk != 0 &&
1230 dev_priv->display.cdclk.hw.vco != 0) {
1235 if (dev_priv->display.cdclk.skl_preferred_vco_freq == 0)
1236 skl_set_preferred_cdclk_vco(dev_priv,
1237 dev_priv->display.cdclk.hw.vco);
1241 cdclk_config = dev_priv->display.cdclk.hw;
1243 cdclk_config.vco = dev_priv->display.cdclk.skl_preferred_vco_freq;
1249 skl_set_cdclk(dev_priv, &cdclk_config, INVALID_PIPE);
1252 static void skl_cdclk_uninit_hw(struct drm_i915_private *dev_priv)
1254 struct intel_cdclk_config cdclk_config = dev_priv->display.cdclk.hw;
1260 skl_set_cdclk(dev_priv, &cdclk_config, INVALID_PIPE);
1460 static int bxt_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
1462 const struct intel_cdclk_vals *table = dev_priv->display.cdclk.table;
1466 if (table[i].refclk == dev_priv->display.cdclk.hw.ref &&
1470 drm_WARN(&dev_priv->drm, 1,
1472 min_cdclk, dev_priv->display.cdclk.hw.ref);
1476 static int bxt_calc_cdclk_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
1478 const struct intel_cdclk_vals *table = dev_priv->display.cdclk.table;
1481 if (cdclk == dev_priv->display.cdclk.hw.bypass)
1485 if (table[i].refclk == dev_priv->display.cdclk.hw.ref &&
1487 return dev_priv->display.cdclk.hw.ref * table[i].ratio;
1489 drm_WARN(&dev_priv->drm, 1, "cdclk %d not valid for refclk %u\n",
1490 cdclk, dev_priv->display.cdclk.hw.ref);
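
(bxt_calc_cdclk() and bxt_calc_cdclk_pll_vco() above both walk the platform cdclk table keyed by the current reference clock. A hedged sketch of that lookup, assuming a zero-terminated table and the field names visible in the fragments; the helper name is hypothetical and the fallback value is illustrative.)

	/* Sketch of the refclk-keyed table lookup: pick the first entry for
	 * this reference clock whose cdclk is at least min_cdclk. */
	static int example_bxt_calc_cdclk(const struct intel_cdclk_vals *table,
					  unsigned int refclk, int min_cdclk)
	{
		int i;

		for (i = 0; table[i].refclk; i++) {
			if (table[i].refclk == refclk &&
			    table[i].cdclk >= min_cdclk)
				return table[i].cdclk;
		}

		/* callers warn "cdclk %d not valid for refclk %u" here */
		return 0;
	}
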
1572 static void icl_readout_refclk(struct drm_i915_private *dev_priv,
1575 u32 dssm = intel_de_read(dev_priv, SKL_DSSM) & ICL_DSSM_CDCLK_PLL_REFCLK_MASK;
1593 static void bxt_de_pll_readout(struct drm_i915_private *dev_priv,
1598 if (IS_DG2(dev_priv))
1600 else if (DISPLAY_VER(dev_priv) >= 11)
1601 icl_readout_refclk(dev_priv, cdclk_config);
1605 val = intel_de_read(dev_priv, BXT_DE_PLL_ENABLE);
1620 if (DISPLAY_VER(dev_priv) >= 11)
1623 ratio = intel_de_read(dev_priv, BXT_DE_PLL_CTL) & BXT_DE_PLL_RATIO_MASK;
1628 static void bxt_get_cdclk(struct drm_i915_private *dev_priv,
1635 bxt_de_pll_readout(dev_priv, cdclk_config);
1637 if (DISPLAY_VER(dev_priv) >= 12)
1639 else if (DISPLAY_VER(dev_priv) >= 11)
1649 divider = intel_de_read(dev_priv, CDCLK_CTL) & BXT_CDCLK_CD2X_DIV_SEL_MASK;
1669 if (HAS_CDCLK_SQUASH(dev_priv))
1670 squash_ctl = intel_de_read(dev_priv, CDCLK_SQUASH_CTL);
1686 if (DISPLAY_VER(dev_priv) >= 20)
1687 cdclk_config->joined_mbus = intel_de_read(dev_priv, MBUS_CTL) & MBUS_JOIN;
1693 intel_cdclk_calc_voltage_level(dev_priv, cdclk_config->cdclk);
1696 static void bxt_de_pll_disable(struct drm_i915_private *dev_priv)
1698 intel_de_write(dev_priv, BXT_DE_PLL_ENABLE, 0);
1701 if (intel_de_wait_for_clear(dev_priv,
1703 drm_err(&dev_priv->drm, "timeout waiting for DE PLL unlock\n");
1705 dev_priv->display.cdclk.hw.vco = 0;
1708 static void bxt_de_pll_enable(struct drm_i915_private *dev_priv, int vco)
1710 int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->display.cdclk.hw.ref);
1712 intel_de_rmw(dev_priv, BXT_DE_PLL_CTL,
1715 intel_de_write(dev_priv, BXT_DE_PLL_ENABLE, BXT_DE_PLL_PLL_ENABLE);
1718 if (intel_de_wait_for_set(dev_priv,
1720 drm_err(&dev_priv->drm, "timeout waiting for DE PLL lock\n");
1722 dev_priv->display.cdclk.hw.vco = vco;
1725 static void icl_cdclk_pll_disable(struct drm_i915_private *dev_priv)
1727 intel_de_rmw(dev_priv, BXT_DE_PLL_ENABLE,
1731 if (intel_de_wait_for_clear(dev_priv, BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1732 drm_err(&dev_priv->drm, "timeout waiting for CDCLK PLL unlock\n");
1734 dev_priv->display.cdclk.hw.vco = 0;
1737 static void icl_cdclk_pll_enable(struct drm_i915_private *dev_priv, int vco)
1739 int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->display.cdclk.hw.ref);
1743 intel_de_write(dev_priv, BXT_DE_PLL_ENABLE, val);
1746 intel_de_write(dev_priv, BXT_DE_PLL_ENABLE, val);
1749 if (intel_de_wait_for_set(dev_priv, BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1750 drm_err(&dev_priv->drm, "timeout waiting for CDCLK PLL lock\n");
1752 dev_priv->display.cdclk.hw.vco = vco;
1755 static void adlp_cdclk_pll_crawl(struct drm_i915_private *dev_priv, int vco)
1757 int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->display.cdclk.hw.ref);
1762 intel_de_write(dev_priv, BXT_DE_PLL_ENABLE, val);
1766 intel_de_write(dev_priv, BXT_DE_PLL_ENABLE, val);
1769 if (intel_de_wait_for_set(dev_priv, BXT_DE_PLL_ENABLE,
1771 drm_err(&dev_priv->drm, "timeout waiting for FREQ change request ack\n");
1774 intel_de_write(dev_priv, BXT_DE_PLL_ENABLE, val);
1776 dev_priv->display.cdclk.hw.vco = vco;
1779 static u32 bxt_cdclk_cd2x_pipe(struct drm_i915_private *dev_priv, enum pipe pipe)
1781 if (DISPLAY_VER(dev_priv) >= 12) {
1786 } else if (DISPLAY_VER(dev_priv) >= 11) {
1799 static u32 bxt_cdclk_cd2x_div_sel(struct drm_i915_private *dev_priv,
1805 drm_WARN_ON(&dev_priv->drm,
1806 cdclk != dev_priv->display.cdclk.hw.bypass);
1807 drm_WARN_ON(&dev_priv->drm, vco != 0);
1820 static u16 cdclk_squash_waveform(struct drm_i915_private *dev_priv,
1823 const struct intel_cdclk_vals *table = dev_priv->display.cdclk.table;
1826 if (cdclk == dev_priv->display.cdclk.hw.bypass)
1830 if (table[i].refclk == dev_priv->display.cdclk.hw.ref &&
1834 drm_WARN(&dev_priv->drm, 1, "cdclk %d not valid for refclk %u\n",
1835 cdclk, dev_priv->display.cdclk.hw.ref);
1986 static bool pll_enable_wa_needed(struct drm_i915_private *dev_priv)
1988 return (DISPLAY_VER_FULL(dev_priv) == IP_VER(20, 0) ||
1989 DISPLAY_VER_FULL(dev_priv) == IP_VER(14, 0) ||
1990 IS_DG2(dev_priv)) &&
1991 dev_priv->display.cdclk.hw.vco > 0;
2024 static void _bxt_set_cdclk(struct drm_i915_private *dev_priv,
2031 if (HAS_CDCLK_CRAWL(dev_priv) && dev_priv->display.cdclk.hw.vco > 0 && vco > 0 &&
2032 !cdclk_pll_is_unknown(dev_priv->display.cdclk.hw.vco)) {
2033 if (dev_priv->display.cdclk.hw.vco != vco)
2034 adlp_cdclk_pll_crawl(dev_priv, vco);
2035 } else if (DISPLAY_VER(dev_priv) >= 11) {
2037 if (pll_enable_wa_needed(dev_priv))
2038 dg2_cdclk_squash_program(dev_priv, 0);
2040 icl_cdclk_pll_update(dev_priv, vco);
2042 bxt_cdclk_pll_update(dev_priv, vco);
2044 if (HAS_CDCLK_SQUASH(dev_priv)) {
2045 u16 waveform = cdclk_squash_waveform(dev_priv, cdclk);
2047 dg2_cdclk_squash_program(dev_priv, waveform);
2050 intel_de_write(dev_priv, CDCLK_CTL, bxt_cdclk_ctl(dev_priv, cdclk_config, pipe));
2053 intel_crtc_wait_for_next_vblank(intel_crtc_for_pipe(dev_priv, pipe));
2056 static void bxt_set_cdclk(struct drm_i915_private *dev_priv,
2070 if (DISPLAY_VER(dev_priv) >= 14 || IS_DG2(dev_priv))
2072 else if (DISPLAY_VER(dev_priv) >= 11)
2073 ret = skl_pcode_request(&dev_priv->uncore, SKL_PCODE_CDCLK_CONTROL,
2082 ret = snb_pcode_write_timeout(&dev_priv->uncore,
2087 drm_err(&dev_priv->drm,
2093 if (DISPLAY_VER(dev_priv) >= 20 && cdclk < dev_priv->display.cdclk.hw.cdclk)
2094 xe2lpd_mdclk_cdclk_ratio_program(dev_priv, cdclk_config);
2096 if (cdclk_compute_crawl_and_squash_midpoint(dev_priv, &dev_priv->display.cdclk.hw,
2098 _bxt_set_cdclk(dev_priv, &mid_cdclk_config, pipe);
2099 _bxt_set_cdclk(dev_priv, cdclk_config, pipe);
2101 _bxt_set_cdclk(dev_priv, cdclk_config, pipe);
2104 if (DISPLAY_VER(dev_priv) >= 20 && cdclk > dev_priv->display.cdclk.hw.cdclk)
2105 xe2lpd_mdclk_cdclk_ratio_program(dev_priv, cdclk_config);
2107 if (DISPLAY_VER(dev_priv) >= 14)
2112 else if (DISPLAY_VER(dev_priv) >= 11 && !IS_DG2(dev_priv))
2113 ret = snb_pcode_write(&dev_priv->uncore, SKL_PCODE_CDCLK_CONTROL,
2115 if (DISPLAY_VER(dev_priv) < 11) {
2122 ret = snb_pcode_write_timeout(&dev_priv->uncore,
2128 drm_err(&dev_priv->drm,
2134 intel_update_cdclk(dev_priv);
2136 if (DISPLAY_VER(dev_priv) >= 11)
2141 dev_priv->display.cdclk.hw.voltage_level = cdclk_config->voltage_level;
2144 static void bxt_sanitize_cdclk(struct drm_i915_private *dev_priv)
2149 intel_update_cdclk(dev_priv);
2150 intel_cdclk_dump_config(dev_priv, &dev_priv->display.cdclk.hw, "Current CDCLK");
2152 if (dev_priv->display.cdclk.hw.vco == 0 ||
2153 dev_priv->display.cdclk.hw.cdclk == dev_priv->display.cdclk.hw.bypass)
2157 cdclk = bxt_calc_cdclk(dev_priv, dev_priv->display.cdclk.hw.cdclk);
2158 if (cdclk != dev_priv->display.cdclk.hw.cdclk)
2162 vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
2163 if (vco != dev_priv->display.cdclk.hw.vco)
2171 cdctl = intel_de_read(dev_priv, CDCLK_CTL);
2172 expected = bxt_cdclk_ctl(dev_priv, &dev_priv->display.cdclk.hw, INVALID_PIPE);
2179 cdctl &= ~bxt_cdclk_cd2x_pipe(dev_priv, INVALID_PIPE);
2180 expected &= ~bxt_cdclk_cd2x_pipe(dev_priv, INVALID_PIPE);
2187 drm_dbg_kms(&dev_priv->drm, "Sanitizing cdclk programmed by pre-os\n");
2190 dev_priv->display.cdclk.hw.cdclk = 0;
2193 dev_priv->display.cdclk.hw.vco = ~0;
2196 static void bxt_cdclk_init_hw(struct drm_i915_private *dev_priv)
2200 bxt_sanitize_cdclk(dev_priv);
2202 if (dev_priv->display.cdclk.hw.cdclk != 0 &&
2203 dev_priv->display.cdclk.hw.vco != 0)
2206 cdclk_config = dev_priv->display.cdclk.hw;
2213 cdclk_config.cdclk = bxt_calc_cdclk(dev_priv, 0);
2214 cdclk_config.vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk_config.cdclk);
2216 intel_cdclk_calc_voltage_level(dev_priv, cdclk_config.cdclk);
2218 bxt_set_cdclk(dev_priv, &cdclk_config, INVALID_PIPE);
2221 static void bxt_cdclk_uninit_hw(struct drm_i915_private *dev_priv)
2223 struct intel_cdclk_config cdclk_config = dev_priv->display.cdclk.hw;
2228 intel_cdclk_calc_voltage_level(dev_priv, cdclk_config.cdclk);
2230 bxt_set_cdclk(dev_priv, &cdclk_config, INVALID_PIPE);
2237 * Initialize CDCLK. This consists mainly of initializing dev_priv->display.cdclk.hw and
2287 static bool intel_cdclk_can_crawl(struct drm_i915_private *dev_priv,
2293 if (!HAS_CDCLK_CRAWL(dev_priv))
2309 static bool intel_cdclk_can_squash(struct drm_i915_private *dev_priv,
2319 if (!HAS_CDCLK_SQUASH(dev_priv))
2348 * @dev_priv: i915 device
2356 static bool intel_cdclk_can_cd2x_update(struct drm_i915_private *dev_priv,
2361 if (DISPLAY_VER(dev_priv) < 10 && !IS_BROXTON(dev_priv))
2370 if (HAS_CDCLK_SQUASH(dev_priv))
2436 static void intel_set_cdclk(struct drm_i915_private *dev_priv,
2442 if (!intel_cdclk_changed(&dev_priv->display.cdclk.hw, cdclk_config))
2445 if (drm_WARN_ON_ONCE(&dev_priv->drm, !dev_priv->display.funcs.cdclk->set_cdclk))
2448 intel_cdclk_dump_config(dev_priv, cdclk_config, context);
2450 for_each_intel_encoder_with_psr(&dev_priv->drm, encoder) {
2456 intel_audio_cdclk_change_pre(dev_priv);
2463 mutex_lock(&dev_priv->display.gmbus.mutex);
2464 for_each_intel_dp(&dev_priv->drm, encoder) {
2468 &dev_priv->display.gmbus.mutex);
2471 intel_cdclk_set_cdclk(dev_priv, cdclk_config, pipe);
2473 for_each_intel_dp(&dev_priv->drm, encoder) {
2478 mutex_unlock(&dev_priv->display.gmbus.mutex);
2480 for_each_intel_encoder_with_psr(&dev_priv->drm, encoder) {
2486 intel_audio_cdclk_change_post(dev_priv);
2488 if (drm_WARN(&dev_priv->drm,
2489 intel_cdclk_changed(&dev_priv->display.cdclk.hw, cdclk_config),
2491 intel_cdclk_dump_config(dev_priv, &dev_priv->display.cdclk.hw, "[hw state]");
2492 intel_cdclk_dump_config(dev_priv, cdclk_config, "[sw state]");
2680 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2683 if (DISPLAY_VER(dev_priv) >= 10)
2685 else if (DISPLAY_VER(dev_priv) == 9 ||
2686 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
2688 else if (IS_CHERRYVIEW(dev_priv))
2699 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2703 for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane)
2754 struct drm_i915_private *dev_priv =
2764 if (IS_BROADWELL(dev_priv) && hsw_crtc_state_ips_capable(crtc_state))
2776 if (DISPLAY_VER(dev_priv) == 10) {
2779 } else if (DISPLAY_VER(dev_priv) == 9 || IS_BROADWELL(dev_priv)) {
2789 if (crtc_state->has_audio && DISPLAY_VER(dev_priv) >= 9)
2799 if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
2808 IS_VALLEYVIEW(dev_priv))
2817 IS_GEMINILAKE(dev_priv))
2832 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2858 min_cdclk = intel_bw_min_cdclk(dev_priv, bw_state);
2873 for_each_pipe(dev_priv, pipe)
2884 if (IS_GEMINILAKE(dev_priv) && cdclk_state->active_pipes &&
2888 if (min_cdclk > dev_priv->display.cdclk.max_cdclk_freq) {
2889 drm_dbg_kms(&dev_priv->drm,
2891 min_cdclk, dev_priv->display.cdclk.max_cdclk_freq);
2914 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2940 for_each_pipe(dev_priv, pipe)
2950 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2957 cdclk = vlv_calc_cdclk(dev_priv, min_cdclk);
2961 vlv_calc_voltage_level(dev_priv, cdclk);
2964 cdclk = vlv_calc_cdclk(dev_priv, cdclk_state->force_min_cdclk);
2968 vlv_calc_voltage_level(dev_priv, cdclk);
3006 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3013 vco = dev_priv->display.cdclk.skl_preferred_vco_freq;
3074 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3085 cdclk = bxt_calc_cdclk(dev_priv, min_cdclk);
3086 vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
3092 intel_cdclk_calc_voltage_level(dev_priv, cdclk));
3095 cdclk = bxt_calc_cdclk(dev_priv, cdclk_state->force_min_cdclk);
3096 vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
3101 intel_cdclk_calc_voltage_level(dev_priv, cdclk);
3153 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3156 cdclk_state = intel_atomic_get_global_obj_state(state, &dev_priv->display.cdclk.obj);
3212 int intel_cdclk_init(struct drm_i915_private *dev_priv)
3220 intel_atomic_global_obj_init(dev_priv, &dev_priv->display.cdclk.obj,
3243 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
3258 ret = intel_cdclk_modeset_calc_cdclk(dev_priv, new_cdclk_state);
3262 if (intel_cdclk_need_serialize(dev_priv, old_cdclk_state, new_cdclk_state)) {
3282 intel_cdclk_can_cd2x_update(dev_priv,
3289 crtc = intel_crtc_for_pipe(dev_priv, pipe);
3299 if (intel_cdclk_can_crawl_and_squash(dev_priv,
3302 drm_dbg_kms(&dev_priv->drm,
3304 } else if (intel_cdclk_can_squash(dev_priv,
3307 drm_dbg_kms(&dev_priv->drm,
3309 } else if (intel_cdclk_can_crawl(dev_priv,
3312 drm_dbg_kms(&dev_priv->drm,
3317 drm_dbg_kms(&dev_priv->drm,
3329 drm_dbg_kms(&dev_priv->drm,
3333 if (intel_mdclk_cdclk_ratio(dev_priv, &old_cdclk_state->actual) !=
3334 intel_mdclk_cdclk_ratio(dev_priv, &new_cdclk_state->actual)) {
3335 int ratio = intel_mdclk_cdclk_ratio(dev_priv, &new_cdclk_state->actual);
3342 drm_dbg_kms(&dev_priv->drm,
3346 drm_dbg_kms(&dev_priv->drm,
3354 static int intel_compute_max_dotclk(struct drm_i915_private *dev_priv)
3356 int max_cdclk_freq = dev_priv->display.cdclk.max_cdclk_freq;
3358 if (DISPLAY_VER(dev_priv) >= 10)
3360 else if (DISPLAY_VER(dev_priv) == 9 ||
3361 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
3363 else if (IS_CHERRYVIEW(dev_priv))
3365 else if (DISPLAY_VER(dev_priv) < 4)
3373 * @dev_priv: i915 device
3379 void intel_update_max_cdclk(struct drm_i915_private *dev_priv)
3381 if (IS_JASPERLAKE(dev_priv) || IS_ELKHARTLAKE(dev_priv)) {
3382 if (dev_priv->display.cdclk.hw.ref == 24000)
3383 dev_priv->display.cdclk.max_cdclk_freq = 552000;
3385 dev_priv->display.cdclk.max_cdclk_freq = 556800;
3386 } else if (DISPLAY_VER(dev_priv) >= 11) {
3387 if (dev_priv->display.cdclk.hw.ref == 24000)
3388 dev_priv->display.cdclk.max_cdclk_freq = 648000;
3390 dev_priv->display.cdclk.max_cdclk_freq = 652800;
3391 } else if (IS_GEMINILAKE(dev_priv)) {
3392 dev_priv->display.cdclk.max_cdclk_freq = 316800;
3393 } else if (IS_BROXTON(dev_priv)) {
3394 dev_priv->display.cdclk.max_cdclk_freq = 624000;
3395 } else if (DISPLAY_VER(dev_priv) == 9) {
3396 u32 limit = intel_de_read(dev_priv, SKL_DFSM) & SKL_DFSM_CDCLK_LIMIT_MASK;
3399 vco = dev_priv->display.cdclk.skl_preferred_vco_freq;
3400 drm_WARN_ON(&dev_priv->drm, vco != 8100000 && vco != 8640000);
3416 dev_priv->display.cdclk.max_cdclk_freq = skl_calc_cdclk(max_cdclk, vco);
3417 } else if (IS_BROADWELL(dev_priv)) {
3424 if (intel_de_read(dev_priv, FUSE_STRAP) & HSW_CDCLK_LIMIT)
3425 dev_priv->display.cdclk.max_cdclk_freq = 450000;
3426 else if (IS_BROADWELL_ULX(dev_priv))
3427 dev_priv->display.cdclk.max_cdclk_freq = 450000;
3428 else if (IS_BROADWELL_ULT(dev_priv))
3429 dev_priv->display.cdclk.max_cdclk_freq = 540000;
3431 dev_priv->display.cdclk.max_cdclk_freq = 675000;
3432 } else if (IS_CHERRYVIEW(dev_priv)) {
3433 dev_priv->display.cdclk.max_cdclk_freq = 320000;
3434 } else if (IS_VALLEYVIEW(dev_priv)) {
3435 dev_priv->display.cdclk.max_cdclk_freq = 400000;
3438 dev_priv->display.cdclk.max_cdclk_freq = dev_priv->display.cdclk.hw.cdclk;
3441 dev_priv->display.cdclk.max_dotclk_freq = intel_compute_max_dotclk(dev_priv);
3443 drm_dbg(&dev_priv->drm, "Max CD clock rate: %d kHz\n",
3444 dev_priv->display.cdclk.max_cdclk_freq);
3446 drm_dbg(&dev_priv->drm, "Max dotclock rate: %d kHz\n",
3447 dev_priv->display.cdclk.max_dotclk_freq);
3452 * @dev_priv: i915 device
3456 void intel_update_cdclk(struct drm_i915_private *dev_priv)
3458 intel_cdclk_get_cdclk(dev_priv, &dev_priv->display.cdclk.hw);
3466 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
3467 intel_de_write(dev_priv, GMBUSFREQ_VLV,
3468 DIV_ROUND_UP(dev_priv->display.cdclk.hw.cdclk, 1000));
3471 static int dg1_rawclk(struct drm_i915_private *dev_priv)
3477 intel_de_write(dev_priv, PCH_RAWCLK_FREQ,
3483 static int cnp_rawclk(struct drm_i915_private *dev_priv)
3488 if (intel_de_read(dev_priv, SFUSE_STRAP) & SFUSE_STRAP_RAW_FREQUENCY) {
3504 if (INTEL_PCH_TYPE(dev_priv) >= PCH_ICP)
3508 intel_de_write(dev_priv, PCH_RAWCLK_FREQ, rawclk);
3512 static int pch_rawclk(struct drm_i915_private *dev_priv)
3514 return (intel_de_read(dev_priv, PCH_RAWCLK_FREQ) & RAWCLK_FREQ_MASK) * 1000;
3517 static int vlv_hrawclk(struct drm_i915_private *dev_priv)
3520 return vlv_get_cck_clock_hpll(dev_priv, "hrawclk",
3524 static int i9xx_hrawclk(struct drm_i915_private *dev_priv)
3538 clkcfg = intel_de_read(dev_priv, CLKCFG) & CLKCFG_FSB_MASK;
3540 if (IS_MOBILE(dev_priv)) {
3582 * @dev_priv: i915 device
3587 u32 intel_read_rawclk(struct drm_i915_private *dev_priv)
3591 if (INTEL_PCH_TYPE(dev_priv) >= PCH_MTL)
3598 else if (INTEL_PCH_TYPE(dev_priv) >= PCH_DG1)
3599 freq = dg1_rawclk(dev_priv);
3600 else if (INTEL_PCH_TYPE(dev_priv) >= PCH_CNP)
3601 freq = cnp_rawclk(dev_priv);
3602 else if (HAS_PCH_SPLIT(dev_priv))
3603 freq = pch_rawclk(dev_priv);
3604 else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
3605 freq = vlv_hrawclk(dev_priv);
3606 else if (DISPLAY_VER(dev_priv) >= 3)
3607 freq = i9xx_hrawclk(dev_priv);
3774 * @dev_priv: i915 device
3776 void intel_init_cdclk_hooks(struct drm_i915_private *dev_priv)
3778 if (DISPLAY_VER(dev_priv) >= 20) {
3779 dev_priv->display.funcs.cdclk = &rplu_cdclk_funcs;
3780 dev_priv->display.cdclk.table = xe2lpd_cdclk_table;
3781 } else if (DISPLAY_VER(dev_priv) >= 14) {
3782 dev_priv->display.funcs.cdclk = &rplu_cdclk_funcs;
3783 dev_priv->display.cdclk.table = mtl_cdclk_table;
3784 } else if (IS_DG2(dev_priv)) {
3785 dev_priv->display.funcs.cdclk = &tgl_cdclk_funcs;
3786 dev_priv->display.cdclk.table = dg2_cdclk_table;
3787 } else if (IS_ALDERLAKE_P(dev_priv)) {
3789 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(dev_priv, STEP_A0, STEP_B0)) {
3790 dev_priv->display.cdclk.table = adlp_a_step_cdclk_table;
3791 dev_priv->display.funcs.cdclk = &tgl_cdclk_funcs;
3792 } else if (IS_RAPTORLAKE_U(dev_priv)) {
3793 dev_priv->display.cdclk.table = rplu_cdclk_table;
3794 dev_priv->display.funcs.cdclk = &rplu_cdclk_funcs;
3796 dev_priv->display.cdclk.table = adlp_cdclk_table;
3797 dev_priv->display.funcs.cdclk = &tgl_cdclk_funcs;
3799 } else if (IS_ROCKETLAKE(dev_priv)) {
3800 dev_priv->display.funcs.cdclk = &tgl_cdclk_funcs;
3801 dev_priv->display.cdclk.table = rkl_cdclk_table;
3802 } else if (DISPLAY_VER(dev_priv) >= 12) {
3803 dev_priv->display.funcs.cdclk = &tgl_cdclk_funcs;
3804 dev_priv->display.cdclk.table = icl_cdclk_table;
3805 } else if (IS_JASPERLAKE(dev_priv) || IS_ELKHARTLAKE(dev_priv)) {
3806 dev_priv->display.funcs.cdclk = &ehl_cdclk_funcs;
3807 dev_priv->display.cdclk.table = icl_cdclk_table;
3808 } else if (DISPLAY_VER(dev_priv) >= 11) {
3809 dev_priv->display.funcs.cdclk = &icl_cdclk_funcs;
3810 dev_priv->display.cdclk.table = icl_cdclk_table;
3811 } else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) {
3812 dev_priv->display.funcs.cdclk = &bxt_cdclk_funcs;
3813 if (IS_GEMINILAKE(dev_priv))
3814 dev_priv->display.cdclk.table = glk_cdclk_table;
3816 dev_priv->display.cdclk.table = bxt_cdclk_table;
3817 } else if (DISPLAY_VER(dev_priv) == 9) {
3818 dev_priv->display.funcs.cdclk = &skl_cdclk_funcs;
3819 } else if (IS_BROADWELL(dev_priv)) {
3820 dev_priv->display.funcs.cdclk = &bdw_cdclk_funcs;
3821 } else if (IS_HASWELL(dev_priv)) {
3822 dev_priv->display.funcs.cdclk = &hsw_cdclk_funcs;
3823 } else if (IS_CHERRYVIEW(dev_priv)) {
3824 dev_priv->display.funcs.cdclk = &chv_cdclk_funcs;
3825 } else if (IS_VALLEYVIEW(dev_priv)) {
3826 dev_priv->display.funcs.cdclk = &vlv_cdclk_funcs;
3827 } else if (IS_SANDYBRIDGE(dev_priv) || IS_IVYBRIDGE(dev_priv)) {
3828 dev_priv->display.funcs.cdclk = &fixed_400mhz_cdclk_funcs;
3829 } else if (IS_IRONLAKE(dev_priv)) {
3830 dev_priv->display.funcs.cdclk = &ilk_cdclk_funcs;
3831 } else if (IS_GM45(dev_priv)) {
3832 dev_priv->display.funcs.cdclk = &gm45_cdclk_funcs;
3833 } else if (IS_G45(dev_priv)) {
3834 dev_priv->display.funcs.cdclk = &g33_cdclk_funcs;
3835 } else if (IS_I965GM(dev_priv)) {
3836 dev_priv->display.funcs.cdclk = &i965gm_cdclk_funcs;
3837 } else if (IS_I965G(dev_priv)) {
3838 dev_priv->display.funcs.cdclk = &fixed_400mhz_cdclk_funcs;
3839 } else if (IS_PINEVIEW(dev_priv)) {
3840 dev_priv->display.funcs.cdclk = &pnv_cdclk_funcs;
3841 } else if (IS_G33(dev_priv)) {
3842 dev_priv->display.funcs.cdclk = &g33_cdclk_funcs;
3843 } else if (IS_I945GM(dev_priv)) {
3844 dev_priv->display.funcs.cdclk = &i945gm_cdclk_funcs;
3845 } else if (IS_I945G(dev_priv)) {
3846 dev_priv->display.funcs.cdclk = &fixed_400mhz_cdclk_funcs;
3847 } else if (IS_I915GM(dev_priv)) {
3848 dev_priv->display.funcs.cdclk = &i915gm_cdclk_funcs;
3849 } else if (IS_I915G(dev_priv)) {
3850 dev_priv->display.funcs.cdclk = &i915g_cdclk_funcs;
3851 } else if (IS_I865G(dev_priv)) {
3852 dev_priv->display.funcs.cdclk = &i865g_cdclk_funcs;
3853 } else if (IS_I85X(dev_priv)) {
3854 dev_priv->display.funcs.cdclk = &i85x_cdclk_funcs;
3855 } else if (IS_I845G(dev_priv)) {
3856 dev_priv->display.funcs.cdclk = &i845g_cdclk_funcs;
3857 } else if (IS_I830(dev_priv)) {
3858 dev_priv->display.funcs.cdclk = &i830_cdclk_funcs;
3861 if (drm_WARN(&dev_priv->drm, !dev_priv->display.funcs.cdclk,
3863 dev_priv->display.funcs.cdclk = &i830_cdclk_funcs;