Lines matching refs: dev_priv
(The leading numbers are line numbers in the source file; each function is trimmed to the lines that mention dev_priv, so function bodies below are elided wherever they don't touch it.)

96 static void chv_set_memory_dvfs(struct drm_i915_private *dev_priv, bool enable)
100 vlv_punit_get(dev_priv);
102 val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
109 vlv_punit_write(dev_priv, PUNIT_REG_DDR_SETUP2, val);
111 if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2) &
113 drm_err(&dev_priv->drm,
116 vlv_punit_put(dev_priv);
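The body of chv_set_memory_dvfs() between the read at 102 and the write at 109 is the usual Punit request/ack sequence. A minimal sketch of the elided middle, assuming the FORCE_DDR_* bit names from the CHV sideband interface (they do not appear in the matches above):

    val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
    if (enable)
            val &= ~FORCE_DDR_HIGH_FREQ;    /* allow DDR DVFS */
    else
            val |= FORCE_DDR_HIGH_FREQ;     /* pin DDR at the high frequency */
    val &= ~FORCE_DDR_LOW_FREQ;
    val |= FORCE_DDR_FREQ_REQ_ACK;          /* post the request; Punit clears on ack */
    vlv_punit_write(dev_priv, PUNIT_REG_DDR_SETUP2, val);
    /* the wait_for() at 111 then polls until FORCE_DDR_FREQ_REQ_ACK drops */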
119 static void chv_set_memory_pm5(struct drm_i915_private *dev_priv, bool enable)
123 vlv_punit_get(dev_priv);
125 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
130 vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
132 vlv_punit_put(dev_priv);
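chv_set_memory_pm5() is simpler: the lines elided between 125 and 130 just toggle one bit. A sketch, assuming the DSP_MAXFIFO_PM5_ENABLE bit name:

    val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
    if (enable)
            val |= DSP_MAXFIFO_PM5_ENABLE;  /* allow max-FIFO / PM5 entry */
    else
            val &= ~DSP_MAXFIFO_PM5_ENABLE;
    vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);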
138 static bool _intel_set_memory_cxsr(struct drm_i915_private *dev_priv, bool enable)
143 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
144 was_enabled = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF_VLV) & FW_CSPWRDWNEN;
145 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF_VLV, enable ? FW_CSPWRDWNEN : 0);
146 intel_uncore_posting_read(&dev_priv->uncore, FW_BLC_SELF_VLV);
147 } else if (IS_G4X(dev_priv) || IS_I965GM(dev_priv)) {
148 was_enabled = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF) & FW_BLC_SELF_EN;
149 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF, enable ? FW_BLC_SELF_EN : 0);
150 intel_uncore_posting_read(&dev_priv->uncore, FW_BLC_SELF);
151 } else if (IS_PINEVIEW(dev_priv)) {
152 val = intel_uncore_read(&dev_priv->uncore, DSPFW3);
158 intel_uncore_write(&dev_priv->uncore, DSPFW3, val);
159 intel_uncore_posting_read(&dev_priv->uncore, DSPFW3);
160 } else if (IS_I945G(dev_priv) || IS_I945GM(dev_priv)) {
161 was_enabled = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF) & FW_BLC_SELF_EN;
164 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF, val);
165 intel_uncore_posting_read(&dev_priv->uncore, FW_BLC_SELF);
166 } else if (IS_I915GM(dev_priv)) {
172 was_enabled = intel_uncore_read(&dev_priv->uncore, INSTPM) & INSTPM_SELF_EN;
175 intel_uncore_write(&dev_priv->uncore, INSTPM, val);
176 intel_uncore_posting_read(&dev_priv->uncore, INSTPM);
181 trace_intel_memory_cxsr(dev_priv, was_enabled, enable);
183 drm_dbg_kms(&dev_priv->drm, "memory self-refresh is %s (was %s)\n",
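In _intel_set_memory_cxsr(), the i945 (160-165) and i915GM (166-176) branches elide how val is computed; both registers use the hardware's masked-bit scheme. A sketch, assuming the _MASKED_BIT_ENABLE()/_MASKED_BIT_DISABLE() macros and the INSTPM_SELF_EN bit:

    /* i945G/GM, feeding the write at 164: FW_BLC_SELF takes masked bits */
    val = enable ? _MASKED_BIT_ENABLE(FW_BLC_SELF_EN) :
                   _MASKED_BIT_DISABLE(FW_BLC_SELF_EN);

    /* i915GM, feeding the write at 175: self-refresh is controlled via INSTPM */
    val = enable ? _MASKED_BIT_ENABLE(INSTPM_SELF_EN) :
                   _MASKED_BIT_DISABLE(INSTPM_SELF_EN);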
192 * @dev_priv: i915 device
227 bool intel_set_memory_cxsr(struct drm_i915_private *dev_priv, bool enable)
231 mutex_lock(&dev_priv->display.wm.wm_mutex);
232 ret = _intel_set_memory_cxsr(dev_priv, enable);
233 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
234 dev_priv->display.wm.vlv.cxsr = enable;
235 else if (IS_G4X(dev_priv))
236 dev_priv->display.wm.g4x.cxsr = enable;
237 mutex_unlock(&dev_priv->display.wm.wm_mutex);
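intel_set_memory_cxsr() is the locked wrapper that also caches the new state for vlv/g4x. Callers bracket watermark reprogramming with it, as pnv_update_wm() further down does; a sketch of the typical caller pattern:

    intel_set_memory_cxsr(dev_priv, false);     /* exit self-refresh first */
    /* ... write the new watermark registers ... */
    intel_set_memory_cxsr(dev_priv, true);      /* safe to re-enter now */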
264 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
272 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB);
273 dsparb2 = intel_uncore_read(&dev_priv->uncore, DSPARB2);
278 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB);
279 dsparb2 = intel_uncore_read(&dev_priv->uncore, DSPARB2);
284 dsparb2 = intel_uncore_read(&dev_priv->uncore, DSPARB2);
285 dsparb3 = intel_uncore_read(&dev_priv->uncore, DSPARB3);
300 static int i9xx_get_fifo_size(struct drm_i915_private *dev_priv,
303 u32 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB);
310 drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
316 static int i830_get_fifo_size(struct drm_i915_private *dev_priv,
319 u32 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB);
327 drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
333 static int i845_get_fifo_size(struct drm_i915_private *dev_priv,
336 u32 dsparb = intel_uncore_read(&dev_priv->uncore, DSPARB);
342 drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
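Each getter splits the DSPARB value into per-plane FIFO shares: the register holds split points, so plane B's share is the distance between two of them. A sketch of the elided i9xx decode (the DSPARB_CSTART_SHIFT field name is an assumption, it does not appear in the matches):

    size = dsparb & 0x7f;                       /* plane A: FIFO start to C start */
    if (i9xx_plane == PLANE_B)
            size = ((dsparb >> DSPARB_CSTART_SHIFT) & 0x7f) - size;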
616 static struct intel_crtc *single_enabled_crtc(struct drm_i915_private *dev_priv)
620 for_each_intel_crtc(&dev_priv->drm, crtc) {
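single_enabled_crtc() exists because the legacy self-refresh hardware can only track one pipe. A sketch of the elided loop body, assuming an intel_crtc_active() style check:

    struct intel_crtc *crtc, *enabled = NULL;

    for_each_intel_crtc(&dev_priv->drm, crtc) {
            if (intel_crtc_active(crtc)) {
                    if (enabled)
                            return NULL;        /* second active pipe: no SR */
                    enabled = crtc;
            }
    }
    return enabled;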
631 static void pnv_update_wm(struct drm_i915_private *dev_priv)
638 latency = intel_get_cxsr_latency(dev_priv);
640 drm_dbg_kms(&dev_priv->drm,
642 intel_set_memory_cxsr(dev_priv, false);
646 crtc = single_enabled_crtc(dev_priv);
654 wm = intel_calculate_wm(dev_priv, pixel_rate,
658 reg = intel_uncore_read(&dev_priv->uncore, DSPFW1);
661 intel_uncore_write(&dev_priv->uncore, DSPFW1, reg);
662 drm_dbg_kms(&dev_priv->drm, "DSPFW1 register is %x\n", reg);
665 wm = intel_calculate_wm(dev_priv, pixel_rate,
669 intel_uncore_rmw(&dev_priv->uncore, DSPFW3, DSPFW_CURSOR_SR_MASK,
673 wm = intel_calculate_wm(dev_priv, pixel_rate,
677 intel_uncore_rmw(&dev_priv->uncore, DSPFW3, DSPFW_HPLL_SR_MASK, FW_WM(wm, HPLL_SR));
680 wm = intel_calculate_wm(dev_priv, pixel_rate,
684 reg = intel_uncore_read(&dev_priv->uncore, DSPFW3);
687 intel_uncore_write(&dev_priv->uncore, DSPFW3, reg);
688 drm_dbg_kms(&dev_priv->drm, "DSPFW3 register is %x\n", reg);
690 intel_set_memory_cxsr(dev_priv, true);
692 intel_set_memory_cxsr(dev_priv, false);
713 static void g4x_write_wm_values(struct drm_i915_private *dev_priv,
718 for_each_pipe(dev_priv, pipe)
719 trace_g4x_wm(intel_crtc_for_pipe(dev_priv, pipe), wm);
721 intel_uncore_write(&dev_priv->uncore, DSPFW1,
726 intel_uncore_write(&dev_priv->uncore, DSPFW2,
733 intel_uncore_write(&dev_priv->uncore, DSPFW3,
739 intel_uncore_posting_read(&dev_priv->uncore, DSPFW1);
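The DSPFW1/2/3 writes pack several watermarks per register through the FW_WM() macro seen elsewhere in these matches. A sketch of the elided arguments for the DSPFW1 write at 721, assuming the g4x_wm_values layout:

    intel_uncore_write(&dev_priv->uncore, DSPFW1,
                       FW_WM(wm->sr.plane, SR) |
                       FW_WM(wm->pipe[PIPE_B].plane[PLANE_CURSOR], CURSORB) |
                       FW_WM(wm->pipe[PIPE_B].plane[PLANE_PRIMARY], PLANEB) |
                       FW_WM(wm->pipe[PIPE_A].plane[PLANE_PRIMARY], PLANEA));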
745 static void vlv_write_wm_values(struct drm_i915_private *dev_priv,
750 for_each_pipe(dev_priv, pipe) {
751 trace_vlv_wm(intel_crtc_for_pipe(dev_priv, pipe), wm);
753 intel_uncore_write(&dev_priv->uncore, VLV_DDL(pipe),
765 intel_uncore_write(&dev_priv->uncore, DSPHOWM, 0);
766 intel_uncore_write(&dev_priv->uncore, DSPHOWM1, 0);
767 intel_uncore_write(&dev_priv->uncore, DSPFW4, 0);
768 intel_uncore_write(&dev_priv->uncore, DSPFW5, 0);
769 intel_uncore_write(&dev_priv->uncore, DSPFW6, 0);
771 intel_uncore_write(&dev_priv->uncore, DSPFW1,
776 intel_uncore_write(&dev_priv->uncore, DSPFW2,
780 intel_uncore_write(&dev_priv->uncore, DSPFW3,
783 if (IS_CHERRYVIEW(dev_priv)) {
784 intel_uncore_write(&dev_priv->uncore, DSPFW7_CHV,
787 intel_uncore_write(&dev_priv->uncore, DSPFW8_CHV,
790 intel_uncore_write(&dev_priv->uncore, DSPFW9_CHV,
793 intel_uncore_write(&dev_priv->uncore, DSPHOWM,
805 intel_uncore_write(&dev_priv->uncore, DSPFW7,
808 intel_uncore_write(&dev_priv->uncore, DSPHOWM,
818 intel_uncore_posting_read(&dev_priv->uncore, DSPFW1);
823 static void g4x_setup_wm_latency(struct drm_i915_private *dev_priv)
826 dev_priv->display.wm.pri_latency[G4X_WM_LEVEL_NORMAL] = 5;
827 dev_priv->display.wm.pri_latency[G4X_WM_LEVEL_SR] = 12;
828 dev_priv->display.wm.pri_latency[G4X_WM_LEVEL_HPLL] = 35;
830 dev_priv->display.wm.num_levels = G4X_WM_LEVEL_HPLL + 1;
880 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
883 unsigned int latency = dev_priv->display.wm.pri_latency[level] * 10;
934 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
937 for (; level < dev_priv->display.wm.num_levels; level++) {
950 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
956 for (; level < dev_priv->display.wm.num_levels; level++) {
974 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
986 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
1026 drm_dbg_kms(&dev_priv->drm,
1034 drm_dbg_kms(&dev_priv->drm,
1054 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1056 if (level >= dev_priv->display.wm.num_levels)
1198 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1228 drm_WARN_ON(&dev_priv->drm, intermediate->wm.plane[plane_id] >
1246 drm_WARN_ON(&dev_priv->drm,
1252 drm_WARN_ON(&dev_priv->drm,
1259 drm_WARN_ON(&dev_priv->drm,
1262 drm_WARN_ON(&dev_priv->drm,
1277 static void g4x_merge_wm(struct drm_i915_private *dev_priv,
1287 for_each_intel_crtc(&dev_priv->drm, crtc) {
1309 for_each_intel_crtc(&dev_priv->drm, crtc) {
1321 static void g4x_program_watermarks(struct drm_i915_private *dev_priv)
1323 struct g4x_wm_values *old_wm = &dev_priv->display.wm.g4x;
1326 g4x_merge_wm(dev_priv, &new_wm);
1332 _intel_set_memory_cxsr(dev_priv, false);
1334 g4x_write_wm_values(dev_priv, &new_wm);
1337 _intel_set_memory_cxsr(dev_priv, true);
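g4x_program_watermarks() orders its calls so self-refresh is never active while stale watermarks sit in the registers: disable first (1332), write (1334), re-enable after (1337). A sketch of the elided guards, assuming local is_enabling()/is_disabling() helpers:

    g4x_merge_wm(dev_priv, &new_wm);

    if (memcmp(old_wm, &new_wm, sizeof(new_wm)) == 0)
            return;                             /* nothing changed */

    if (is_disabling(old_wm->cxsr, new_wm.cxsr, true))
            _intel_set_memory_cxsr(dev_priv, false);

    g4x_write_wm_values(dev_priv, &new_wm);

    if (is_enabling(old_wm->cxsr, new_wm.cxsr, true))
            _intel_set_memory_cxsr(dev_priv, true);

    *old_wm = new_wm;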
1345 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1349 mutex_lock(&dev_priv->display.wm.wm_mutex);
1351 g4x_program_watermarks(dev_priv);
1352 mutex_unlock(&dev_priv->display.wm.wm_mutex);
1358 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1365 mutex_lock(&dev_priv->display.wm.wm_mutex);
1367 g4x_program_watermarks(dev_priv);
1368 mutex_unlock(&dev_priv->display.wm.wm_mutex);
1387 static void vlv_setup_wm_latency(struct drm_i915_private *dev_priv)
1390 dev_priv->display.wm.pri_latency[VLV_WM_LEVEL_PM2] = 3;
1392 dev_priv->display.wm.num_levels = VLV_WM_LEVEL_PM2 + 1;
1394 if (IS_CHERRYVIEW(dev_priv)) {
1395 dev_priv->display.wm.pri_latency[VLV_WM_LEVEL_PM5] = 12;
1396 dev_priv->display.wm.pri_latency[VLV_WM_LEVEL_DDR_DVFS] = 33;
1398 dev_priv->display.wm.num_levels = VLV_WM_LEVEL_DDR_DVFS + 1;
1407 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
1412 if (dev_priv->display.wm.pri_latency[level] == 0)
1433 dev_priv->display.wm.pri_latency[level] * 10);
1448 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1517 drm_WARN_ON(&dev_priv->drm, active_planes != 0 && fifo_left != 0);
1521 drm_WARN_ON(&dev_priv->drm, fifo_left != fifo_size);
1532 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1534 for (; level < dev_priv->display.wm.num_levels; level++) {
1560 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1563 for (; level < dev_priv->display.wm.num_levels; level++) {
1577 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1587 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
1604 drm_dbg_kms(&dev_priv->drm,
1636 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1646 wm_state->num_levels = dev_priv->display.wm.num_levels;
1656 const int sr_fifo_size = INTEL_NUM_PIPES(dev_priv) * 512 - 1;
1756 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1757 struct intel_uncore *uncore = &dev_priv->uncore;
1772 drm_WARN_ON(&dev_priv->drm, fifo_state->plane[PLANE_CURSOR] != 63);
1773 drm_WARN_ON(&dev_priv->drm, fifo_size != 511);
1903 static void vlv_merge_wm(struct drm_i915_private *dev_priv,
1909 wm->level = dev_priv->display.wm.num_levels - 1;
1912 for_each_intel_crtc(&dev_priv->drm, crtc) {
1931 for_each_intel_crtc(&dev_priv->drm, crtc) {
1946 static void vlv_program_watermarks(struct drm_i915_private *dev_priv)
1948 struct vlv_wm_values *old_wm = &dev_priv->display.wm.vlv;
1951 vlv_merge_wm(dev_priv, &new_wm);
1957 chv_set_memory_dvfs(dev_priv, false);
1960 chv_set_memory_pm5(dev_priv, false);
1963 _intel_set_memory_cxsr(dev_priv, false);
1965 vlv_write_wm_values(dev_priv, &new_wm);
1968 _intel_set_memory_cxsr(dev_priv, true);
1971 chv_set_memory_pm5(dev_priv, true);
1974 chv_set_memory_dvfs(dev_priv, true);
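vlv_program_watermarks() follows the same shape with two extra power states layered on: DDR DVFS and PM5 are exited before the new values land (1957-1963) and re-entered in reverse order afterwards (1968-1974). A sketch of the elided conditions, again assuming is_enabling()/is_disabling():

    if (is_disabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_DDR_DVFS))
            chv_set_memory_dvfs(dev_priv, false);
    if (is_disabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_PM5))
            chv_set_memory_pm5(dev_priv, false);
    if (is_disabling(old_wm->cxsr, new_wm.cxsr, true))
            _intel_set_memory_cxsr(dev_priv, false);

    vlv_write_wm_values(dev_priv, &new_wm);

    if (is_enabling(old_wm->cxsr, new_wm.cxsr, true))
            _intel_set_memory_cxsr(dev_priv, true);
    if (is_enabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_PM5))
            chv_set_memory_pm5(dev_priv, true);
    if (is_enabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_DDR_DVFS))
            chv_set_memory_dvfs(dev_priv, true);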
1982 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1986 mutex_lock(&dev_priv->display.wm.wm_mutex);
1988 vlv_program_watermarks(dev_priv);
1989 mutex_unlock(&dev_priv->display.wm.wm_mutex);
1995 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2002 mutex_lock(&dev_priv->display.wm.wm_mutex);
2004 vlv_program_watermarks(dev_priv);
2005 mutex_unlock(&dev_priv->display.wm.wm_mutex);
2008 static void i965_update_wm(struct drm_i915_private *dev_priv)
2016 crtc = single_enabled_crtc(dev_priv);
2037 drm_dbg_kms(&dev_priv->drm,
2052 drm_dbg_kms(&dev_priv->drm,
2060 intel_set_memory_cxsr(dev_priv, false);
2063 drm_dbg_kms(&dev_priv->drm,
2068 intel_uncore_write(&dev_priv->uncore, DSPFW1, FW_WM(srwm, SR) |
2072 intel_uncore_write(&dev_priv->uncore, DSPFW2, FW_WM(8, CURSORA) |
2075 intel_uncore_write(&dev_priv->uncore, DSPFW3, FW_WM(cursor_sr, CURSOR_SR));
2078 intel_set_memory_cxsr(dev_priv, true);
2097 static void i9xx_update_wm(struct drm_i915_private *dev_priv)
2107 if (IS_I945GM(dev_priv))
2109 else if (DISPLAY_VER(dev_priv) != 2)
2114 if (DISPLAY_VER(dev_priv) == 2)
2115 fifo_size = i830_get_fifo_size(dev_priv, PLANE_A);
2117 fifo_size = i9xx_get_fifo_size(dev_priv, PLANE_A);
2118 crtc = intel_crtc_for_plane(dev_priv, PLANE_A);
2124 if (DISPLAY_VER(dev_priv) == 2)
2129 planea_wm = intel_calculate_wm(dev_priv, crtc->config->pixel_rate,
2138 if (DISPLAY_VER(dev_priv) == 2)
2141 if (DISPLAY_VER(dev_priv) == 2)
2142 fifo_size = i830_get_fifo_size(dev_priv, PLANE_B);
2144 fifo_size = i9xx_get_fifo_size(dev_priv, PLANE_B);
2145 crtc = intel_crtc_for_plane(dev_priv, PLANE_B);
2151 if (DISPLAY_VER(dev_priv) == 2)
2156 planeb_wm = intel_calculate_wm(dev_priv, crtc->config->pixel_rate,
2165 drm_dbg_kms(&dev_priv->drm,
2168 crtc = single_enabled_crtc(dev_priv);
2169 if (IS_I915GM(dev_priv) && crtc) {
2185 intel_set_memory_cxsr(dev_priv, false);
2188 if (HAS_FW_BLC(dev_priv) && crtc) {
2201 if (IS_I915GM(dev_priv) || IS_I945GM(dev_priv))
2209 drm_dbg_kms(&dev_priv->drm,
2215 if (IS_I945G(dev_priv) || IS_I945GM(dev_priv))
2216 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF,
2219 intel_uncore_write(&dev_priv->uncore, FW_BLC_SELF, srwm & 0x3f);
2222 drm_dbg_kms(&dev_priv->drm,
2233 intel_uncore_write(&dev_priv->uncore, FW_BLC, fwater_lo);
2234 intel_uncore_write(&dev_priv->uncore, FW_BLC2, fwater_hi);
2237 intel_set_memory_cxsr(dev_priv, true);
2240 static void i845_update_wm(struct drm_i915_private *dev_priv)
2246 crtc = single_enabled_crtc(dev_priv);
2250 planea_wm = intel_calculate_wm(dev_priv, crtc->config->pixel_rate,
2252 i845_get_fifo_size(dev_priv, PLANE_A),
2254 fwater_lo = intel_uncore_read(&dev_priv->uncore, FW_BLC) & ~0xfff;
2257 drm_dbg_kms(&dev_priv->drm,
2260 intel_uncore_write(&dev_priv->uncore, FW_BLC, fwater_lo);
2415 ilk_display_fifo_size(const struct drm_i915_private *dev_priv)
2417 if (DISPLAY_VER(dev_priv) >= 8)
2419 else if (DISPLAY_VER(dev_priv) >= 7)
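ilk_display_fifo_size() only elides its return values; a sketch filling them in with the sizes these display generations expose (3072 entries on BDW+, 768 on IVB/HSW, 512 on ILK/SNB; treat the exact numbers as an assumption here):

    if (DISPLAY_VER(dev_priv) >= 8)
            return 3072;
    else if (DISPLAY_VER(dev_priv) >= 7)
            return 768;
    else
            return 512;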
2426 ilk_plane_wm_reg_max(const struct drm_i915_private *dev_priv,
2429 if (DISPLAY_VER(dev_priv) >= 8)
2432 else if (DISPLAY_VER(dev_priv) >= 7)
2444 ilk_cursor_wm_reg_max(const struct drm_i915_private *dev_priv, int level)
2446 if (DISPLAY_VER(dev_priv) >= 7)
2452 static unsigned int ilk_fbc_wm_reg_max(const struct drm_i915_private *dev_priv)
2454 if (DISPLAY_VER(dev_priv) >= 8)
2461 static unsigned int ilk_plane_wm_max(const struct drm_i915_private *dev_priv,
2467 unsigned int fifo_size = ilk_display_fifo_size(dev_priv);
2475 fifo_size /= INTEL_NUM_PIPES(dev_priv);
2482 if (DISPLAY_VER(dev_priv) < 7)
2498 return min(fifo_size, ilk_plane_wm_reg_max(dev_priv, level, is_sprite));
2502 static unsigned int ilk_cursor_wm_max(const struct drm_i915_private *dev_priv,
2511 return ilk_cursor_wm_reg_max(dev_priv, level);
2514 static void ilk_compute_wm_maximums(const struct drm_i915_private *dev_priv,
2520 max->pri = ilk_plane_wm_max(dev_priv, level, config, ddb_partitioning, false);
2521 max->spr = ilk_plane_wm_max(dev_priv, level, config, ddb_partitioning, true);
2522 max->cur = ilk_cursor_wm_max(dev_priv, level, config);
2523 max->fbc = ilk_fbc_wm_reg_max(dev_priv);
2526 static void ilk_compute_wm_reg_maximums(const struct drm_i915_private *dev_priv,
2530 max->pri = ilk_plane_wm_reg_max(dev_priv, level, false);
2531 max->spr = ilk_plane_wm_reg_max(dev_priv, level, true);
2532 max->cur = ilk_cursor_wm_reg_max(dev_priv, level);
2533 max->fbc = ilk_fbc_wm_reg_max(dev_priv);
2581 static void ilk_compute_wm_level(const struct drm_i915_private *dev_priv,
2590 u16 pri_latency = dev_priv->display.wm.pri_latency[level];
2591 u16 spr_latency = dev_priv->display.wm.spr_latency[level];
2592 u16 cur_latency = dev_priv->display.wm.cur_latency[level];
2661 static void intel_fixup_spr_wm_latency(struct drm_i915_private *dev_priv,
2665 if (DISPLAY_VER(dev_priv) == 5)
2669 static void intel_fixup_cur_wm_latency(struct drm_i915_private *dev_priv,
2673 if (DISPLAY_VER(dev_priv) == 5)
2677 static bool ilk_increase_wm_latency(struct drm_i915_private *dev_priv,
2686 for (level = 1; level < dev_priv->display.wm.num_levels; level++)
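ilk_increase_wm_latency() raises every level when WM0 is below min, which is how the SNB quirk at 2700-2702 pushes all latencies up to 1.2us. A sketch of the elided body; the DIV_ROUND_UP(min, 5) reflects the driver's unit convention (assumed here) that WM0 latencies are in 0.1us units while WM1+ use 0.5us units:

    if (wm[0] >= min)
            return false;

    wm[0] = max(wm[0], min);
    for (level = 1; level < dev_priv->display.wm.num_levels; level++)
            wm[level] = max_t(u16, wm[level], DIV_ROUND_UP(min, 5));

    return true;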
2692 static void snb_wm_latency_quirk(struct drm_i915_private *dev_priv)
2700 changed = ilk_increase_wm_latency(dev_priv, dev_priv->display.wm.pri_latency, 12);
2701 changed |= ilk_increase_wm_latency(dev_priv, dev_priv->display.wm.spr_latency, 12);
2702 changed |= ilk_increase_wm_latency(dev_priv, dev_priv->display.wm.cur_latency, 12);
2707 drm_dbg_kms(&dev_priv->drm,
2709 intel_print_wm_latency(dev_priv, "Primary", dev_priv->display.wm.pri_latency);
2710 intel_print_wm_latency(dev_priv, "Sprite", dev_priv->display.wm.spr_latency);
2711 intel_print_wm_latency(dev_priv, "Cursor", dev_priv->display.wm.cur_latency);
2714 static void snb_wm_lp3_irq_quirk(struct drm_i915_private *dev_priv)
2727 if (dev_priv->display.wm.pri_latency[3] == 0 &&
2728 dev_priv->display.wm.spr_latency[3] == 0 &&
2729 dev_priv->display.wm.cur_latency[3] == 0)
2732 dev_priv->display.wm.pri_latency[3] = 0;
2733 dev_priv->display.wm.spr_latency[3] = 0;
2734 dev_priv->display.wm.cur_latency[3] = 0;
2736 drm_dbg_kms(&dev_priv->drm,
2738 intel_print_wm_latency(dev_priv, "Primary", dev_priv->display.wm.pri_latency);
2739 intel_print_wm_latency(dev_priv, "Sprite", dev_priv->display.wm.spr_latency);
2740 intel_print_wm_latency(dev_priv, "Cursor", dev_priv->display.wm.cur_latency);
2743 static void ilk_setup_wm_latency(struct drm_i915_private *dev_priv)
2745 if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
2746 hsw_read_wm_latency(dev_priv, dev_priv->display.wm.pri_latency);
2747 else if (DISPLAY_VER(dev_priv) >= 6)
2748 snb_read_wm_latency(dev_priv, dev_priv->display.wm.pri_latency);
2750 ilk_read_wm_latency(dev_priv, dev_priv->display.wm.pri_latency);
2752 memcpy(dev_priv->display.wm.spr_latency, dev_priv->display.wm.pri_latency,
2753 sizeof(dev_priv->display.wm.pri_latency));
2754 memcpy(dev_priv->display.wm.cur_latency, dev_priv->display.wm.pri_latency,
2755 sizeof(dev_priv->display.wm.pri_latency));
2757 intel_fixup_spr_wm_latency(dev_priv, dev_priv->display.wm.spr_latency);
2758 intel_fixup_cur_wm_latency(dev_priv, dev_priv->display.wm.cur_latency);
2760 intel_print_wm_latency(dev_priv, "Primary", dev_priv->display.wm.pri_latency);
2761 intel_print_wm_latency(dev_priv, "Sprite", dev_priv->display.wm.spr_latency);
2762 intel_print_wm_latency(dev_priv, "Cursor", dev_priv->display.wm.cur_latency);
2764 if (DISPLAY_VER(dev_priv) == 6) {
2765 snb_wm_latency_quirk(dev_priv);
2766 snb_wm_lp3_irq_quirk(dev_priv);
2770 static bool ilk_validate_pipe_wm(struct drm_i915_private *dev_priv,
2782 ilk_compute_wm_maximums(dev_priv, 0, &config, INTEL_DDB_PART_1_2, &max);
2785 if (!ilk_validate_wm_level(dev_priv, 0, &max, &pipe_wm->wm[0])) {
2786 drm_dbg_kms(&dev_priv->drm, "LP0 watermark invalid\n");
2797 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2824 usable_level = dev_priv->display.wm.num_levels - 1;
2827 if (DISPLAY_VER(dev_priv) < 7 && pipe_wm->sprites_enabled)
2835 ilk_compute_wm_level(dev_priv, crtc, 0, crtc_state,
2838 if (!ilk_validate_pipe_wm(dev_priv, pipe_wm))
2841 ilk_compute_wm_reg_maximums(dev_priv, 1, &max);
2846 ilk_compute_wm_level(dev_priv, crtc, level, crtc_state,
2854 if (!ilk_validate_wm_level(dev_priv, level, &max, wm)) {
2871 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2895 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
2912 if (!ilk_validate_pipe_wm(dev_priv, a))
2928 static void ilk_merge_wm_level(struct drm_i915_private *dev_priv,
2936 for_each_intel_crtc(&dev_priv->drm, crtc) {
2961 static void ilk_wm_merge(struct drm_i915_private *dev_priv,
2966 int level, num_levels = dev_priv->display.wm.num_levels;
2970 if ((DISPLAY_VER(dev_priv) < 7 || IS_IVYBRIDGE(dev_priv)) &&
2975 merged->fbc_wm_enabled = DISPLAY_VER(dev_priv) >= 6;
2981 ilk_merge_wm_level(dev_priv, level, wm);
2985 else if (!ilk_validate_wm_level(dev_priv, level, max, wm))
3001 if (DISPLAY_VER(dev_priv) == 5 && HAS_FBC(dev_priv) &&
3002 dev_priv->display.params.enable_fbc && !merged->fbc_wm_enabled) {
3018 static unsigned int ilk_wm_lp_latency(struct drm_i915_private *dev_priv,
3021 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
3024 return dev_priv->display.wm.pri_latency[level];
3027 static void ilk_compute_wm_results(struct drm_i915_private *dev_priv,
3051 WM_LP_LATENCY(ilk_wm_lp_latency(dev_priv, level)) |
3058 if (DISPLAY_VER(dev_priv) >= 8)
3069 if (DISPLAY_VER(dev_priv) < 7 && r->spr_val) {
3070 drm_WARN_ON(&dev_priv->drm, wm_lp != 1);
3076 for_each_intel_crtc(&dev_priv->drm, crtc) {
3081 if (drm_WARN_ON(&dev_priv->drm, !r->enable))
3096 ilk_find_best_result(struct drm_i915_private *dev_priv,
3102 for (level = 1; level < dev_priv->display.wm.num_levels; level++) {
3128 static unsigned int ilk_compute_wm_dirty(struct drm_i915_private *dev_priv,
3136 for_each_pipe(dev_priv, pipe) {
3174 static bool _ilk_disable_lp_wm(struct drm_i915_private *dev_priv,
3177 struct ilk_wm_values *previous = &dev_priv->display.wm.hw;
3182 intel_uncore_write(&dev_priv->uncore, WM3_LP_ILK, previous->wm_lp[2]);
3187 intel_uncore_write(&dev_priv->uncore, WM2_LP_ILK, previous->wm_lp[1]);
3192 intel_uncore_write(&dev_priv->uncore, WM1_LP_ILK, previous->wm_lp[0]);
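_ilk_disable_lp_wm() guards each of the three writes the same way: only touch a WM_LP register that is both dirty and currently enabled. A sketch of the WM3 leg, assuming WM_DIRTY_LP() bits consistent with the WM_DIRTY_LP_ALL mask used at 3265:

    bool changed = false;

    if (dirty & WM_DIRTY_LP(3) && previous->wm_lp[2] & WM_LP_ENABLE) {
            previous->wm_lp[2] &= ~WM_LP_ENABLE;
            intel_uncore_write(&dev_priv->uncore, WM3_LP_ILK, previous->wm_lp[2]);
            changed = true;
    }
    /* the WM2 (3187) and WM1 (3192) legs repeat the same test */

    return changed;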
3208 static void ilk_write_wm_values(struct drm_i915_private *dev_priv,
3211 struct ilk_wm_values *previous = &dev_priv->display.wm.hw;
3214 dirty = ilk_compute_wm_dirty(dev_priv, previous, results);
3218 _ilk_disable_lp_wm(dev_priv, dirty);
3221 intel_uncore_write(&dev_priv->uncore, WM0_PIPE_ILK(PIPE_A), results->wm_pipe[0]);
3223 intel_uncore_write(&dev_priv->uncore, WM0_PIPE_ILK(PIPE_B), results->wm_pipe[1]);
3225 intel_uncore_write(&dev_priv->uncore, WM0_PIPE_ILK(PIPE_C), results->wm_pipe[2]);
3228 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
3229 intel_uncore_rmw(&dev_priv->uncore, WM_MISC, WM_MISC_DATA_PARTITION_5_6,
3233 intel_uncore_rmw(&dev_priv->uncore, DISP_ARB_CTL2, DISP_DATA_PARTITION_5_6,
3239 intel_uncore_rmw(&dev_priv->uncore, DISP_ARB_CTL, DISP_FBC_WM_DIS,
3244 intel_uncore_write(&dev_priv->uncore, WM1S_LP_ILK, results->wm_lp_spr[0]);
3246 if (DISPLAY_VER(dev_priv) >= 7) {
3248 intel_uncore_write(&dev_priv->uncore, WM2S_LP_IVB, results->wm_lp_spr[1]);
3250 intel_uncore_write(&dev_priv->uncore, WM3S_LP_IVB, results->wm_lp_spr[2]);
3254 intel_uncore_write(&dev_priv->uncore, WM1_LP_ILK, results->wm_lp[0]);
3256 intel_uncore_write(&dev_priv->uncore, WM2_LP_ILK, results->wm_lp[1]);
3258 intel_uncore_write(&dev_priv->uncore, WM3_LP_ILK, results->wm_lp[2]);
3260 dev_priv->display.wm.hw = *results;
3263 bool ilk_disable_lp_wm(struct drm_i915_private *dev_priv)
3265 return _ilk_disable_lp_wm(dev_priv, WM_DIRTY_LP_ALL);
3268 static void ilk_compute_wm_config(struct drm_i915_private *dev_priv,
3274 for_each_intel_crtc(&dev_priv->drm, crtc) {
3286 static void ilk_program_watermarks(struct drm_i915_private *dev_priv)
3294 ilk_compute_wm_config(dev_priv, &config);
3296 ilk_compute_wm_maximums(dev_priv, 1, &config, INTEL_DDB_PART_1_2, &max);
3297 ilk_wm_merge(dev_priv, &config, &max, &lp_wm_1_2);
3300 if (DISPLAY_VER(dev_priv) >= 7 &&
3302 ilk_compute_wm_maximums(dev_priv, 1, &config, INTEL_DDB_PART_5_6, &max);
3303 ilk_wm_merge(dev_priv, &config, &max, &lp_wm_5_6);
3305 best_lp_wm = ilk_find_best_result(dev_priv, &lp_wm_1_2, &lp_wm_5_6);
3313 ilk_compute_wm_results(dev_priv, best_lp_wm, partitioning, &results);
3315 ilk_write_wm_values(dev_priv, &results);
3321 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3325 mutex_lock(&dev_priv->display.wm.wm_mutex);
3327 ilk_program_watermarks(dev_priv);
3328 mutex_unlock(&dev_priv->display.wm.wm_mutex);
3334 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3341 mutex_lock(&dev_priv->display.wm.wm_mutex);
3343 ilk_program_watermarks(dev_priv);
3344 mutex_unlock(&dev_priv->display.wm.wm_mutex);
3350 struct drm_i915_private *dev_priv = to_i915(dev);
3351 struct ilk_wm_values *hw = &dev_priv->display.wm.hw;
3356 hw->wm_pipe[pipe] = intel_uncore_read(&dev_priv->uncore, WM0_PIPE_ILK(pipe));
3383 for (level = 0; level < dev_priv->display.wm.num_levels; level++)
3432 void ilk_wm_sanitize(struct drm_i915_private *dev_priv)
3443 if (!dev_priv->display.funcs.wm->optimize_watermarks)
3446 if (drm_WARN_ON(&dev_priv->drm, DISPLAY_VER(dev_priv) >= 9))
3449 state = drm_atomic_state_alloc(&dev_priv->drm);
3450 if (drm_WARN_ON(&dev_priv->drm, !state))
3466 if (!HAS_GMCH(dev_priv))
3473 ret = intel_atomic_check(&dev_priv->drm, state);
3503 drm_WARN(&dev_priv->drm, ret,
3517 static void g4x_read_wm_values(struct drm_i915_private *dev_priv,
3522 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW1);
3528 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW2);
3536 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW3);
3543 static void vlv_read_wm_values(struct drm_i915_private *dev_priv,
3549 for_each_pipe(dev_priv, pipe) {
3550 tmp = intel_uncore_read(&dev_priv->uncore, VLV_DDL(pipe));
3562 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW1);
3568 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW2);
3573 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW3);
3576 if (IS_CHERRYVIEW(dev_priv)) {
3577 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW7_CHV);
3581 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW8_CHV);
3585 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW9_CHV);
3589 tmp = intel_uncore_read(&dev_priv->uncore, DSPHOWM);
3601 tmp = intel_uncore_read(&dev_priv->uncore, DSPFW7);
3605 tmp = intel_uncore_read(&dev_priv->uncore, DSPHOWM);
3619 static void g4x_wm_get_hw_state(struct drm_i915_private *dev_priv)
3621 struct g4x_wm_values *wm = &dev_priv->display.wm.g4x;
3624 g4x_read_wm_values(dev_priv, wm);
3626 wm->cxsr = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF) & FW_BLC_SELF_EN;
3628 for_each_intel_crtc(&dev_priv->drm, crtc) {
3693 drm_dbg_kms(&dev_priv->drm,
3701 drm_dbg_kms(&dev_priv->drm,
3704 drm_dbg_kms(&dev_priv->drm,
3707 drm_dbg_kms(&dev_priv->drm, "Initial SR=%s HPLL=%s FBC=%s\n",
3712 static void g4x_wm_sanitize(struct drm_i915_private *dev_priv)
3717 mutex_lock(&dev_priv->display.wm.wm_mutex);
3719 for_each_intel_plane(&dev_priv->drm, plane) {
3721 intel_crtc_for_pipe(dev_priv, plane->pipe);
3732 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
3743 for_each_intel_crtc(&dev_priv->drm, crtc) {
3749 drm_WARN_ON(&dev_priv->drm, ret);
3756 g4x_program_watermarks(dev_priv);
3758 mutex_unlock(&dev_priv->display.wm.wm_mutex);
3767 static void vlv_wm_get_hw_state(struct drm_i915_private *dev_priv)
3769 struct vlv_wm_values *wm = &dev_priv->display.wm.vlv;
3773 vlv_read_wm_values(dev_priv, wm);
3775 wm->cxsr = intel_uncore_read(&dev_priv->uncore, FW_BLC_SELF_VLV) & FW_CSPWRDWNEN;
3778 if (IS_CHERRYVIEW(dev_priv)) {
3779 vlv_punit_get(dev_priv);
3781 val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
3794 val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
3796 vlv_punit_write(dev_priv, PUNIT_REG_DDR_SETUP2, val);
3798 if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2) &
3800 drm_dbg_kms(&dev_priv->drm,
3803 dev_priv->display.wm.num_levels = VLV_WM_LEVEL_PM5 + 1;
3805 val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
3810 vlv_punit_put(dev_priv);
3813 for_each_intel_crtc(&dev_priv->drm, crtc) {
3853 drm_dbg_kms(&dev_priv->drm,
3862 drm_dbg_kms(&dev_priv->drm,
3867 static void vlv_wm_sanitize(struct drm_i915_private *dev_priv)
3872 mutex_lock(&dev_priv->display.wm.wm_mutex);
3874 for_each_intel_plane(&dev_priv->drm, plane) {
3876 intel_crtc_for_pipe(dev_priv, plane->pipe);
3887 for (level = 0; level < dev_priv->display.wm.num_levels; level++) {
3895 for_each_intel_crtc(&dev_priv->drm, crtc) {
3901 drm_WARN_ON(&dev_priv->drm, ret);
3908 vlv_program_watermarks(dev_priv);
3910 mutex_unlock(&dev_priv->display.wm.wm_mutex);
3923 static void ilk_init_lp_watermarks(struct drm_i915_private *dev_priv)
3925 intel_uncore_rmw(&dev_priv->uncore, WM3_LP_ILK, WM_LP_ENABLE, 0);
3926 intel_uncore_rmw(&dev_priv->uncore, WM2_LP_ILK, WM_LP_ENABLE, 0);
3927 intel_uncore_rmw(&dev_priv->uncore, WM1_LP_ILK, WM_LP_ENABLE, 0);
3935 static void ilk_wm_get_hw_state(struct drm_i915_private *dev_priv)
3937 struct ilk_wm_values *hw = &dev_priv->display.wm.hw;
3940 ilk_init_lp_watermarks(dev_priv);
3942 for_each_intel_crtc(&dev_priv->drm, crtc)
3945 hw->wm_lp[0] = intel_uncore_read(&dev_priv->uncore, WM1_LP_ILK);
3946 hw->wm_lp[1] = intel_uncore_read(&dev_priv->uncore, WM2_LP_ILK);
3947 hw->wm_lp[2] = intel_uncore_read(&dev_priv->uncore, WM3_LP_ILK);
3949 hw->wm_lp_spr[0] = intel_uncore_read(&dev_priv->uncore, WM1S_LP_ILK);
3950 if (DISPLAY_VER(dev_priv) >= 7) {
3951 hw->wm_lp_spr[1] = intel_uncore_read(&dev_priv->uncore, WM2S_LP_IVB);
3952 hw->wm_lp_spr[2] = intel_uncore_read(&dev_priv->uncore, WM3S_LP_IVB);
3955 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
3956 hw->partitioning = (intel_uncore_read(&dev_priv->uncore, WM_MISC) &
3959 else if (IS_IVYBRIDGE(dev_priv))
3960 hw->partitioning = (intel_uncore_read(&dev_priv->uncore, DISP_ARB_CTL2) &
3965 !(intel_uncore_read(&dev_priv->uncore, DISP_ARB_CTL) & DISP_FBC_WM_DIS);
4012 void i9xx_wm_init(struct drm_i915_private *dev_priv)
4015 if (HAS_PCH_SPLIT(dev_priv)) {
4016 ilk_setup_wm_latency(dev_priv);
4017 dev_priv->display.funcs.wm = &ilk_wm_funcs;
4018 } else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
4019 vlv_setup_wm_latency(dev_priv);
4020 dev_priv->display.funcs.wm = &vlv_wm_funcs;
4021 } else if (IS_G4X(dev_priv)) {
4022 g4x_setup_wm_latency(dev_priv);
4023 dev_priv->display.funcs.wm = &g4x_wm_funcs;
4024 } else if (IS_PINEVIEW(dev_priv)) {
4025 if (!intel_get_cxsr_latency(dev_priv)) {
4026 drm_info(&dev_priv->drm,
4030 (dev_priv->is_ddr3 == 1) ? "3" : "2",
4031 dev_priv->fsb_freq, dev_priv->mem_freq);
4033 intel_set_memory_cxsr(dev_priv, false);
4034 dev_priv->display.funcs.wm = &nop_funcs;
4036 dev_priv->display.funcs.wm = &pnv_wm_funcs;
4038 } else if (DISPLAY_VER(dev_priv) == 4) {
4039 dev_priv->display.funcs.wm = &i965_wm_funcs;
4040 } else if (DISPLAY_VER(dev_priv) == 3) {
4041 dev_priv->display.funcs.wm = &i9xx_wm_funcs;
4042 } else if (DISPLAY_VER(dev_priv) == 2) {
4043 if (INTEL_NUM_PIPES(dev_priv) == 1)
4044 dev_priv->display.funcs.wm = &i845_wm_funcs;
4046 dev_priv->display.funcs.wm = &i9xx_wm_funcs;
4048 drm_err(&dev_priv->drm,
4050 dev_priv->display.funcs.wm = &nop_funcs;