Lines matching refs: if_num

68 static unsigned short load_dac_override(struct ddr_priv *priv, int if_num,
82 static void cn7xxx_lmc_ddr3_reset(struct ddr_priv *priv, int if_num, int reset)
95 debug("LMC%d %s DDR_RESET_L\n", if_num,
99 reset_ctl.u64 = lmc_rd(priv, CVMX_LMCX_RESET_CTL(if_num));
101 lmc_wr(priv, CVMX_LMCX_RESET_CTL(if_num), reset_ctl.u64);
103 lmc_rd(priv, CVMX_LMCX_RESET_CTL(if_num));
108 static void perform_lmc_reset(struct ddr_priv *priv, int node, int if_num)
147 lmc_rd(priv, CVMX_LMCX_RESET_CTL(if_num));
166 cn7xxx_lmc_ddr3_reset(priv, if_num, LMC_DDR3_RESET_DEASSERT);
170 cn7xxx_lmc_ddr3_reset(priv, if_num, LMC_DDR3_RESET_ASSERT);
171 cn7xxx_lmc_ddr3_reset(priv, if_num, LMC_DDR3_RESET_DEASSERT);
175 void oct3_ddr3_seq(struct ddr_priv *priv, int rank_mask, int if_num,
224 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
226 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), lmc_config.u64);
238 debug("LMC%d: Exiting Self-refresh Rank_mask:%x\n", if_num,
241 lmc_wr(priv, CVMX_LMCX_SEQ_CTL(if_num), seq_ctl.u64);
242 lmc_rd(priv, CVMX_LMCX_SEQ_CTL(if_num));
247 seq_ctl.u64 = lmc_rd(priv, CVMX_LMCX_SEQ_CTL(if_num));
493 static void ddr4_mrw(struct ddr_priv *priv, int if_num, int rank,
505 lmc_wr(priv, CVMX_LMCX_MR_MPR_CTL(if_num), lmc_mr_mpr_ctl.u64);
508 oct3_ddr3_seq(priv, 1 << rank, if_num, 0x8);
514 int if_num, int dimm_count, int mpr, int bg1)
526 ddr4_mrw(priv, if_num, rankx, mpr << 2, 3, bg1);
529 ddr4_mrw(priv, if_num, rankx, INV_A0_17(mpr << 2), ~3,
535 static void do_ddr4_mpr_read(struct ddr_priv *priv, int if_num,
540 lmc_mr_mpr_ctl.u64 = lmc_rd(priv, CVMX_LMCX_MR_MPR_CTL(if_num));
546 lmc_wr(priv, CVMX_LMCX_MR_MPR_CTL(if_num), lmc_mr_mpr_ctl.u64);
549 oct3_ddr3_seq(priv, 1 << rank, if_num, 0x9);
563 static int set_rdimm_mode(struct ddr_priv *priv, int if_num, int enable)
568 lmc_control.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
572 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), lmc_control.u64);
577 static void ddr4_mpr_read(struct ddr_priv *priv, int if_num, int rank,
580 do_ddr4_mpr_read(priv, if_num, rank, page, location);
582 mpr_data[0] = lmc_rd(priv, CVMX_LMCX_MPR_DATA0(if_num));
587 int if_num, int page)
597 if_num, rankx, page);
599 ddr4_mpr_read(priv, if_num, rankx, page, location,
608 static void ddr4_mpr_write(struct ddr_priv *priv, int if_num, int rank,
619 lmc_wr(priv, CVMX_LMCX_MR_MPR_CTL(if_num), lmc_mr_mpr_ctl.u64);
622 oct3_ddr3_seq(priv, 1 << rank, if_num, 0x9);
636 static void set_vref(struct ddr_priv *priv, int if_num, int rank,
645 CVMX_LMCX_MODEREG_PARAMS3(if_num));
656 lmc_wr(priv, CVMX_LMCX_MR_MPR_CTL(if_num), lmc_mr_mpr_ctl.u64);
659 oct3_ddr3_seq(priv, 1 << rank, if_num, 0x8);
665 oct3_ddr3_seq(priv, 1 << rank, if_num, 0x8);
669 lmc_wr(priv, CVMX_LMCX_MR_MPR_CTL(if_num), lmc_mr_mpr_ctl.u64);
672 static void set_dram_output_inversion(struct ddr_priv *priv, int if_num,
683 lmc_wr(priv, CVMX_LMCX_DDR4_DIMM_CTL(if_num), lmc_ddr4_dimm_ctl.u64);
690 lmc_rd(priv, CVMX_LMCX_DIMMX_PARAMS(dimm_no, if_num));
695 CVMX_LMCX_DIMMX_PARAMS(dimm_no, if_num),
700 lmc_dimm_ctl.u64 = lmc_rd(priv, CVMX_LMCX_DIMM_CTL(if_num));
706 lmc_wr(priv, CVMX_LMCX_DIMM_CTL(if_num), lmc_dimm_ctl.u64);
708 oct3_ddr3_seq(priv, rank_mask, if_num, 0x7); /* Init RCW */
712 int if_num, int dimm_count, int pattern,
725 ddr4_mpr_write(priv, if_num, rankx,
733 int if_num, int dimm_count)
772 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
775 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), lmc_config.u64);
787 set_mpr_mode(priv, rank_mask, if_num, dimm_count, 1, 0);
789 set_mpr_mode(priv, rank_mask, if_num, dimm_count, 1, 1);
814 set_dram_output_inversion(priv, if_num, dimm_count, rank_mask, 1);
823 set_rdimm_mode(priv, if_num, 0);
834 write_mpr_page0_pattern(priv, rank_mask, if_num, dimm_count, 0x55, 0x8);
840 set_rdimm_mode(priv, if_num, 1);
846 set_dram_output_inversion(priv, if_num, dimm_count, rank_mask, 0);
861 set_mpr_mode(priv, rank_mask, if_num, dimm_count,
873 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
875 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), lmc_config.u64);
898 static int validate_hw_wl_settings(int if_num,
968 static void get_deskew_settings(struct ddr_priv *priv, int if_num,
979 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
984 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
994 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
999 lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1009 static void display_deskew_settings(struct ddr_priv *priv, int if_num,
1020 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
1025 if_num);
1034 if_num, byte_lane,
1051 static void override_deskew_settings(struct ddr_priv *priv, int if_num,
1060 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
1063 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1076 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1077 lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1103 lmc_wr(priv, CVMX_LMCX_GENERAL_PURPOSE0(if_num), csr_data);
1104 lmc_rd(priv, CVMX_LMCX_GENERAL_PURPOSE0(if_num));
1109 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1114 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1121 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1124 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1127 static void process_by_rank_dac(struct ddr_priv *priv, int if_num,
1137 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
1149 display_dac_dbi_settings(if_num, /*dac */ 1,
1173 display_dac_dbi_settings(if_num, /*dac */ 1, lmc_config.s.ecc_ena,
1178 if_num, lane_probs);
1183 load_dac_override(priv, if_num, dacsum.bytes[byte_lane],
1188 static void process_by_rank_dsk(struct ddr_priv *priv, int if_num,
1197 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
1244 display_deskew_settings(priv, if_num, &dsksum, /*VBL_TME */ 3);
1247 override_deskew_settings(priv, if_num, &dsksum);
1262 int if_num, struct deskew_counts *counts,
1276 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
1281 get_deskew_settings(priv, if_num, &dskdat);
1285 if_num);
1294 if_num, byte_lane,
1381 static unsigned short load_dac_override(struct ddr_priv *priv, int if_num,
1388 ddr_dll_ctl3.u64 = lmc_rd(priv, CVMX_LMCX_DLL_CTL3(if_num));
1394 lmc_wr(priv, CVMX_LMCX_DLL_CTL3(if_num), ddr_dll_ctl3.u64);
1397 lmc_wr(priv, CVMX_LMCX_DLL_CTL3(if_num), ddr_dll_ctl3.u64);
1400 lmc_wr(priv, CVMX_LMCX_DLL_CTL3(if_num), ddr_dll_ctl3.u64);
1403 lmc_wr(priv, CVMX_LMCX_DLL_CTL3(if_num), ddr_dll_ctl3.u64);
1405 lmc_rd(priv, CVMX_LMCX_DLL_CTL3(if_num)); // flush writes
1413 static int read_dac_dbi_settings(struct ddr_priv *priv, int if_num,
1427 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1429 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1441 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1444 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1446 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1450 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1542 int if_num)
1555 lmc_phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1568 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), lmc_phy_ctl.u64);
1578 oct3_ddr3_seq(priv, rank_mask, if_num, 0x0B);
1582 int rank_mask, int if_num)
1588 ddr_dll_ctl3.u64 = lmc_rd(priv, CVMX_LMCX_DLL_CTL3(if_num));
1592 lmc_wr(priv, CVMX_LMCX_DLL_CTL3(if_num), ddr_dll_ctl3.u64);
1595 lmc_wr(priv, CVMX_LMCX_DLL_CTL3(if_num), ddr_dll_ctl3.u64);
1598 lmc_wr(priv, CVMX_LMCX_DLL_CTL3(if_num), ddr_dll_ctl3.u64);
1607 ext_config.u64 = lmc_rd(priv, CVMX_LMCX_EXT_CONFIG(if_num));
1613 lmc_wr(priv, CVMX_LMCX_EXT_CONFIG(if_num), ext_config.u64);
1622 oct3_ddr3_seq(priv, rank_mask, if_num, 0x0A);
1684 int if_num, int spd_rawcard_aorb)
1698 debug("N0.LMC%d: Performing Deskew Training.\n", if_num);
1731 ext_config.u64 = lmc_rd(priv, CVMX_LMCX_EXT_CONFIG(if_num));
1738 lmc_wr(priv, CVMX_LMCX_EXT_CONFIG(if_num), ext_config.u64);
1747 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1749 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1752 oct3_ddr3_seq(priv, rank_mask, if_num, 0x0A);
1758 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
1760 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
1763 oct3_ddr3_seq(priv, rank_mask, if_num, 0x0A);
1770 validate_deskew_training(priv, rank_mask, if_num, &dsk_counts,
1789 if_num, lock_retries_limit);
1795 if_num, lock_retries);
1807 if_num);
1814 if_num, (sat_retries >= DEFAULT_SAT_RETRY_LIMIT) ?
1825 if_num);
1827 validate_deskew_training(priv, rank_mask, if_num,
1955 static int compute_vref_val(struct ddr_priv *priv, int if_num, int rankx,
1972 if_num, rankx, __func__, dram_connection);
1980 if_num, rankx);
1989 if_num, rankx, new_vref, new_vref);
2003 lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS1(if_num));
2005 lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS2(if_num));
2006 comp_ctl2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
2049 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
2050 lmc_control.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
2084 if_num, rankx, computed_final_vref_value,
2173 static void do_display_rl(int if_num,
2201 if_num, rank, lmc_rlevel_rank.s.status, msg_buf,
2209 static void display_rl(int if_num,
2212 do_display_rl(if_num, lmc_rlevel_rank, rank, 0, 0);
2215 static void display_rl_with_score(int if_num,
2219 do_display_rl(if_num, lmc_rlevel_rank, rank, 1, score);
2222 static void display_rl_with_final(int if_num,
2226 do_display_rl(if_num, lmc_rlevel_rank, rank, 4, 0);
2229 static void display_rl_with_computed(int if_num,
2233 do_display_rl(if_num, lmc_rlevel_rank, rank, 9, score);
2248 static void display_rl_with_rodt(int if_num,
2271 if_num, rank, set_buf, msg_buf, lmc_rlevel_rank.s.byte8,
2278 static void do_display_wl(int if_num,
2294 if_num, rank, lmc_wlevel_rank.s.status, msg_buf,
2302 static void display_wl(int if_num,
2305 do_display_wl(if_num, lmc_wlevel_rank, rank, WITH_NOTHING);
2308 static void display_wl_with_final(int if_num,
2312 do_display_wl(if_num, lmc_wlevel_rank, rank, WITH_FINAL);
2337 static void do_display_bm(int if_num, int rank, void *bm,
2346 if_num, rank, bitmasks[8], bitmasks[7], bitmasks[6],
2357 if_num, rank, ppbm(rlevel_bitmask[8].bm),
2370 if_num, rank, rlevel_bitmask[8].errs,
2382 if_num, rank, rlevel_byte[XPU(8, ecc)].sqerrs,
2394 static void display_wl_bm(int if_num, int rank, int *bitmasks)
2396 do_display_bm(if_num, rank, (void *)bitmasks, WITH_WL_BITMASKS, 0);
2399 static void display_rl_bm(int if_num, int rank,
2402 do_display_bm(if_num, rank, (void *)bitmasks, WITH_RL_BITMASKS,
2406 static void display_rl_bm_scores(int if_num, int rank,
2409 do_display_bm(if_num, rank, (void *)bitmasks, WITH_RL_MASK_SCORES,
2413 static void display_rl_seq_scores(int if_num, int rank,
2416 do_display_bm(if_num, rank, (void *)bytes, WITH_RL_SEQ_SCORES, ecc_ena);
2566 static int if_num __section(".data");
2797 s = lookup_env(priv, "ddr%d_early_dqx", if_num);
2826 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), cfg.u64);
2834 ctrl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
2897 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctrl.u64);
2906 tp0.u64 = lmc_rd(priv, CVMX_LMCX_TIMING_PARAMS0(if_num));
2970 lmc_wr(priv, CVMX_LMCX_TIMING_PARAMS0(if_num), tp0.u64);
2979 tp1.u64 = lmc_rd(priv, CVMX_LMCX_TIMING_PARAMS1(if_num));
3058 node, if_num, die_capacity);
3063 node, if_num, tp1.cn78xx.trfc_dlr);
3073 lmc_wr(priv, CVMX_LMCX_TIMING_PARAMS1(if_num), tp1.u64);
3083 tp1.u64 = lmc_rd(priv, CVMX_LMCX_TIMING_PARAMS1(if_num));
3084 tp2.u64 = lmc_rd(priv, CVMX_LMCX_TIMING_PARAMS2(if_num));
3106 lmc_wr(priv, CVMX_LMCX_TIMING_PARAMS2(if_num), tp2.u64);
3118 lmc_wr(priv, CVMX_LMCX_TIMING_PARAMS1(if_num), tp1.u64);
3129 mp0.u64 = lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num));
3306 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num), mp0.u64);
3350 s = lookup_env(priv, "ddr%d_rtt_nom_%1d%1d", if_num,
3363 s = lookup_env(priv, "ddr%d_rtt_nom", if_num);
3394 s = lookup_env(priv, "ddr%d_rtt_wr_%1d%1d", if_num,
3467 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS1(if_num), mp1.u64);
3526 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS2(if_num), mp2.u64);
3537 mp3.u64 = lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS3(if_num));
3557 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS3(if_num), mp3.u64);
3569 lmc_nxm.u64 = lmc_rd(priv, CVMX_LMCX_NXM(if_num));
3590 lmc_wr(priv, CVMX_LMCX_NXM(if_num), lmc_nxm.u64);
3606 lmc_wr(priv, CVMX_LMCX_WODT_MASK(if_num), wodt_mask.u64);
3622 lmc_wr(priv, CVMX_LMCX_RODT_MASK(if_num), rodt_mask.u64);
3652 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
3670 s = lookup_env(priv, "ddr%d_rodt_ctl", if_num);
3685 node, if_num, cc2.s.ck_ctl,
3699 node, if_num, ddr4_driver_26_ohm,
3732 lmc_wr(priv, CVMX_LMCX_COMP_CTL2(if_num), cc2.u64);
3739 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
3749 node, if_num, phy_ctl.s.c1_sel);
3754 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
3762 ext_cfg.u64 = lmc_rd(priv, CVMX_LMCX_EXT_CONFIG(if_num));
3791 node, if_num, ext_cfg.s.dimm0_cid);
3794 lmc_wr(priv, CVMX_LMCX_EXT_CONFIG(if_num), ext_cfg.u64);
3809 ext_cfg2.u64 = lmc_rd(priv, CVMX_LMCX_EXT_CONFIG2(if_num));
3820 lmc_wr(priv, CVMX_LMCX_EXT_CONFIG2(if_num), ext_cfg2.u64);
3833 dimm_p.u64 = lmc_rd(priv, CVMX_LMCX_DIMMX_PARAMS(dimmx, if_num));
3950 lmc_wr(priv, CVMX_LMCX_DIMMX_DDR4_PARAMS0(dimmx, if_num),
3953 lmc_wr(priv, CVMX_LMCX_DDR4_DIMM_CTL(if_num), ddr4_ctl.u64);
3955 lmc_wr(priv, CVMX_LMCX_DIMMX_DDR4_PARAMS1(dimmx, if_num),
4067 lmc_wr(priv, CVMX_LMCX_DIMMX_PARAMS(dimmx, if_num), dimm_p.u64);
4081 lmc_wr(priv, CVMX_LMCX_DIMMX_PARAMS(1, if_num), dimm_p.u64);
4096 dimm_ctl.u64 = lmc_rd(priv, CVMX_LMCX_DIMM_CTL(if_num));
4125 lmc_wr(priv, CVMX_LMCX_DIMM_CTL(if_num), dimm_ctl.u64);
4128 oct3_ddr3_seq(priv, rank_mask, if_num, 0x7);
4136 lmc_wr(priv, CVMX_LMCX_DIMM_CTL(if_num), dimm_ctl.u64);
4141 lmc_wr(priv, CVMX_LMCX_DDR4_DIMM_CTL(if_num), 0);
4144 oct3_ddr3_seq(priv, rank_mask, if_num, 0x7);
4147 dimm_ctl.u64 = lmc_rd(priv, CVMX_LMCX_DIMM_CTL(if_num));
4183 lmc_wr(priv, CVMX_LMCX_DIMM_CTL(if_num), dimm_ctl.u64);
4186 oct3_ddr3_seq(priv, rank_mask, if_num, 0x7);
4193 dimm_ctl.u64 = lmc_rd(priv, CVMX_LMCX_DIMM_CTL(if_num));
4196 lmc_wr(priv, CVMX_LMCX_DIMM_CTL(if_num), dimm_ctl.u64);
4233 oct3_ddr3_seq(priv, rank_mask, if_num, 3);
4236 mp0.u64 = lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num));
4238 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num), mp0.u64);
4240 ddr_init_seq(priv, rank_mask, if_num);
4243 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num), mp0.u64);
4248 change_rdimm_mpr_pattern(priv, rank_mask, if_num, dimm_count);
4260 node, if_num);
4284 perform_offset_training(priv, rank_mask, if_num);
4290 perform_internal_vref_training(priv, rank_mask, if_num);
4293 read_dac_dbi_settings(priv, if_num, /*DAC*/ 1,
4296 display_dac_dbi_settings(if_num, /*DAC*/ 1,
4310 node, if_num);
4314 node, if_num);
4339 node, if_num, total_dac_eval_retries, dac_eval_exhausted);
4344 node, if_num);
4349 num_samples, if_num, lane);
4351 display_dac_dbi_settings(if_num, /*DAC*/ 1, use_ecc,
4356 load_dac_override(priv, if_num, dac_settings[lane],
4365 s = lookup_env(priv, "ddr%d_vref_dac_byte%d", if_num, lane);
4370 load_dac_override(priv, if_num, dac_settings[lane],
4375 display_dac_dbi_settings(if_num, /*DAC*/ 1, use_ecc,
4385 load_dac_override(priv, if_num, 127, /* all */ 0x0A);
4387 node, if_num);
4397 perform_deskew_training(priv, rank_mask, if_num,
4409 node, if_num, internal_retries);
4414 node, if_num, internal_retries);
4418 node, if_num, internal_retries);
4425 validate_deskew_training(priv, rank_mask, if_num,
4429 node, if_num);
4430 validate_deskew_training(priv, rank_mask, if_num,
4435 read_dac_dbi_settings(priv, if_num, /*dac */ 1,
4437 get_deskew_settings(priv, if_num, &rank_dsk[by_rank]);
4451 ddr_init_seq(priv, rank_mask, if_num);
4453 process_by_rank_dac(priv, if_num, rank_mask, rank_dac);
4454 process_by_rank_dsk(priv, if_num, rank_mask, rank_dsk);
4471 lmc_config.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
4488 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), lmc_config.u64);
4489 lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
4514 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), lmc_config.u64);
4515 lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
4562 node, if_num, rankx, rank_nom,
4572 wl_ctl.u64 = lmc_rd(priv, CVMX_LMCX_WLEVEL_CTL(if_num));
4583 lmc_wr(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num), 0);
4594 lmc_wr(priv, CVMX_LMCX_WLEVEL_CTL(if_num), wl_ctl.u64);
4601 lmc_wr(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num),
4602 lmc_rd(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num)));
4605 oct3_ddr3_seq(priv, 1 << rankx, if_num, 6);
4610 if_num));
4614 if_num));
4618 if_num,
4632 validate_hw_wl_settings(if_num,
4652 node, if_num, rankx,
4661 node, if_num, rankx,
4672 display_wl_bm(if_num, rankx, wl_mask);
4673 display_wl(if_num, wl_rank, rankx);
4685 lmc_wr(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num),
4687 display_wl(if_num, wl_rank, rankx);
4716 node, if_num, rankx, extra_bumps,
4743 node, if_num, rankx,
4769 if_num,
4810 node, if_num, rankx, byte_idx, mc,
4816 lmc_wr(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num),
4818 display_wl_with_final(if_num, wl_rank, rankx);
4830 node, if_num, rankx, wl_mask_err_rank,
4991 if_num);
4996 if_num);
5002 cfg.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
5006 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), cfg.u64);
5036 mp1.u64 = lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS1(if_num));
5049 node, if_num, default_bitmask);
5053 wl_ctl.u64 = lmc_rd(priv, CVMX_LMCX_WLEVEL_CTL(if_num));
5070 node, if_num, wl_ctl.s.or_dis, wl_ctl.s.bitmask);
5074 lmc_wr(priv, CVMX_LMCX_WLEVEL_CTL(if_num), wl_ctl.u64);
5081 cfg.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
5083 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), cfg.u64);
5092 node, if_num);
5095 validate_deskew_training(priv, rank_mask, if_num,
5106 node, if_num, retry_count);
5107 perform_deskew_training(priv, rank_mask, if_num,
5125 tp1.u64 = lmc_rd(priv, CVMX_LMCX_TIMING_PARAMS1(if_num));
5126 mp0.u64 = lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num));
5127 ctrl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
5138 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num),
5142 lmc_wr(priv, CVMX_LMCX_TIMING_PARAMS1(if_num), tp1.u64);
5146 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctrl.u64);
5153 ddr4_mrw(priv, if_num, rankx, -1, 1, 0);
5164 node, if_num);
5165 validate_deskew_training(priv, rank_mask, if_num, &dsk_counts,
5184 phy_ctl2.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL2(if_num));
5185 ext_cfg.u64 = lmc_rd(priv, CVMX_LMCX_EXT_CONFIG(if_num));
5186 dll_ctl3.u64 = lmc_rd(priv, CVMX_LMCX_DLL_CTL3(if_num));
5197 lmc_wr(priv, CVMX_LMCX_EXT_CONFIG(if_num), ext_cfg.u64);
5199 if_num);
5252 set_vref(priv, if_num, rankx, vrange, vvalue);
5259 node, if_num, rankx,
5300 node, if_num, rankx, vvlo, vrlo,
5314 CVMX_LMCX_MODEREG_PARAMS2(if_num));
5321 node, if_num, rankx, final_vref_val,
5329 if_num, !!(rankx & 2), !!(rankx & 1));
5333 set_vref(priv, if_num, rankx, final_vref_range, final_vref_val);
5358 lmc_wr(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num), wl_rank.u64);
5359 wl_rank.u64 = lmc_rd(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num));
5363 errors = run_best_hw_patterns(priv, if_num, rank_addr,
5367 start_dram_dclk = lmc_rd(priv, CVMX_LMCX_DCLK_CNT(if_num));
5368 start_dram_ops = lmc_rd(priv, CVMX_LMCX_OPS_CNT(if_num));
5369 errors = test_dram_byte64(priv, if_num, rank_addr, bytemask,
5372 stop_dram_dclk = lmc_rd(priv, CVMX_LMCX_DCLK_CNT(if_num));
5373 stop_dram_ops = lmc_rd(priv, CVMX_LMCX_OPS_CNT(if_num));
5428 node, if_num,
5457 node, if_num, rankx,
5534 node, if_num, rankx,
5594 lmc_wr(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num),
5598 if_num));
5602 errors = run_best_hw_patterns(priv, if_num,
5607 errors = test_dram_byte64(priv, if_num,
5691 if_num));
5736 s = lookup_env(priv, "ddr%d_dram_connection", if_num);
5762 cfg.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
5764 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), cfg.u64);
5791 node, if_num, rankx,
5798 compute_vref_val(priv, if_num, rankx, dimm_count,
5813 if_num));
5849 node, if_num, rankx, active_rank,
5892 node, if_num, rankx, sum_dram_ops,
5908 node, if_num, rankx);
5976 node, if_num, rankx, vref_val,
5984 node, if_num, rankx, vref_val);
6004 node, if_num, rankx);
6008 lmc_wr(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num),
6012 if_num));
6025 errors = run_best_hw_patterns(priv, if_num,
6030 errors = test_dram_byte64(priv, if_num,
6037 node, if_num, rankx, errors);
6045 node, if_num, rankx, wl_rank.s.status, wl_rank.u64,
6063 node, if_num, rankx);
6069 node, if_num, rankx);
6084 if_num));
6088 if_num, rankx, i);
6097 s = lookup_env_ull(priv, "ddr%d_wlevel_rank%d", if_num, rankx);
6105 lmc_wr(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num),
6108 lmc_rd(priv, CVMX_LMCX_WLEVEL_RANKX(rankx, if_num));
6109 display_wl(if_num, wl_rank, rankx);
6115 node, if_num, rankx);
6122 node, if_num, rankx, 1);
6125 if_num),
6132 node, if_num, rankx, 2);
6135 if_num),
6144 node, if_num, rankx, 3);
6147 if_num),
6155 cfg.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
6163 lmc_wr(priv, CVMX_LMCX_CONFIG(if_num), cfg.u64);
6174 ddr_dll_ctl3.u64 = lmc_rd(priv, CVMX_LMCX_DLL_CTL3(if_num));
6178 lmc_wr(priv, CVMX_LMCX_DLL_CTL3(if_num), ddr_dll_ctl3.u64);
6179 lmc_rd(priv, CVMX_LMCX_DLL_CTL3(if_num));
6180 ddr_dll_ctl3.u64 = lmc_rd(priv, CVMX_LMCX_DLL_CTL3(if_num));
6182 debug("%d. LMC%d_DLL_CTL3[%d] = %016llx %d\n", i, if_num,
6188 node, if_num, "DLL90 Setting 8:0",
6192 process_custom_dll_offsets(priv, if_num, "ddr_dll_write_offset",
6195 process_custom_dll_offsets(priv, if_num, "ddr_dll_read_offset",
6218 slot_ctl0.u64 = lmc_rd(priv, CVMX_LMCX_SLOT_CTL0(if_num));
6219 slot_ctl1.u64 = lmc_rd(priv, CVMX_LMCX_SLOT_CTL1(if_num));
6220 slot_ctl2.u64 = lmc_rd(priv, CVMX_LMCX_SLOT_CTL2(if_num));
6222 ext_cfg.u64 = lmc_rd(priv, CVMX_LMCX_EXT_CONFIG(if_num));
6236 lmc_wr(priv, CVMX_LMCX_SLOT_CTL0(if_num), slot_ctl0.u64);
6237 lmc_wr(priv, CVMX_LMCX_SLOT_CTL1(if_num), slot_ctl1.u64);
6238 lmc_wr(priv, CVMX_LMCX_SLOT_CTL2(if_num), slot_ctl2.u64);
6247 slot_ctl1.u64 = lmc_rd(priv, CVMX_LMCX_SLOT_CTL1(if_num));
6250 lmc_wr(priv, CVMX_LMCX_SLOT_CTL1(if_num), slot_ctl1.u64);
6252 slot_ctl2.u64 = lmc_rd(priv, CVMX_LMCX_SLOT_CTL2(if_num));
6255 lmc_wr(priv, CVMX_LMCX_SLOT_CTL2(if_num), slot_ctl2.u64);
6308 lmc_wr(priv, CVMX_LMCX_INT(if_num), -1ULL);
6309 lmc_rd(priv, CVMX_LMCX_INT(if_num));
6326 lmc_rd(priv, CVMX_LMCX_INT(if_num)));
6340 ctrl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
6341 lmc_scramble_cfg0.u64 = lmc_rd(priv, CVMX_LMCX_SCRAMBLE_CFG0(if_num));
6342 lmc_scramble_cfg1.u64 = lmc_rd(priv, CVMX_LMCX_SCRAMBLE_CFG1(if_num));
6346 lmc_rd(priv, CVMX_LMCX_SCRAMBLE_CFG2(if_num));
6348 lmc_ns_ctl.u64 = lmc_rd(priv, CVMX_LMCX_NS_CTL(if_num));
6372 lmc_wr(priv, CVMX_LMCX_SCRAMBLE_CFG0(if_num), lmc_scramble_cfg0.u64);
6381 lmc_wr(priv, CVMX_LMCX_SCRAMBLE_CFG1(if_num), lmc_scramble_cfg1.u64);
6391 lmc_wr(priv, CVMX_LMCX_SCRAMBLE_CFG2(if_num),
6399 lmc_wr(priv, CVMX_LMCX_NS_CTL(if_num), lmc_ns_ctl.u64);
6401 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctrl.u64);
6477 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
6479 lmc_wr(priv, CVMX_LMCX_COMP_CTL2(if_num), cc2.u64);
6480 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
6504 lmc_wr(priv, CVMX_LMCX_RLEVEL_RANKX(rankx, if_num), 0);
6507 oct3_ddr3_seq(priv, 1 << rankx, if_num, 1);
6513 if_num));
6518 lmc_rd(priv, CVMX_LMCX_RLEVEL_RANKX(rankx, if_num));
6530 lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num));
6533 CVMX_LMCX_MODEREG_PARAMS0(if_num),
6537 lmc_wr(priv, CVMX_LMCX_RLEVEL_RANKX(rankx, if_num), 0);
6540 oct3_ddr3_seq(priv, 1 << rankx, if_num, 1);
6546 if_num));
6551 if_num));
6555 rl_mask[0].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 0);
6556 rl_mask[1].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 1);
6557 rl_mask[2].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 2);
6558 rl_mask[3].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 3);
6559 rl_mask[8].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 8);
6564 lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num));
6566 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num),
6570 lmc_wr(priv, CVMX_LMCX_RLEVEL_RANKX(rankx, if_num), 0);
6573 oct3_ddr3_seq(priv, 1 << rankx, if_num, 1);
6579 if_num));
6584 if_num));
6586 rl_mask[4].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 4);
6587 rl_mask[5].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 5);
6588 rl_mask[6].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 6);
6589 rl_mask[7].bm = lmc_ddr3_rl_dbg_read(priv, if_num, 7);
6600 lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num));
6602 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS0(if_num),
6620 lmc_ddr3_rl_dbg_read(priv, if_num, i);
6728 display_rl_bm(if_num, rankx, rl_mask, ecc_ena);
6729 display_rl_bm_scores(if_num, rankx, rl_mask,
6731 display_rl_seq_scores(if_num, rankx, rl_byte,
6734 display_rl_with_score(if_num, rl_rank, rankx,
6768 display_rl_with_computed(if_num,
6809 rl_rank.u64 = lmc_rd(priv, CVMX_LMCX_RLEVEL_RANKX(rankx, if_num));
6826 display_rl_bm(if_num, rankx, rl_mask, ecc_ena);
6827 display_rl_bm_scores(if_num, rankx, rl_mask, ecc_ena);
6828 display_rl_seq_scores(if_num, rankx, rl_byte, ecc_ena);
6830 display_rl_with_rodt(if_num, rl_rank, rankx,
6859 node, if_num, rankx,
6921 node, if_num, rankx, rtt_nom,
6935 node, if_num, rankx, rodt_ctl,
6970 node, if_num, rankx, orankx, rodt_ctl, next_ohms, next_score,
6990 node, if_num, rankx);
7013 lmc_wr(priv, CVMX_LMCX_RLEVEL_RANKX(rankx, if_num), rl_rank.u64);
7014 rl_rank.u64 = lmc_rd(priv, CVMX_LMCX_RLEVEL_RANKX(rankx, if_num));
7090 display_rl_with_rodt(if_num,
7101 node, if_num, rankx, selected_rows[0], selected_rows[1],
7188 node, if_num, rankx,
7272 node, if_num, rankx,
7317 node, if_num, rankx,
7349 node, if_num, rankx, i, maj_byte,
7356 node, if_num, rankx, i, new_byte);
7384 node, if_num,
7403 if_num,
7418 if_num,
7439 node, if_num, rankx, i, new_byte);
7459 node, if_num, rankx, i, value_mask >> i,
7502 node, if_num, rankx, i, value_mask >> i,
7603 node, if_num, rankx, i,
7618 node, if_num, rankx, i,
7634 node, if_num, rankx, i,
7642 node, if_num, rankx, i,
7652 node, if_num, rankx, i,
7660 node, if_num, rankx, i,
7669 node, if_num, rankx, i, orig_best_byte,
7685 lmc_wr(priv, CVMX_LMCX_RLEVEL_RANKX(rankx, if_num),
7688 CVMX_LMCX_RLEVEL_RANKX(rankx, if_num));
7693 display_rl_with_final(if_num, rl_rank, rankx);
7724 node, if_num, rankx, 1);
7727 if_num),
7734 node, if_num, rankx, 2);
7737 if_num),
7746 node, if_num, rankx, 3);
7747 lmc_wr(priv, CVMX_LMCX_RLEVEL_RANKX(3, if_num),
7896 ctl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
7899 cfg.u64 = lmc_rd(priv, CVMX_LMCX_CONFIG(if_num));
7906 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctl.u64);
7908 debug("LMC%d: Performing Read-Leveling\n", if_num);
7910 rl_ctl.u64 = lmc_rd(priv, CVMX_LMCX_RLEVEL_CTL(if_num));
7974 CVMX_LMCX_RLEVEL_CTL(if_num),
7987 s = lookup_env(priv, "ddr%d_rlevel_debug_loops", if_num);
8055 if_num));
8068 c_cfg->rl_tbl[i].rl_rank[if_num][rankx];
8071 if_num),
8076 if_num));
8077 display_rl(if_num, rl_rank, rankx);
8100 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8116 ctl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
8120 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctl.u64);
8123 lmc_wr(priv, CVMX_LMCX_COMP_CTL2(if_num), cc2.u64);
8124 lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8126 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8131 mp1.u64 = lmc_rd(priv, CVMX_LMCX_MODEREG_PARAMS1(if_num));
8160 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS1(if_num),
8176 ddr_init_seq(priv, rank_mask, if_num);
8191 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8196 lmc_wr(priv, CVMX_LMCX_COMP_CTL2(if_num), cc2.u64);
8198 lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8201 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8205 ctl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
8208 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctl.u64);
8227 ctl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
8231 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctl.u64);
8234 lmc_wr(priv, CVMX_LMCX_COMP_CTL2(if_num), cc2.u64);
8236 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8332 CVMX_LMCX_MODEREG_PARAMS1(if_num));
8354 lmc_wr(priv, CVMX_LMCX_MODEREG_PARAMS1(if_num),
8396 CVMX_LMCX_MODEREG_PARAMS2(if_num));
8423 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8430 lmc_wr(priv, CVMX_LMCX_COMP_CTL2(if_num), cc2.u64);
8431 cc2.u64 = lmc_rd(priv, CVMX_LMCX_COMP_CTL2(if_num));
8449 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctl.u64);
8450 ctl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
8455 ddr_init_seq(priv, rank_mask, if_num);
8465 if_num));
8469 if_num, rankx, i);
8478 s = lookup_env_ull(priv, "ddr%d_rlevel_rank%d", if_num, rankx);
8487 CVMX_LMCX_RLEVEL_RANKX(rankx, if_num),
8491 if_num));
8492 display_rl(if_num, rl_rank, rankx);
8507 if_num = _if_num;
8574 node, if_num, ddr_hertz, ddr_ref_hertz, read_c0_prid());
8607 printf(" --ddr%dspd=0x%02x", if_num,
8624 dimm_count, if_num);
8632 ddr_ref_hertz, if_num, if_mask);
9007 s = lookup_env(priv, "ddr%d_wlevel_loops", if_num);
9397 node, if_num, trfc, new_trfc);
9598 perform_lmc_reset(priv, node, if_num);
9601 ctrl.u64 = lmc_rd(priv, CVMX_LMCX_CONTROL(if_num));
9603 lmc_wr(priv, CVMX_LMCX_CONTROL(if_num), ctrl.u64);
9605 lmc_wr(priv, CVMX_LMCX_SCRAMBLE_CFG0(if_num), 0);
9606 lmc_wr(priv, CVMX_LMCX_SCRAMBLE_CFG1(if_num), 0);
9608 lmc_wr(priv, CVMX_LMCX_SCRAMBLE_CFG2(if_num), 0);
9720 display_mpr_page(priv, rank_mask, if_num, i);
9885 static int test_dram_byte_hw(struct ddr_priv *priv, int if_num, u64 p,
9923 node, if_num, temp, kshift);
9926 node, if_num, kshift, temp);
9934 rlevel_ctl.u64 = lmc_rd(priv, CVMX_LMCX_RLEVEL_CTL(if_num));
9938 lmc_wr(priv, CVMX_LMCX_RLEVEL_CTL(if_num), rlevel_ctl.u64);
9953 p |= (if_num << 7); /* Map address into proper interface */
9976 if (lmc != if_num) {
9986 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
9988 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
10022 dbtrain_ctl.u64 = lmc_rd(priv, CVMX_LMCX_DBTRAIN_CTL(if_num));
10051 lmc_wr(priv, CVMX_LMCX_DBTRAIN_CTL(if_num),
10062 oct3_ddr3_seq(priv, prank, if_num, 14);
10071 mpr_data0 = lmc_rd(priv, CVMX_LMCX_MPR_DATA0(if_num));
10072 mpr_data1 = lmc_rd(priv, CVMX_LMCX_MPR_DATA1(if_num));
10080 phy_ctl.u64 = lmc_rd(priv, CVMX_LMCX_PHY_CTL(if_num));
10082 lmc_wr(priv, CVMX_LMCX_PHY_CTL(if_num), phy_ctl.u64);
10109 lmc_wr(priv, CVMX_LMCX_RLEVEL_CTL(if_num), rlevel_ctl.u64);
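The matches above are dominated by one idiom: read a per-interface LMC CSR with lmc_rd(priv, CVMX_LMCX_*(if_num)), modify a field, write it back with lmc_wr(), and read the register once more to flush the write (the listing's own "// flush writes" comment next to the DLL_CTL3 accesses marks this). Below is a minimal, host-runnable sketch of that read-modify-write pattern. It is an illustration only: lmc_rd()/lmc_wr(), the CSR address macro, and the union layout/field name are simplified stand-ins, not the driver's real definitions.

/*
 * Hedged sketch of the per-interface CSR read-modify-write idiom seen
 * throughout the matches above. All helpers and the register layout here
 * are hypothetical stand-ins so the example compiles and runs host-side.
 */
#include <stdint.h>
#include <stdio.h>

/* Stand-in for the real CSR address macro; just an index in this sketch. */
#define CVMX_LMCX_RESET_CTL(if_num)	((uint64_t)(if_num))

union lmcx_reset_ctl {
	uint64_t u64;
	struct {
		uint64_t ddr3rst:1;	/* hypothetical field: DDR_RESET_L level */
		uint64_t reserved:63;
	} s;
};

/* Fake register file standing in for the LMC hardware. */
static uint64_t fake_csr[4];

static uint64_t lmc_rd(void *priv, uint64_t addr)
{
	(void)priv;
	return fake_csr[addr & 3];
}

static void lmc_wr(void *priv, uint64_t addr, uint64_t val)
{
	(void)priv;
	fake_csr[addr & 3] = val;
}

/* Typical per-interface read-modify-write, keyed by if_num. */
static void lmc_set_reset(void *priv, int if_num, int assert)
{
	union lmcx_reset_ctl reset_ctl;

	reset_ctl.u64 = lmc_rd(priv, CVMX_LMCX_RESET_CTL(if_num));
	reset_ctl.s.ddr3rst = assert ? 0 : 1;	/* reset is active-low */
	lmc_wr(priv, CVMX_LMCX_RESET_CTL(if_num), reset_ctl.u64);
	lmc_rd(priv, CVMX_LMCX_RESET_CTL(if_num));	/* read back to flush the write */
}

int main(void)
{
	lmc_set_reset(NULL, 0, 1);	/* assert reset on interface 0 */
	printf("LMC0 RESET_CTL = 0x%016llx\n",
	       (unsigned long long)lmc_rd(NULL, CVMX_LMCX_RESET_CTL(0)));
	return 0;
}

The same shape recurs for CONFIG, PHY_CTL, DLL_CTL3, COMP_CTL2, and the other CSRs in the listing; only the union type and the modified field change.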