Lines matching refs:seq — every matched line in the SoCFPGA SDRAM calibration sequencer that references the per-call state container struct socfpga_sdrseq *seq. The leading number on each entry is the line number within the sequencer source.

57 	((non_skip_value) & seq->skip_delay_mask)
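
Line 57 is the body of the driver's skip-delay macro: a requested delay count is ANDed with seq->skip_delay_mask, which lines 3983-3986 below set to 0xff when delay loops run normally and to 0x0 when CALIB_SKIP_DELAY_LOOPS is in effect, so every delay collapses to zero. A minimal standalone sketch of the idea (the struct and macro names here are illustrative, not the driver's exact definitions):

    #include <stdio.h>

    /* Illustrative stand-in for the driver's per-sequence state. */
    struct fake_seq { unsigned int skip_delay_mask; };

    /* Same shape as the body matched above: AND the requested delay
     * with the mask so that a zero mask turns every delay into zero. */
    #define SKIP_DELAY_VALUE_OR_ZERO(seq, non_skip_value) \
            ((non_skip_value) & (seq)->skip_delay_mask)

    int main(void)
    {
            struct fake_seq run  = { .skip_delay_mask = 0xff }; /* delays enabled */
            struct fake_seq skip = { .skip_delay_mask = 0x00 }; /* delays skipped */

            printf("%u\n", SKIP_DELAY_VALUE_OR_ZERO(&run, 200u));  /* prints 200 */
            printf("%u\n", SKIP_DELAY_VALUE_OR_ZERO(&skip, 200u)); /* prints 0 */
            return 0;
    }
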
74 static void set_failing_group_stage(struct socfpga_sdrseq *seq,
81 if (seq->gbl.error_stage == CAL_STAGE_NIL) {
82 seq->gbl.error_substage = substage;
83 seq->gbl.error_stage = stage;
84 seq->gbl.error_group = group;
109 static void phy_mgr_initialize(struct socfpga_sdrseq *seq)
131 if ((seq->dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL)
134 ratio = seq->rwcfg->mem_dq_per_read_dqs /
135 seq->rwcfg->mem_virtual_groups_per_read_dqs;
136 seq->param.read_correct_mask_vg = (1 << ratio) - 1;
137 seq->param.write_correct_mask_vg = (1 << ratio) - 1;
138 seq->param.read_correct_mask = (1 << seq->rwcfg->mem_dq_per_read_dqs)
140 seq->param.write_correct_mask = (1 << seq->rwcfg->mem_dq_per_write_dqs)
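
Lines 134-140 in phy_mgr_initialize() build the pass/fail bit masks used throughout calibration: one bit per DQ pin within a virtual group (ratio = DQ pins per DQS / virtual groups per DQS) and one bit per DQ pin across the whole group. A worked sketch with an assumed geometry of 8 DQ per read DQS split into 2 virtual groups (illustrative numbers, not read from any real config):

    #include <stdio.h>

    int main(void)
    {
            /* Assumed geometry for illustration only. */
            unsigned int dq_per_read_dqs = 8;
            unsigned int vgroups_per_read_dqs = 2;

            unsigned int ratio = dq_per_read_dqs / vgroups_per_read_dqs;
            unsigned int mask_vg = (1u << ratio) - 1;           /* 0x0f */
            unsigned int mask    = (1u << dq_per_read_dqs) - 1; /* 0xff */

            printf("per-virtual-group mask: 0x%02x\n", mask_vg);
            printf("full read mask:         0x%02x\n", mask);
            return 0;
    }
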
151 static void set_rank_and_odt_mask(struct socfpga_sdrseq *seq,
162 switch (seq->rwcfg->mem_number_of_ranks) {
169 if (seq->rwcfg->mem_number_of_cs_per_dimm == 1) {
324 static void scc_mgr_set_dqs_io_in_delay(struct socfpga_sdrseq *seq,
328 seq->rwcfg->mem_dq_per_write_dqs, delay);
331 static void scc_mgr_set_dm_in_delay(struct socfpga_sdrseq *seq, u32 dm,
335 seq->rwcfg->mem_dq_per_write_dqs + 1 + dm,
344 static void scc_mgr_set_dqs_out1_delay(struct socfpga_sdrseq *seq,
348 seq->rwcfg->mem_dq_per_write_dqs, delay);
351 static void scc_mgr_set_dm_out1_delay(struct socfpga_sdrseq *seq, u32 dm,
355 seq->rwcfg->mem_dq_per_write_dqs + 1 + dm,
393 static void scc_mgr_set_all_ranks(struct socfpga_sdrseq *seq,
399 for (r = 0; r < seq->rwcfg->mem_number_of_ranks;
410 static void scc_mgr_set_dqs_en_phase_all_ranks(struct socfpga_sdrseq *seq,
421 scc_mgr_set_all_ranks(seq, SCC_MGR_DQS_EN_PHASE_OFFSET,
425 static void scc_mgr_set_dqdqs_output_phase_all_ranks(struct socfpga_sdrseq *seq,
436 scc_mgr_set_all_ranks(seq, SCC_MGR_DQDQS_OUT_PHASE_OFFSET,
440 static void scc_mgr_set_dqs_en_delay_all_ranks(struct socfpga_sdrseq *seq,
451 scc_mgr_set_all_ranks(seq, SCC_MGR_DQS_EN_DELAY_OFFSET,
462 static void scc_mgr_set_oct_out1_delay(struct socfpga_sdrseq *seq,
465 const int ratio = seq->rwcfg->mem_if_read_dqs_width /
466 seq->rwcfg->mem_if_write_dqs_width;
514 static void scc_mgr_zero_all(struct socfpga_sdrseq *seq)
522 for (r = 0; r < seq->rwcfg->mem_number_of_ranks;
524 for (i = 0; i < seq->rwcfg->mem_if_read_dqs_width; i++) {
531 seq->iocfg->dqs_in_reserve
537 for (i = 0; i < seq->rwcfg->mem_if_write_dqs_width; i++) {
540 scc_mgr_set_oct_out1_delay(seq, i,
541 seq->iocfg->dqs_out_reserve);
578 static void scc_mgr_load_dqs_for_write_group(struct socfpga_sdrseq *seq,
581 const int ratio = seq->rwcfg->mem_if_read_dqs_width /
582 seq->rwcfg->mem_if_write_dqs_width;
601 static void scc_mgr_zero_group(struct socfpga_sdrseq *seq,
606 for (r = 0; r < seq->rwcfg->mem_number_of_ranks;
609 for (i = 0; i < seq->rwcfg->mem_dq_per_write_dqs; i++) {
621 scc_mgr_set_dm_in_delay(seq, i, 0);
622 scc_mgr_set_dm_out1_delay(seq, i, 0);
630 scc_mgr_set_dqs_io_in_delay(seq, 0);
633 scc_mgr_set_dqs_out1_delay(seq, seq->iocfg->dqs_out_reserve);
634 scc_mgr_set_oct_out1_delay(seq, write_group,
635 seq->iocfg->dqs_out_reserve);
636 scc_mgr_load_dqs_for_write_group(seq, write_group);
650 static void scc_mgr_apply_group_dq_in_delay(struct socfpga_sdrseq *seq,
655 for (i = 0, p = group_bgn; i < seq->rwcfg->mem_dq_per_read_dqs;
669 static void scc_mgr_apply_group_dq_out1_delay(struct socfpga_sdrseq *seq,
674 for (i = 0; i < seq->rwcfg->mem_dq_per_write_dqs; i++) {
681 static void scc_mgr_apply_group_dm_out1_delay(struct socfpga_sdrseq *seq,
687 scc_mgr_set_dm_out1_delay(seq, i, delay1);
694 static void scc_mgr_apply_group_dqs_io_and_oct_out1(struct socfpga_sdrseq *seq,
697 scc_mgr_set_dqs_out1_delay(seq, delay);
700 scc_mgr_set_oct_out1_delay(seq, write_group, delay);
701 scc_mgr_load_dqs_for_write_group(seq, write_group);
712 static void scc_mgr_apply_group_all_out_delay_add(struct socfpga_sdrseq *seq,
719 for (i = 0; i < seq->rwcfg->mem_dq_per_write_dqs; i++)
728 if (new_delay > seq->iocfg->io_out2_delay_max) {
732 seq->iocfg->io_out2_delay_max,
733 new_delay - seq->iocfg->io_out2_delay_max);
734 new_delay -= seq->iocfg->io_out2_delay_max;
735 scc_mgr_set_dqs_out1_delay(seq, new_delay);
742 if (new_delay > seq->iocfg->io_out2_delay_max) {
746 new_delay, seq->iocfg->io_out2_delay_max,
747 new_delay - seq->iocfg->io_out2_delay_max);
748 new_delay -= seq->iocfg->io_out2_delay_max;
749 scc_mgr_set_oct_out1_delay(seq, write_group, new_delay);
752 scc_mgr_load_dqs_for_write_group(seq, write_group);
764 scc_mgr_apply_group_all_out_delay_add_all_ranks(struct socfpga_sdrseq *seq,
770 for (r = 0; r < seq->rwcfg->mem_number_of_ranks;
772 scc_mgr_apply_group_all_out_delay_add(seq, write_group, delay);
783 static void set_jump_as_return(struct socfpga_sdrseq *seq)
791 writel(seq->rwcfg->rreturn, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
800 static void delay_for_n_mem_clocks(struct socfpga_sdrseq *seq,
811 afi_clocks = DIV_ROUND_UP(clocks, seq->misccfg->afi_rate_ratio);
847 writel(seq->rwcfg->idle_loop1,
850 writel(seq->rwcfg->idle_loop1, SDR_PHYGRP_RWMGRGRP_ADDRESS |
859 writel(seq->rwcfg->idle_loop2,
862 writel(seq->rwcfg->idle_loop2,
866 writel(seq->rwcfg->idle_loop2,
874 static void delay_for_n_ns(struct socfpga_sdrseq *seq, const u32 ns)
876 delay_for_n_mem_clocks(seq, (ns * seq->misccfg->afi_clk_freq *
877 seq->misccfg->afi_rate_ratio) / 1000);
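
delay_for_n_ns() (lines 874-877) is a unit conversion in front of delay_for_n_mem_clocks(): nanoseconds times the AFI clock frequency (in MHz) times the AFI rate ratio, divided by 1000, gives the clock count to burn. A worked conversion with assumed numbers (the 200 MHz AFI clock and rate ratio of 2 are hypothetical, not taken from any board configuration):

    #include <stdio.h>

    int main(void)
    {
            /* Assumed values for illustration only. */
            unsigned int ns = 400;
            unsigned int afi_clk_freq_mhz = 200;
            unsigned int afi_rate_ratio = 2;

            /* ns * MHz / 1000 yields whole clocks; the rate ratio scales
             * AFI-side counts up to memory-side clocks. */
            unsigned int clocks =
                    (ns * afi_clk_freq_mhz * afi_rate_ratio) / 1000;

            printf("%u ns -> %u clocks\n", ns, clocks); /* 400 ns -> 160 */
            return 0;
    }
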
889 static void rw_mgr_mem_init_load_regs(struct socfpga_sdrseq *seq,
918 static void rw_mgr_mem_load_user_ddr2(struct socfpga_sdrseq *seq,
925 for (r = 0; r < seq->rwcfg->mem_number_of_ranks; r++) {
927 set_rank_and_odt_mask(seq, r, RW_MGR_ODT_MODE_OFF);
930 writel(seq->rwcfg->precharge_all, grpaddr);
932 writel(seq->rwcfg->emr2, grpaddr);
933 writel(seq->rwcfg->emr3, grpaddr);
934 writel(seq->rwcfg->emr, grpaddr);
937 writel(seq->rwcfg->mr_user, grpaddr);
941 writel(seq->rwcfg->mr_dll_reset, grpaddr);
943 writel(seq->rwcfg->precharge_all, grpaddr);
945 writel(seq->rwcfg->refresh, grpaddr);
946 delay_for_n_ns(seq, 200);
947 writel(seq->rwcfg->refresh, grpaddr);
948 delay_for_n_ns(seq, 200);
950 writel(seq->rwcfg->mr_calib, grpaddr);
951 writel(/*seq->rwcfg->*/0x0b, grpaddr); // EMR_OCD_ENABLE
952 writel(seq->rwcfg->emr, grpaddr);
953 delay_for_n_mem_clocks(seq, 200);
965 static void rw_mgr_mem_load_user_ddr3(struct socfpga_sdrseq *seq,
973 for (r = 0; r < seq->rwcfg->mem_number_of_ranks; r++) {
975 set_rank_and_odt_mask(seq, r, RW_MGR_ODT_MODE_OFF);
979 writel(seq->rwcfg->precharge_all, grpaddr);
985 if ((seq->rwcfg->mem_address_mirroring >> r) & 0x1) {
986 set_jump_as_return(seq);
987 writel(seq->rwcfg->mrs2_mirr, grpaddr);
988 delay_for_n_mem_clocks(seq, 4);
989 set_jump_as_return(seq);
990 writel(seq->rwcfg->mrs3_mirr, grpaddr);
991 delay_for_n_mem_clocks(seq, 4);
992 set_jump_as_return(seq);
993 writel(seq->rwcfg->mrs1_mirr, grpaddr);
994 delay_for_n_mem_clocks(seq, 4);
995 set_jump_as_return(seq);
998 set_jump_as_return(seq);
999 writel(seq->rwcfg->mrs2, grpaddr);
1000 delay_for_n_mem_clocks(seq, 4);
1001 set_jump_as_return(seq);
1002 writel(seq->rwcfg->mrs3, grpaddr);
1003 delay_for_n_mem_clocks(seq, 4);
1004 set_jump_as_return(seq);
1005 writel(seq->rwcfg->mrs1, grpaddr);
1006 set_jump_as_return(seq);
1013 set_jump_as_return(seq);
1014 writel(seq->rwcfg->zqcl, grpaddr);
1017 delay_for_n_mem_clocks(seq, 512);
1029 static void rw_mgr_mem_load_user(struct socfpga_sdrseq *seq,
1034 rw_mgr_mem_load_user_ddr2(seq, precharge);
1036 rw_mgr_mem_load_user_ddr3(seq, fin1, fin2, precharge);
1045 static void rw_mgr_mem_initialize(struct socfpga_sdrseq *seq)
1078 rw_mgr_mem_init_load_regs(seq, seq->misccfg->tinit_cntr0_val,
1079 seq->misccfg->tinit_cntr1_val,
1080 seq->misccfg->tinit_cntr2_val,
1081 seq->rwcfg->init_reset_0_cke_0);
1087 writel(seq->rwcfg->nop, SDR_PHYGRP_RWMGRGRP_ADDRESS |
1093 delay_for_n_ns(seq, 400);
1109 rw_mgr_mem_init_load_regs(seq, seq->misccfg->treset_cntr0_val,
1110 seq->misccfg->treset_cntr1_val,
1111 seq->misccfg->treset_cntr2_val,
1112 seq->rwcfg->init_reset_1_cke_0);
1116 delay_for_n_mem_clocks(seq, 250);
1119 rw_mgr_mem_load_user(seq, seq->rwcfg->mrs0_dll_reset_mirr,
1120 seq->rwcfg->mrs0_dll_reset, 0);
1129 static void rw_mgr_mem_handoff(struct socfpga_sdrseq *seq)
1131 rw_mgr_mem_load_user(seq, seq->rwcfg->mrs0_user_mirr,
1132 seq->rwcfg->mrs0_user, 1);
1148 static void rw_mgr_mem_calibrate_write_test_issue(struct socfpga_sdrseq *seq,
1153 seq->misccfg->enable_super_quick_calibration;
1183 rw_wl_nop_cycles = seq->gbl.rw_wl_nop_cycles;
1196 mcc_instruction = seq->rwcfg->lfsr_wr_rd_dm_bank_0_wl_1;
1197 writel(seq->rwcfg->lfsr_wr_rd_dm_bank_0_data,
1199 writel(seq->rwcfg->lfsr_wr_rd_dm_bank_0_nop,
1202 mcc_instruction = seq->rwcfg->lfsr_wr_rd_bank_0_wl_1;
1203 writel(seq->rwcfg->lfsr_wr_rd_bank_0_data,
1205 writel(seq->rwcfg->lfsr_wr_rd_bank_0_nop,
1218 mcc_instruction = seq->rwcfg->lfsr_wr_rd_dm_bank_0;
1219 writel(seq->rwcfg->lfsr_wr_rd_dm_bank_0_dqs,
1222 mcc_instruction = seq->rwcfg->lfsr_wr_rd_bank_0;
1223 writel(seq->rwcfg->lfsr_wr_rd_bank_0_dqs,
1241 mcc_instruction = seq->rwcfg->lfsr_wr_rd_dm_bank_0;
1242 writel(seq->rwcfg->lfsr_wr_rd_dm_bank_0_nop,
1245 mcc_instruction = seq->rwcfg->lfsr_wr_rd_bank_0;
1246 writel(seq->rwcfg->lfsr_wr_rd_bank_0_nop,
1268 writel(seq->rwcfg->lfsr_wr_rd_dm_bank_0_wait,
1271 writel(seq->rwcfg->lfsr_wr_rd_bank_0_wait,
1293 rw_mgr_mem_calibrate_write_test(struct socfpga_sdrseq *seq,
1299 seq->rwcfg->mem_number_of_ranks :
1301 const u32 shift_ratio = seq->rwcfg->mem_dq_per_write_dqs /
1302 seq->rwcfg->mem_virtual_groups_per_write_dqs;
1303 const u32 correct_mask_vg = seq->param.write_correct_mask_vg;
1308 *bit_chk = seq->param.write_correct_mask;
1312 set_rank_and_odt_mask(seq, r, RW_MGR_ODT_MODE_READ_WRITE);
1315 for (vg = seq->rwcfg->mem_virtual_groups_per_write_dqs - 1;
1321 seq->rwcfg->mem_virtual_groups_per_write_dqs
1323 rw_mgr_mem_calibrate_write_test_issue(seq, group,
1334 set_rank_and_odt_mask(seq, 0, RW_MGR_ODT_MODE_OFF);
1339 seq->param.write_correct_mask,
1340 *bit_chk == seq->param.write_correct_mask);
1341 return *bit_chk == seq->param.write_correct_mask;
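
The write test (lines 1293-1341) walks the virtual groups from the highest down, masking each group's raw pass bits and shifting them into an accumulator by the DQ-per-virtual-group ratio; across ranks the accumulators are ANDed into *bit_chk, and the group passes only when *bit_chk equals write_correct_mask (line 1341). A condensed sketch of the per-group accumulation, where the shift ratio of 4 and the 0x0d failure pattern are made up for illustration:

    #include <stdio.h>
    #include <stdbool.h>

    int main(void)
    {
            /* Illustrative: 8 DQ per write DQS, 2 virtual groups. */
            unsigned int shift_ratio = 4;
            unsigned int correct_mask_vg = (1u << shift_ratio) - 1; /* 0x0f */
            unsigned int correct_mask = 0xff;

            /* Pretend per-virtual-group pass bitmaps (1 = DQ bit passed);
             * group 1 has one failing DQ. */
            unsigned int vg_result[2] = { 0x0f, 0x0d };

            unsigned int bit_chk = 0;
            for (int vg = 1; vg >= 0; vg--) {
                    bit_chk <<= shift_ratio;
                    bit_chk |= vg_result[vg] & correct_mask_vg;
            }

            bool pass = (bit_chk == correct_mask);
            printf("bit_chk = 0x%02x, pass = %d\n", bit_chk, pass); /* 0xdf, 0 */
            return 0;
    }
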
1360 rw_mgr_mem_calibrate_read_test_patterns(struct socfpga_sdrseq *seq,
1367 (group * seq->rwcfg->mem_virtual_groups_per_read_dqs)
1370 seq->rwcfg->mem_number_of_ranks :
1372 const u32 shift_ratio = seq->rwcfg->mem_dq_per_read_dqs /
1373 seq->rwcfg->mem_virtual_groups_per_read_dqs;
1374 const u32 correct_mask_vg = seq->param.read_correct_mask_vg;
1380 bit_chk = seq->param.read_correct_mask;
1384 set_rank_and_odt_mask(seq, r, RW_MGR_ODT_MODE_READ_WRITE);
1388 writel(seq->rwcfg->guaranteed_read,
1392 writel(seq->rwcfg->guaranteed_read_cont,
1396 for (vg = seq->rwcfg->mem_virtual_groups_per_read_dqs - 1;
1402 writel(seq->rwcfg->guaranteed_read,
1413 writel(seq->rwcfg->clear_dqs_enable, addr + (group << 2));
1415 set_rank_and_odt_mask(seq, 0, RW_MGR_ODT_MODE_OFF);
1417 if (bit_chk != seq->param.read_correct_mask)
1423 seq->param.read_correct_mask, ret);
1436 static void rw_mgr_mem_calibrate_read_load_patterns(struct socfpga_sdrseq *seq,
1441 seq->rwcfg->mem_number_of_ranks :
1449 set_rank_and_odt_mask(seq, r, RW_MGR_ODT_MODE_READ_WRITE);
1454 writel(seq->rwcfg->guaranteed_write_wait0,
1459 writel(seq->rwcfg->guaranteed_write_wait1,
1464 writel(seq->rwcfg->guaranteed_write_wait2,
1469 writel(seq->rwcfg->guaranteed_write_wait3,
1472 writel(seq->rwcfg->guaranteed_write,
1477 set_rank_and_odt_mask(seq, 0, RW_MGR_ODT_MODE_OFF);
1495 rw_mgr_mem_calibrate_read_test(struct socfpga_sdrseq *seq,
1501 const u32 rank_end = all_ranks ? seq->rwcfg->mem_number_of_ranks :
1505 seq->misccfg->enable_super_quick_calibration);
1506 u32 correct_mask_vg = seq->param.read_correct_mask_vg;
1513 *bit_chk = seq->param.read_correct_mask;
1517 set_rank_and_odt_mask(seq, r, RW_MGR_ODT_MODE_READ_WRITE);
1521 writel(seq->rwcfg->read_b2b_wait1,
1525 writel(seq->rwcfg->read_b2b_wait2,
1536 writel(seq->rwcfg->read_b2b,
1539 writel(seq->rwcfg->mem_if_read_dqs_width *
1540 seq->rwcfg->mem_virtual_groups_per_read_dqs - 1,
1545 writel(seq->rwcfg->read_b2b,
1549 for (vg = seq->rwcfg->mem_virtual_groups_per_read_dqs - 1;
1564 writel(seq->rwcfg->read_b2b, addr +
1566 seq->rwcfg->mem_virtual_groups_per_read_dqs +
1571 seq->rwcfg->mem_dq_per_read_dqs /
1572 seq->rwcfg->mem_virtual_groups_per_read_dqs;
1580 writel(seq->rwcfg->clear_dqs_enable, addr + (group << 2));
1582 set_rank_and_odt_mask(seq, 0, RW_MGR_ODT_MODE_OFF);
1585 ret = (*bit_chk == seq->param.read_correct_mask);
1589 seq->param.read_correct_mask, ret);
1611 rw_mgr_mem_calibrate_read_test_all_ranks(struct socfpga_sdrseq *seq,
1617 return rw_mgr_mem_calibrate_read_test(seq, 0, grp, num_tries,
1639 static void rw_mgr_decr_vfifo(struct socfpga_sdrseq *seq, const u32 grp)
1643 for (i = 0; i < seq->misccfg->read_valid_fifo_size - 1; i++)
1653 static int find_vfifo_failing_read(struct socfpga_sdrseq *seq,
1658 for (v = 0; v < seq->misccfg->read_valid_fifo_size; v++) {
1661 ret = rw_mgr_mem_calibrate_read_test_all_ranks(seq, grp, 1,
1690 static int sdr_find_phase_delay(struct socfpga_sdrseq *seq, int working,
1694 const u32 max = delay ? seq->iocfg->dqs_en_delay_max :
1695 seq->iocfg->dqs_en_phase_max;
1700 scc_mgr_set_dqs_en_delay_all_ranks(seq, grp, *pd);
1702 scc_mgr_set_dqs_en_phase_all_ranks(seq, grp, *pd);
1704 ret = rw_mgr_mem_calibrate_read_test_all_ranks(seq, grp, 1,
1728 static int sdr_find_phase(struct socfpga_sdrseq *seq, int working,
1731 const u32 end = seq->misccfg->read_valid_fifo_size + (working ? 0 : 1);
1738 ret = sdr_find_phase_delay(seq, working, 0, grp, work,
1739 seq->iocfg->delay_per_opa_tap, p);
1743 if (*p > seq->iocfg->dqs_en_phase_max) {
1764 static int sdr_working_phase(struct socfpga_sdrseq *seq, const u32 grp,
1767 const u32 dtaps_per_ptap = seq->iocfg->delay_per_opa_tap /
1768 seq->iocfg->delay_per_dqs_en_dchain_tap;
1775 scc_mgr_set_dqs_en_delay_all_ranks(seq, grp, *d);
1776 ret = sdr_find_phase(seq, 1, grp, work_bgn, i, p);
1779 *work_bgn += seq->iocfg->delay_per_dqs_en_dchain_tap;
1796 static void sdr_backup_phase(struct socfpga_sdrseq *seq, const u32 grp,
1804 *p = seq->iocfg->dqs_en_phase_max;
1805 rw_mgr_decr_vfifo(seq, grp);
1809 tmp_delay = *work_bgn - seq->iocfg->delay_per_opa_tap;
1810 scc_mgr_set_dqs_en_phase_all_ranks(seq, grp, *p);
1812 for (d = 0; d <= seq->iocfg->dqs_en_delay_max && tmp_delay < *work_bgn;
1814 scc_mgr_set_dqs_en_delay_all_ranks(seq, grp, d);
1816 ret = rw_mgr_mem_calibrate_read_test_all_ranks(seq, grp, 1,
1823 tmp_delay += seq->iocfg->delay_per_dqs_en_dchain_tap;
1828 if (*p > seq->iocfg->dqs_en_phase_max) {
1833 scc_mgr_set_dqs_en_delay_all_ranks(seq, grp, 0);
1845 static int sdr_nonworking_phase(struct socfpga_sdrseq *seq,
1851 *work_end += seq->iocfg->delay_per_opa_tap;
1852 if (*p > seq->iocfg->dqs_en_phase_max) {
1858 ret = sdr_find_phase(seq, 0, grp, work_end, i, p);
1876 static int sdr_find_window_center(struct socfpga_sdrseq *seq,
1889 tmp_delay = (seq->iocfg->dqs_en_phase_max + 1)
1890 * seq->iocfg->delay_per_opa_tap;
1896 tmp_delay = rounddown(work_mid, seq->iocfg->delay_per_opa_tap);
1897 if (tmp_delay > seq->iocfg->dqs_en_phase_max
1898 * seq->iocfg->delay_per_opa_tap) {
1899 tmp_delay = seq->iocfg->dqs_en_phase_max
1900 * seq->iocfg->delay_per_opa_tap;
1902 p = tmp_delay / seq->iocfg->delay_per_opa_tap;
1907 seq->iocfg->delay_per_dqs_en_dchain_tap);
1908 if (d > seq->iocfg->dqs_en_delay_max)
1909 d = seq->iocfg->dqs_en_delay_max;
1910 tmp_delay += d * seq->iocfg->delay_per_dqs_en_dchain_tap;
1914 scc_mgr_set_dqs_en_phase_all_ranks(seq, grp, p);
1915 scc_mgr_set_dqs_en_delay_all_ranks(seq, grp, d);
1921 for (i = 0; i < seq->misccfg->read_valid_fifo_size; i++) {
1923 if (rw_mgr_mem_calibrate_read_test_all_ranks(seq, grp, 1,
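
sdr_find_window_center() (lines 1876-1923) converts the midpoint of the passing DQS-enable window back into hardware settings: whole VCO phase taps first (rounding the midpoint down to a multiple of delay_per_opa_tap), then the remainder rounded up into delay-chain taps and clamped to dqs_en_delay_max (lines 1896-1910). A worked split with assumed tap sizes (all numbers illustrative):

    #include <stdio.h>

    int main(void)
    {
            /* Assumed tap granularities, not real IO config values. */
            unsigned int delay_per_opa_tap = 2500;   /* one VCO phase tap   */
            unsigned int delay_per_dchain_tap = 400; /* one delay-chain tap */
            unsigned int dqs_en_delay_max = 31;

            unsigned int work_mid = 9100;            /* window midpoint */

            unsigned int p = work_mid / delay_per_opa_tap;       /* 3 */
            unsigned int rem = work_mid - p * delay_per_opa_tap; /* 1600 */
            unsigned int d = (rem + delay_per_dchain_tap - 1)
                             / delay_per_dchain_tap;             /* 4 */
            if (d > dqs_en_delay_max)
                    d = dqs_en_delay_max;

            printf("phase taps = %u, delay taps = %u\n", p, d);
            return 0;
    }
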
1949 rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(struct socfpga_sdrseq *seq,
1962 scc_mgr_set_dqs_en_delay_all_ranks(seq, grp, 0);
1963 scc_mgr_set_dqs_en_phase_all_ranks(seq, grp, 0);
1966 dtaps_per_ptap = seq->iocfg->delay_per_opa_tap /
1967 seq->iocfg->delay_per_dqs_en_dchain_tap;
1970 find_vfifo_failing_read(seq, grp);
1974 ret = sdr_working_phase(seq, grp, &work_bgn, &d, &p, &i);
1990 sdr_backup_phase(seq, grp, &work_bgn, &p);
1996 ret = sdr_nonworking_phase(seq, grp, &work_end, &p, &i);
2004 p = seq->iocfg->dqs_en_phase_max;
2005 rw_mgr_decr_vfifo(seq, grp);
2010 work_end -= seq->iocfg->delay_per_opa_tap;
2011 scc_mgr_set_dqs_en_phase_all_ranks(seq, grp, p);
2020 sdr_find_phase_delay(seq, 0, 1, grp, &work_end,
2021 seq->iocfg->delay_per_dqs_en_dchain_tap, &d);
2025 work_end -= seq->iocfg->delay_per_dqs_en_dchain_tap;
2051 p = seq->iocfg->dqs_en_phase_max;
2052 rw_mgr_decr_vfifo(seq, grp);
2061 scc_mgr_set_dqs_en_phase_all_ranks(seq, grp, p);
2075 found_passing_read = !sdr_find_phase_delay(seq, 1, 1, grp, NULL, 0, &d);
2081 found_failing_read = !sdr_find_phase_delay(seq, 0, 1, grp, NULL,
2092 * (seq->iocfg->dqs_en_delay_max). Otherwise, dtaps_per_ptap retains its
2103 ret = sdr_find_window_center(seq, grp, work_bgn, work_end);
2121 static u32 search_stop_check(struct socfpga_sdrseq *seq, const int write,
2127 const u32 ratio = seq->rwcfg->mem_if_read_dqs_width /
2128 seq->rwcfg->mem_if_write_dqs_width;
2129 const u32 correct_mask = write ? seq->param.write_correct_mask :
2130 seq->param.read_correct_mask;
2131 const u32 per_dqs = write ? seq->rwcfg->mem_dq_per_write_dqs :
2132 seq->rwcfg->mem_dq_per_read_dqs;
2139 ret = !rw_mgr_mem_calibrate_write_test(seq, rank_bgn,
2144 ret = !rw_mgr_mem_calibrate_read_test(seq, rank_bgn, read_group,
2149 rw_mgr_mem_calibrate_write_test(seq, rank_bgn, write_group, 0,
2178 static void search_left_edge(struct socfpga_sdrseq *seq, const int write,
2184 const u32 delay_max = write ? seq->iocfg->io_out1_delay_max :
2185 seq->iocfg->io_in_delay_max;
2186 const u32 dqs_max = write ? seq->iocfg->io_out1_delay_max :
2187 seq->iocfg->dqs_in_delay_max;
2188 const u32 per_dqs = write ? seq->rwcfg->mem_dq_per_write_dqs :
2189 seq->rwcfg->mem_dq_per_read_dqs;
2195 scc_mgr_apply_group_dq_out1_delay(seq, d);
2197 scc_mgr_apply_group_dq_in_delay(seq, test_bgn, d);
2201 stop = search_stop_check(seq, write, d, rank_bgn, write_group,
2231 scc_mgr_apply_group_dq_out1_delay(seq, 0);
2233 scc_mgr_apply_group_dq_in_delay(seq, test_bgn, 0);
2289 static int search_right_edge(struct socfpga_sdrseq *seq, const int write,
2296 const u32 delay_max = write ? seq->iocfg->io_out1_delay_max :
2297 seq->iocfg->io_in_delay_max;
2298 const u32 dqs_max = write ? seq->iocfg->io_out1_delay_max :
2299 seq->iocfg->dqs_in_delay_max;
2300 const u32 per_dqs = write ? seq->rwcfg->mem_dq_per_write_dqs :
2301 seq->rwcfg->mem_dq_per_read_dqs;
2307 scc_mgr_apply_group_dqs_io_and_oct_out1(seq,
2312 if (seq->iocfg->shift_dqs_en_when_shift_dqs) {
2314 if (delay > seq->iocfg->dqs_en_delay_max)
2315 delay = seq->iocfg->dqs_en_delay_max;
2323 stop = search_stop_check(seq, write, d, rank_bgn, write_group,
2329 i < seq->rwcfg->mem_dq_per_write_dqs;
2417 static int get_window_mid_index(struct socfpga_sdrseq *seq,
2421 const u32 per_dqs = write ? seq->rwcfg->mem_dq_per_write_dqs :
2422 seq->rwcfg->mem_dq_per_read_dqs;
2465 static void center_dq_windows(struct socfpga_sdrseq *seq,
2471 const s32 delay_max = write ? seq->iocfg->io_out1_delay_max :
2472 seq->iocfg->io_in_delay_max;
2473 const s32 per_dqs = write ? seq->rwcfg->mem_dq_per_write_dqs :
2474 seq->rwcfg->mem_dq_per_read_dqs;
2541 static int rw_mgr_mem_calibrate_vfifo_center(struct socfpga_sdrseq *seq,
2556 s32 left_edge[seq->rwcfg->mem_dq_per_read_dqs];
2557 s32 right_edge[seq->rwcfg->mem_dq_per_read_dqs];
2567 if (seq->iocfg->shift_dqs_en_when_shift_dqs)
2568 start_dqs_en = readl(addr - seq->iocfg->dqs_en_delay_offset);
2571 /* use (seq->iocfg->io_in_delay_max + 1) as an illegal value */
2573 for (i = 0; i < seq->rwcfg->mem_dq_per_read_dqs; i++) {
2574 left_edge[i] = seq->iocfg->io_in_delay_max + 1;
2575 right_edge[i] = seq->iocfg->io_in_delay_max + 1;
2579 search_left_edge(seq, 0, rank_bgn, rw_group, rw_group, test_bgn,
2585 ret = search_right_edge(seq, 0, rank_bgn, rw_group, rw_group,
2596 if (seq->iocfg->shift_dqs_en_when_shift_dqs)
2606 set_failing_group_stage(seq, rw_group *
2607 seq->rwcfg->mem_dq_per_read_dqs + i,
2611 set_failing_group_stage(seq, rw_group *
2612 seq->rwcfg->mem_dq_per_read_dqs + i,
2619 min_index = get_window_mid_index(seq, 0, left_edge, right_edge,
2625 if (new_dqs > seq->iocfg->dqs_in_delay_max)
2626 new_dqs = seq->iocfg->dqs_in_delay_max;
2634 if (seq->iocfg->shift_dqs_en_when_shift_dqs) {
2635 if (start_dqs_en - mid_min > seq->iocfg->dqs_en_delay_max)
2637 seq->iocfg->dqs_en_delay_max;
2646 seq->iocfg->shift_dqs_en_when_shift_dqs ? start_dqs_en : -1,
2650 center_dq_windows(seq, 0, left_edge, right_edge, mid_min, orig_mid_min,
2654 if (seq->iocfg->shift_dqs_en_when_shift_dqs) {
2689 static int rw_mgr_mem_calibrate_guaranteed_write(struct socfpga_sdrseq *seq,
2696 scc_mgr_set_dqdqs_output_phase_all_ranks(seq, rw_group, phase);
2706 rw_mgr_mem_calibrate_read_load_patterns(seq, 0, 1);
2708 if (seq->gbl.phy_debug_mode_flags & PHY_DEBUG_DISABLE_GUARANTEED_READ)
2715 ret = rw_mgr_mem_calibrate_read_test_patterns(seq, 0, rw_group, 1);
2732 rw_mgr_mem_calibrate_dqs_enable_calibration(struct socfpga_sdrseq *seq,
2742 const u32 delay_step = seq->iocfg->io_in_delay_max /
2743 (seq->rwcfg->mem_dq_per_read_dqs - 1);
2750 for (r = 0; r < seq->rwcfg->mem_number_of_ranks;
2753 i < seq->rwcfg->mem_dq_per_read_dqs;
2770 ret = rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(seq, rw_group);
2776 for (r = 0; r < seq->rwcfg->mem_number_of_ranks;
2778 scc_mgr_apply_group_dq_in_delay(seq, test_bgn, 0);
2796 rw_mgr_mem_calibrate_dq_dqs_centering(struct socfpga_sdrseq *seq,
2811 rank_bgn < seq->rwcfg->mem_number_of_ranks;
2813 ret = rw_mgr_mem_calibrate_vfifo_center(seq, rank_bgn, rw_group,
2844 static int rw_mgr_mem_calibrate_vfifo(struct socfpga_sdrseq *seq,
2863 dtaps_per_ptap = DIV_ROUND_UP(seq->iocfg->delay_per_opa_tap,
2864 seq->iocfg->delay_per_dqs_en_dchain_tap)
2875 scc_mgr_apply_group_all_out_delay_add_all_ranks(seq,
2880 for (p = 0; p <= seq->iocfg->dqdqs_out_phase_max; p++) {
2882 ret = rw_mgr_mem_calibrate_guaranteed_write(seq,
2889 ret = rw_mgr_mem_calibrate_dqs_enable_calibration(seq,
2902 ret = rw_mgr_mem_calibrate_dq_dqs_centering(seq,
2917 set_failing_group_stage(seq, rw_group, CAL_STAGE_VFIFO,
2929 scc_mgr_zero_group(seq, rw_group, 1);
2944 static int rw_mgr_mem_calibrate_vfifo_end(struct socfpga_sdrseq *seq,
2957 ret = rw_mgr_mem_calibrate_dq_dqs_centering(seq, rw_group, test_bgn, 0,
2960 set_failing_group_stage(seq, rw_group,
2975 static u32 rw_mgr_mem_calibrate_lfifo(struct socfpga_sdrseq *seq)
2986 rw_mgr_mem_calibrate_read_load_patterns(seq, 0, 1);
2989 writel(seq->gbl.curr_read_lat, &phy_mgr_cfg->phy_rlat);
2991 __func__, __LINE__, seq->gbl.curr_read_lat);
2993 if (!rw_mgr_mem_calibrate_read_test_all_ranks(seq, 0,
3003 seq->gbl.curr_read_lat--;
3004 } while (seq->gbl.curr_read_lat > 0);
3011 seq->gbl.curr_read_lat += 2;
3012 writel(seq->gbl.curr_read_lat, &phy_mgr_cfg->phy_rlat);
3015 __func__, __LINE__, seq->gbl.curr_read_lat);
3017 set_failing_group_stage(seq, 0xff, CAL_STAGE_LFIFO,
3022 __func__, __LINE__, seq->gbl.curr_read_lat);
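
The LFIFO stage (lines 2975-3022) minimizes round-trip read latency: it keeps decrementing gbl.curr_read_lat while the all-ranks read test still passes, then adds 2 back as a guard band (line 3011). The control flow reduces to the following sketch, with a stub standing in for rw_mgr_mem_calibrate_read_test_all_ranks() and an invented failure threshold:

    #include <stdio.h>
    #include <stdbool.h>

    /* Stand-in for the real read test: pretend reads start failing
     * once latency drops below 7 clocks (arbitrary for this sketch). */
    static bool read_test_passes(unsigned int lat)
    {
            return lat >= 7;
    }

    int main(void)
    {
            unsigned int curr_read_lat = 16; /* assumed starting latency */

            do {
                    if (!read_test_passes(curr_read_lat))
                            break;          /* too low: reads broke */
                    curr_read_lat--;        /* try one clock less */
            } while (curr_read_lat > 0);

            curr_read_lat += 2;             /* guard band, as in the driver */
            printf("final read latency: %u\n", curr_read_lat); /* prints 8 */
            return 0;
    }
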
3043 static void search_window(struct socfpga_sdrseq *seq,
3050 const int max = seq->iocfg->io_out1_delay_max - new_dqs;
3057 scc_mgr_apply_group_dm_out1_delay(seq, d);
3065 scc_mgr_apply_group_dqs_io_and_oct_out1(seq,
3072 if (rw_mgr_mem_calibrate_write_test(seq, rank_bgn, write_group,
3082 if (*bgn_curr == seq->iocfg->io_out1_delay_max + 1)
3096 *bgn_curr = seq->iocfg->io_out1_delay_max + 1;
3097 *end_curr = seq->iocfg->io_out1_delay_max + 1;
3108 if (*win_best - 1 > seq->iocfg->io_out1_delay_max
3125 rw_mgr_mem_calibrate_writes_center(struct socfpga_sdrseq *seq,
3132 int left_edge[seq->rwcfg->mem_dq_per_write_dqs];
3133 int right_edge[seq->rwcfg->mem_dq_per_write_dqs];
3138 int bgn_curr = seq->iocfg->io_out1_delay_max + 1;
3139 int end_curr = seq->iocfg->io_out1_delay_max + 1;
3140 int bgn_best = seq->iocfg->io_out1_delay_max + 1;
3141 int end_best = seq->iocfg->io_out1_delay_max + 1;
3152 (seq->rwcfg->mem_dq_per_write_dqs << 2));
3158 * Use (seq->iocfg->io_out1_delay_max + 1) as an illegal value.
3161 for (i = 0; i < seq->rwcfg->mem_dq_per_write_dqs; i++) {
3162 left_edge[i] = seq->iocfg->io_out1_delay_max + 1;
3163 right_edge[i] = seq->iocfg->io_out1_delay_max + 1;
3167 search_left_edge(seq, 1, rank_bgn, write_group, 0, test_bgn,
3172 ret = search_right_edge(seq, 1, rank_bgn, write_group, 0,
3177 set_failing_group_stage(seq, test_bgn + ret - 1,
3183 min_index = get_window_mid_index(seq, 1, left_edge, right_edge,
3195 center_dq_windows(seq, 1, left_edge, right_edge, mid_min, orig_mid_min,
3199 scc_mgr_apply_group_dqs_io_and_oct_out1(seq, write_group, new_dqs);
3206 search_window(seq, 1, rank_bgn, write_group, &bgn_curr, &end_curr,
3210 scc_mgr_apply_group_dm_out1_delay(seq, 0);
3218 bgn_curr = seq->iocfg->io_out1_delay_max + 1;
3219 end_curr = seq->iocfg->io_out1_delay_max + 1;
3223 search_window(seq, 0, rank_bgn, write_group, &bgn_curr, &end_curr,
3234 scc_mgr_apply_group_dqs_io_and_oct_out1(seq, write_group, new_dqs);
3251 scc_mgr_apply_group_dm_out1_delay(seq, mid);
3259 seq->gbl.fom_out += dq_margin + dqs_margin;
3288 static int rw_mgr_mem_calibrate_writes(struct socfpga_sdrseq *seq,
3301 ret = rw_mgr_mem_calibrate_writes_center(seq, rank_bgn, group,
3304 set_failing_group_stage(seq, group, CAL_STAGE_WRITES,
3315 static void mem_precharge_and_activate(struct socfpga_sdrseq *seq)
3319 for (r = 0; r < seq->rwcfg->mem_number_of_ranks; r++) {
3321 set_rank_and_odt_mask(seq, r, RW_MGR_ODT_MODE_OFF);
3324 writel(seq->rwcfg->precharge_all, SDR_PHYGRP_RWMGRGRP_ADDRESS |
3328 writel(seq->rwcfg->activate_0_and_1_wait1,
3332 writel(seq->rwcfg->activate_0_and_1_wait2,
3336 writel(seq->rwcfg->activate_0_and_1,
3347 static void mem_init_latency(struct socfpga_sdrseq *seq)
3354 const u32 max_latency = (1 << seq->misccfg->max_latency_count_width)
3367 seq->gbl.rw_wl_nop_cycles = wlat - 1;
3373 seq->gbl.curr_read_lat = rlat + 16;
3374 if (seq->gbl.curr_read_lat > max_latency)
3375 seq->gbl.curr_read_lat = max_latency;
3377 writel(seq->gbl.curr_read_lat, &phy_mgr_cfg->phy_rlat);
3388 static void mem_skip_calibrate(struct socfpga_sdrseq *seq)
3395 for (r = 0; r < seq->rwcfg->mem_number_of_ranks;
3401 for (i = 0; i < seq->rwcfg->mem_if_read_dqs_width; i++) {
3403 if (seq->iocfg->dll_chain_length == 6)
3426 * (360 / seq->iocfg->dll_chain_length)
3429 (360 / seq->iocfg->dll_chain_length)
3432 * (1.25 * seq->iocfg->dll_chain_length - 2)
3435 ((125 * seq->iocfg->dll_chain_length)
3441 for (i = 0; i < seq->rwcfg->mem_if_write_dqs_width; i++) {
3451 for (i = 0; i < seq->rwcfg->mem_if_read_dqs_width; i++) {
3461 vfifo_offset = seq->misccfg->calib_vfifo_offset;
3470 seq->gbl.curr_read_lat = seq->misccfg->calib_lfifo_offset;
3471 writel(seq->gbl.curr_read_lat, &phy_mgr_cfg->phy_rlat);
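
In mem_skip_calibrate() (lines 3388 onward), the comment's formula of (1.25 * dll_chain_length - 2) output-phase ptaps is evaluated in integer fixed point scaled by 100, as the (125 * seq->iocfg->dll_chain_length) fragment at line 3435 suggests. A worked evaluation for an assumed dll_chain_length of 8 (line 3403 special-cases a chain length of 6):

    #include <stdio.h>

    int main(void)
    {
            unsigned int dll_chain_length = 8; /* assumed value */

            /* Integer form of 1.25 * dll_chain_length - 2, scaled by 100. */
            unsigned int ptaps = (125 * dll_chain_length - 200) / 100;

            printf("dqdqs output phase = %u ptaps\n", ptaps); /* prints 8 */
            return 0;
    }
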
3479 static u32 mem_calibrate(struct socfpga_sdrseq *seq)
3489 const u32 rwdqs_ratio = seq->rwcfg->mem_if_read_dqs_width /
3490 seq->rwcfg->mem_if_write_dqs_width;
3495 seq->gbl.error_substage = CAL_SUBSTAGE_NIL;
3496 seq->gbl.error_stage = CAL_STAGE_NIL;
3497 seq->gbl.error_group = 0xff;
3498 seq->gbl.fom_in = 0;
3499 seq->gbl.fom_out = 0;
3502 mem_init_latency(seq);
3505 mem_precharge_and_activate(seq);
3507 for (i = 0; i < seq->rwcfg->mem_if_read_dqs_width; i++) {
3518 if ((seq->dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL) {
3523 mem_skip_calibrate(seq);
3539 scc_mgr_zero_all(seq);
3544 < seq->rwcfg->mem_if_write_dqs_width; write_group++,
3545 write_test_bgn += seq->rwcfg->mem_dq_per_write_dqs) {
3559 scc_mgr_zero_group(seq, write_group, 0);
3565 read_test_bgn += seq->rwcfg->mem_dq_per_read_dqs) {
3570 if (rw_mgr_mem_calibrate_vfifo(seq, read_group,
3574 if (!(seq->gbl.phy_debug_mode_flags &
3584 rank_bgn < seq->rwcfg->mem_number_of_ranks;
3595 if (!rw_mgr_mem_calibrate_writes(seq, rank_bgn,
3601 if (!(seq->gbl.phy_debug_mode_flags &
3614 read_test_bgn += seq->rwcfg->mem_dq_per_read_dqs) {
3618 if (!rw_mgr_mem_calibrate_vfifo_end(seq,
3623 if (!(seq->gbl.phy_debug_mode_flags &
3649 if (!rw_mgr_mem_calibrate_lfifo(seq))
3666 static int run_mem_calibrate(struct socfpga_sdrseq *seq)
3681 phy_mgr_initialize(seq);
3682 rw_mgr_mem_initialize(seq);
3685 pass = mem_calibrate(seq);
3687 mem_precharge_and_activate(seq);
3691 rw_mgr_mem_handoff(seq);
3712 static void debug_mem_calibrate(struct socfpga_sdrseq *seq, int pass)
3719 seq->gbl.fom_in /= 2;
3720 seq->gbl.fom_out /= 2;
3722 if (seq->gbl.fom_in > 0xff)
3723 seq->gbl.fom_in = 0xff;
3725 if (seq->gbl.fom_out > 0xff)
3726 seq->gbl.fom_out = 0xff;
3729 debug_info = seq->gbl.fom_in;
3730 debug_info |= seq->gbl.fom_out << 8;
3738 debug_info = seq->gbl.error_stage;
3739 debug_info |= seq->gbl.error_substage << 8;
3740 debug_info |= seq->gbl.error_group << 16;
3747 debug_info = seq->gbl.error_stage;
3748 debug_info |= seq->gbl.error_substage << 8;
3749 debug_info |= seq->gbl.error_group << 16;
3783 static void initialize_reg_file(struct socfpga_sdrseq *seq)
3786 writel(seq->misccfg->reg_file_init_seq_signature,
3851 static void initialize_tracking(struct socfpga_sdrseq *seq)
3858 writel(DIV_ROUND_UP(seq->iocfg->delay_per_opa_tap,
3859 seq->iocfg->delay_per_dchain_tap) - 1,
3881 writel((seq->rwcfg->idle << 24) |
3882 (seq->rwcfg->activate_1 << 16) |
3883 (seq->rwcfg->sgle_read << 8) |
3884 (seq->rwcfg->precharge_all << 0),
3888 writel(seq->rwcfg->mem_if_read_dqs_width,
3895 writel((seq->rwcfg->refresh_all << 24) | (1000 << 0),
3903 struct socfpga_sdrseq seq;
3912 memset(&seq, 0, sizeof(seq));
3914 seq.rwcfg = socfpga_get_sdram_rwmgr_config();
3915 seq.iocfg = socfpga_get_sdram_io_config();
3916 seq.misccfg = socfpga_get_sdram_misc_config();
3919 seq.gbl.phy_debug_mode_flags |= PHY_DEBUG_ENABLE_CAL_RPT;
3925 seq.gbl.phy_debug_mode_flags |= PHY_DEBUG_DISABLE_GUARANTEED_READ;
3928 initialize_reg_file(&seq);
3935 initialize_tracking(&seq);
3942 seq.rwcfg->mem_number_of_ranks,
3943 seq.rwcfg->mem_number_of_cs_per_dimm,
3944 seq.rwcfg->mem_dq_per_read_dqs,
3945 seq.rwcfg->mem_dq_per_write_dqs,
3946 seq.rwcfg->mem_virtual_groups_per_read_dqs,
3947 seq.rwcfg->mem_virtual_groups_per_write_dqs);
3950 seq.rwcfg->mem_if_read_dqs_width,
3951 seq.rwcfg->mem_if_write_dqs_width,
3952 seq.rwcfg->mem_data_width, seq.rwcfg->mem_data_mask_width,
3953 seq.iocfg->delay_per_opa_tap,
3954 seq.iocfg->delay_per_dchain_tap);
3956 seq.iocfg->delay_per_dqs_en_dchain_tap,
3957 seq.iocfg->dll_chain_length);
3960 seq.iocfg->dqs_en_phase_max, seq.iocfg->dqdqs_out_phase_max,
3961 seq.iocfg->dqs_en_delay_max, seq.iocfg->dqs_in_delay_max);
3963 seq.iocfg->io_in_delay_max, seq.iocfg->io_out1_delay_max,
3964 seq.iocfg->io_out2_delay_max);
3966 seq.iocfg->dqs_in_reserve, seq.iocfg->dqs_out_reserve);
3978 seq.dyn_calib_steps = STATIC_CALIB_STEPS;
3983 if (!(seq.dyn_calib_steps & CALIB_SKIP_DELAY_LOOPS))
3984 seq.skip_delay_mask = 0xff;
3986 seq.skip_delay_mask = 0x0;
3988 pass = run_mem_calibrate(&seq);
3989 debug_mem_calibrate(&seq, pass);