// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_training_db.h"
#include "ddr_training_ip_db.h"
#include "mv_ddr_regs.h"

#define WL_ITERATION_NUM	10

static u32 pup_mask_table[] = {
	0x000000ff,
	0x0000ff00,
	0x00ff0000,
	0xff000000
};

static struct write_supp_result wr_supp_res[MAX_INTERFACE_NUM][MAX_BUS_NUM];

static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num);
static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num);
static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num);
static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id,
					      u32 bus_id);
static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id,
				     u32 edge_offset);

enum {
	PASS,
	FAIL
};

/*****************************************************************************
 * Dynamic read leveling
 *****************************************************************************/
int ddr3_tip_dynamic_read_leveling(u32 dev_num, u32 freq)
{
	u32 data, mask;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 bus_num, if_id, cl_val;
	enum mv_ddr_speed_bin speed_bin_index;
	/* save current CS value */
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 };
	int is_any_pup_fail = 0;
	u32 data_read[MAX_INTERFACE_NUM + 1] = { 0 };
	u8 rl_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (effective_cs = 0; effective_cs < MAX_CS_NUM; effective_cs++)
		for (bus_num = 0; bus_num < MAX_BUS_NUM; bus_num++)
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++)
				rl_values[effective_cs][bus_num][if_id] = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			training_result[training_stage][if_id] = TEST_SUCCESS;

			/* save current cs enable reg val */
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      DUAL_DUNIT_CFG_REG, cs_enable_reg_val,
				      MASK_ALL_BITS));
			/* enable single cs */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
		}

		ddr3_tip_reset_fifo_ptr(dev_num);

		/*
		 * Phase 1: Load pattern (using ODPG)
		 *
		 * enter read leveling mode
		 * only 27 bits are masked
		 * assuming non multi-CS configuration
		 * write to CS = 0 for the non multi-CS configuration; note
		 * that the results shall be read back to the required CS !!!
		 */

		/* BUS count is 0 shifted 26 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CTRL_REG, 0x3, 0x3));
		CHECK_STATUS(ddr3_tip_configure_odpg
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0,
			      pattern_table[PATTERN_RL].num_of_phases_tx, 0,
			      pattern_table[PATTERN_RL].num_of_phases_rx, 0, 0,
			      effective_cs, STRESS_NONE, DURATION_SINGLE));

		/* load pattern to ODPG */
		ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE, PATTERN_RL,
					      pattern_table[PATTERN_RL].start_addr);

		/*
		 * Phase 2: ODPG to Read Leveling mode
		 */

		/* General Training Opcode register */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_WR_RD_MODE_ENA_REG, 0,
			      MASK_ALL_BITS));

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      GENERAL_TRAINING_OPCODE_REG,
			      (0x301b01 | effective_cs << 2), 0x3c3fef));

		/* Object1 opcode register 0 & 1 */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			speed_bin_index =
				tm->interface_params[if_id].speed_bin_index;
			cl_val = mv_ddr_cl_val_get(speed_bin_index, freq);
			data = (cl_val << 17) | (0x3 << 25);
			mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25);
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      OPCODE_REG0_REG(1), data, mask));
		}

		/* Set iteration count to max value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      OPCODE_REG1_REG(1), 0xd00, 0xd00));

		/*
		 * Phase 3: Mask config
		 */

		ddr3_tip_dynamic_read_leveling_seq(dev_num);

		/*
		 * Phase 4: Read Leveling execution
		 */

		/* temporary jira dunit=14751 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_DBG_1_REG, 0, (u32)(1 << 31)));
		/* configure phy reset value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_DBG_3_REG, (0x7f << 24),
			      (u32)(0xff << 24)));
		/* data pup rd reset enable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_CFG_REG, 0, (1 << 30)));
		/* data pup rd reset disable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_CFG_REG, (1 << 30), (1 << 30)));
		/* training SW override & training RL mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x1, 0x9));
		/* training enable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, (1 << 24) | (1 << 20),
			      (1 << 24) | (1 << 20)));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31)));

		/* trigger training */
		mv_ddr_training_enable();

		/* check for training done */
		if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n"));
			return MV_FAIL;
		}
		/* check for training pass */
		if (data != PASS)
			DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n"));

		/* disable odpg; switch back to functional mode */
		mv_ddr_odpg_disable();

		if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("odpg disable failed\n"));
			return MV_FAIL;
		}

		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				  ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS);

		/* double loop on bus, pup */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* check training done */
			is_any_pup_fail = 0;
			for (bus_num = 0;
			     bus_num < octets_per_if_num;
			     bus_num++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num);
				if (ddr3_tip_if_polling
				    (dev_num, ACCESS_TYPE_UNICAST,
				     if_id, (1 << 25), (1 << 25),
				     mask_results_pup_reg_map[bus_num],
				     MAX_POLLING_ITERATIONS) != MV_OK) {
					DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
						       ("\n_r_l: DDR3 poll failed(2) for IF %d CS %d bus %d",
							if_id, effective_cs, bus_num));
					is_any_pup_fail = 1;
				} else {
					/* read result per pup */
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      mask_results_pup_reg_map
						      [bus_num], data_read,
						      0xff));
					rl_values[effective_cs][bus_num]
						[if_id] = (u8)data_read[if_id];
				}
			}

			if (is_any_pup_fail == 1) {
				training_result[training_stage][if_id] =
					TEST_FAILED;
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}

		DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n"));

		/*
		 * Phase 5: Exit Read Leveling
		 */

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, (1 << 3), (1 << 3)));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_1_REG, (1 << 16), (1 << 16)));
		/* set ODPG to functional */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS));

		/*
		 * Copy the result from the effective CS search to the
		 * real functional CS
		 */
		/*ddr3_tip_write_cs_result(dev_num, RL_PHY_REG(0); */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS));
	}

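	/*
	 * Note on the result encoding below (illustrative): the 8-bit raw
	 * result appears to pack the phase in bits [7:5] on top of a 5-bit
	 * ADLL value, while the RL PHY register expects ADLL in [4:0] and
	 * phase in [8:6]. For example, a raw value of 0x6b (ADLL 0x0b,
	 * phase 0x3) is repacked as
	 * (0x6b & 0x1f) | (((0x6b & 0xe0) >> 5) << 6), i.e.
	 * 0x0b | (0x3 << 6) = 0xcb.
	 */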
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		/* double loop on bus, pup */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			for (bus_num = 0;
			     bus_num < octets_per_if_num;
			     bus_num++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num);
				/* read result per pup from array */
				data = rl_values[effective_cs][bus_num][if_id];
				data = (data & 0x1f) |
					(((data & 0xe0) >> 5) << 6);
				ddr3_tip_bus_write(dev_num,
						   ACCESS_TYPE_UNICAST,
						   if_id,
						   ACCESS_TYPE_UNICAST,
						   bus_num, DDR_PHY_DATA,
						   RL_PHY_REG(effective_cs),
						   data);
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being used */
	effective_cs = 0;

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* restore cs enable value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
		if (odt_config != 0) {
			CHECK_STATUS(ddr3_tip_write_additional_odt_setting
				     (dev_num, if_id));
		}
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (training_result[training_stage][if_id] == TEST_FAILED)
			return MV_FAIL;
	}

	return MV_OK;
}

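/*
 * Note (illustrative): both legacy flows below drive the built-in hardware
 * trainer directly through TRAINING_REG (0x15b0): bit [31] (Trn_start) arms
 * the engine, bits [23:20] select the chip-selects to train, and the
 * Trn_auto_seq field picks the sequence. Completion is detected by polling
 * for bit [31] to self-clear.
 */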
/*
 * Legacy dynamic write leveling
 */
int ddr3_tip_legacy_dynamic_write_leveling(u32 dev_num)
{
	u32 c_cs, if_id, cs_mask = 0;
	unsigned int max_cs = mv_ddr_cs_num_get();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * In TRAINING reg (0x15b0) write 0x80000008 | cs_mask:
	 * Trn_start
	 * cs_mask = 0x1 << 20 Trn_CS0 - CS0 is included in the DDR3 training
	 * cs_mask = 0x1 << 21 Trn_CS1 - CS1 is included in the DDR3 training
	 * cs_mask = 0x1 << 22 Trn_CS2 - CS2 is included in the DDR3 training
	 * cs_mask = 0x1 << 23 Trn_CS3 - CS3 is included in the DDR3 training
	 * Trn_auto_seq = write leveling
	 */
	for (c_cs = 0; c_cs < max_cs; c_cs++)
		cs_mask = cs_mask | 1 << (20 + c_cs);

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, 0,
			      TRAINING_REG, (0x80000008 | cs_mask),
			      0xffffffff));
		mdelay(20);
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
		     (u32)0x80000000, TRAINING_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("polling failed for Old WL result\n"));
			return MV_FAIL;
		}
	}

	return MV_OK;
}

/*
 * Legacy dynamic read leveling
 */
int ddr3_tip_legacy_dynamic_read_leveling(u32 dev_num)
{
	u32 c_cs, if_id, cs_mask = 0;
	unsigned int max_cs = mv_ddr_cs_num_get();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * In TRAINING reg (0x15b0) write 0x80000040 | cs_mask:
	 * Trn_start
	 * cs_mask = 0x1 << 20 Trn_CS0 - CS0 is included in the DDR3 training
	 * cs_mask = 0x1 << 21 Trn_CS1 - CS1 is included in the DDR3 training
	 * cs_mask = 0x1 << 22 Trn_CS2 - CS2 is included in the DDR3 training
	 * cs_mask = 0x1 << 23 Trn_CS3 - CS3 is included in the DDR3 training
	 * Trn_auto_seq = read leveling using training pattern
	 */
	for (c_cs = 0; c_cs < max_cs; c_cs++)
		cs_mask = cs_mask | 1 << (20 + c_cs);

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, 0, TRAINING_REG,
		      (0x80000040 | cs_mask), 0xffffffff));
	mdelay(100);

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
		     (u32)0x80000000, TRAINING_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("polling failed for Old RL result\n"));
			return MV_FAIL;
		}
	}

	return MV_OK;
}

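/*
 * Per-bit search overview (illustrative): in the flow below, every subphy
 * reads all eight of its DQ result registers individually and locks only
 * when the largest per-bit delay fits within MAX_DQ_READ_LEVELING_DELAY of
 * the smallest one. Subphys that do not lock are retried after adjusting
 * the CRX (phy register 0x3) ADLL by an offset from adll_array, for up to
 * three passes.
 */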
/*
 * Dynamic per-bit read leveling
 */
int ddr3_tip_dynamic_per_bit_read_leveling(u32 dev_num, u32 freq)
{
	u32 data, mask;
	u32 bus_num, if_id, cl_val, bit_num;
	u32 curr_numb, curr_min_delay;
	int adll_array[3] = { 0, -0xa, 0x14 };
	u32 phyreg3_arr[MAX_INTERFACE_NUM][MAX_BUS_NUM];
	enum mv_ddr_speed_bin speed_bin_index;
	int is_any_pup_fail = 0;
	int break_loop = 0;
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM]; /* save current CS value */
	u32 data_read[MAX_INTERFACE_NUM];
	int per_bit_rl_pup_status[MAX_INTERFACE_NUM][MAX_BUS_NUM];
	u32 data2_write[MAX_INTERFACE_NUM][MAX_BUS_NUM];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_num = 0;
		     bus_num <= octets_per_if_num; bus_num++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num);
			per_bit_rl_pup_status[if_id][bus_num] = 0;
			data2_write[if_id][bus_num] = 0;
			/* read current value of phy register 0x3 */
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_num, DDR_PHY_DATA,
				      CRX_PHY_REG(0),
				      &phyreg3_arr[if_id][bus_num]));
		}
	}

	/* NEW RL machine */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		training_result[training_stage][if_id] = TEST_SUCCESS;

		/* save current cs enable reg val */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, &cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
	}

	ddr3_tip_reset_fifo_ptr(dev_num);
	for (curr_numb = 0; curr_numb < 3; curr_numb++) {
		/*
		 * Phase 1: Load pattern (using ODPG)
		 *
		 * enter read leveling mode
		 * only 27 bits are masked
		 * assuming non multi-CS configuration
		 * write to CS = 0 for the non multi-CS configuration; note that
		 * the results shall be read back to the required CS !!!
		 */

		/* BUS count is 0 shifted 26 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CTRL_REG, 0x3, 0x3));
		CHECK_STATUS(ddr3_tip_configure_odpg
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0,
			      pattern_table[PATTERN_TEST].num_of_phases_tx, 0,
			      pattern_table[PATTERN_TEST].num_of_phases_rx, 0,
			      0, 0, STRESS_NONE, DURATION_SINGLE));

		/* load pattern to ODPG */
		ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE, PATTERN_TEST,
					      pattern_table[PATTERN_TEST].start_addr);

		/*
		 * Phase 2: ODPG to Read Leveling mode
		 */

		/* General Training Opcode register */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_WR_RD_MODE_ENA_REG, 0,
			      MASK_ALL_BITS));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      GENERAL_TRAINING_OPCODE_REG, 0x301b01, 0x3c3fef));

		/* Object1 opcode register 0 & 1 */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			speed_bin_index =
				tm->interface_params[if_id].speed_bin_index;
			cl_val = mv_ddr_cl_val_get(speed_bin_index, freq);
			data = (cl_val << 17) | (0x3 << 25);
			mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25);
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      OPCODE_REG0_REG(1), data, mask));
		}

		/* Set iteration count to max value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      OPCODE_REG1_REG(1), 0xd00, 0xd00));

		/*
		 * Phase 3: Mask config
		 */

		ddr3_tip_dynamic_per_bit_read_leveling_seq(dev_num);

		/*
		 * Phase 4: Read Leveling execution
		 */

		/* temporary jira dunit=14751 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_DBG_1_REG, 0, (u32)(1 << 31)));
		/* configure phy reset value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_DBG_3_REG, (0x7f << 24),
			      (u32)(0xff << 24)));
		/* data pup rd reset enable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_CFG_REG, 0, (1 << 30)));
		/* data pup rd reset disable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_CFG_REG, (1 << 30), (1 << 30)));
		/* training SW override & training RL mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x1, 0x9));
		/* training enable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, (1 << 24) | (1 << 20),
			      (1 << 24) | (1 << 20)));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31)));

		/* trigger training */
		mv_ddr_training_enable();

		/* check for training done */
		if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n"));
			return MV_FAIL;
		}
		/* check for training pass */
		if (data != PASS)
			DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n"));

		/* disable odpg; switch back to functional mode */
		mv_ddr_odpg_disable();

		if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("odpg disable failed\n"));
			return MV_FAIL;
		}

		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				  ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS);

		/* double loop on bus, pup */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* check training done */
			for (bus_num = 0;
			     bus_num < octets_per_if_num;
			     bus_num++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num);

				if (per_bit_rl_pup_status[if_id][bus_num] == 0) {
					curr_min_delay = 0;
					for (bit_num = 0; bit_num < 8; bit_num++) {
						if (ddr3_tip_if_polling
						    (dev_num,
						     ACCESS_TYPE_UNICAST,
						     if_id, (1 << 25),
						     (1 << 25),
						     mask_results_dq_reg_map
						     [bus_num * 8 + bit_num],
						     MAX_POLLING_ITERATIONS) !=
						    MV_OK) {
							DEBUG_LEVELING
								(DEBUG_LEVEL_ERROR,
								 ("\n_r_l: DDR3 poll failed(2) for bus %d bit %d\n",
								  bus_num,
								  bit_num));
						} else {
							/* read result per pup */
							CHECK_STATUS
								(ddr3_tip_if_read
								 (dev_num,
								  ACCESS_TYPE_UNICAST,
								  if_id,
								  mask_results_dq_reg_map
								  [bus_num * 8 +
								   bit_num],
								  data_read,
								  MASK_ALL_BITS));
							data = (data_read[if_id] & 0x1f) |
							       ((data_read[if_id] & 0xe0) << 1);
							if (curr_min_delay == 0)
								curr_min_delay = data;
							else if (data < curr_min_delay)
								curr_min_delay = data;
							if (data > data2_write[if_id][bus_num])
								data2_write[if_id][bus_num] = data;
						}
					}

					if (data2_write[if_id][bus_num] <=
					    (curr_min_delay +
					     MAX_DQ_READ_LEVELING_DELAY)) {
						per_bit_rl_pup_status[if_id][bus_num] = 1;
					}
				}
			}
		}

		/* check if there is a need to search for a new phyreg3 value */
		if (curr_numb < 2) {
			/* if there is a DLL that is not checked yet */
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (bus_num = 0;
				     bus_num < octets_per_if_num;
				     bus_num++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask,
							    bus_num);
					if (per_bit_rl_pup_status[if_id]
					    [bus_num] != 1) {
						/* go to next ADLL value */
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_num, DDR_PHY_DATA,
							  CRX_PHY_REG(0),
							  (phyreg3_arr[if_id]
							   [bus_num] +
							   adll_array[curr_numb])));
						break_loop = 1;
						break;
					}
				}
				if (break_loop)
					break;
			}
		} /* if (curr_numb < 2) */
		if (!break_loop)
			break;
	} /* for (curr_numb = 0; curr_numb < 3; curr_numb++) */

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_num = 0; bus_num < octets_per_if_num;
		     bus_num++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num);
			if (per_bit_rl_pup_status[if_id][bus_num] == 1)
				ddr3_tip_bus_write(dev_num,
						   ACCESS_TYPE_UNICAST,
						   if_id,
						   ACCESS_TYPE_UNICAST,
						   bus_num, DDR_PHY_DATA,
						   RL_PHY_REG(effective_cs),
						   data2_write[if_id]
						   [bus_num]);
			else
				is_any_pup_fail = 1;
		}

		/* TBD flow does not support multi CS */
		/*
		 * cs_bitmask = tm->interface_params[if_id].
		 * as_bus_params[bus_num].cs_bitmask;
		 */
		/* divide by 4 is used for retrieving the CS number */
		/*
		 * TBD BC2 - what is the PHY address for other
		 * CS ddr3_tip_write_cs_result() ???
		 */
		/*
		 * find what should be written to PHY
		 * - max delay that is less than threshold
		 */
		if (is_any_pup_fail == 1) {
			training_result[training_stage][if_id] = TEST_FAILED;
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}
	DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n"));

	/*
	 * Phase 5: Exit Read Leveling
	 */

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_SW_2_REG, (1 << 3), (1 << 3)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_SW_1_REG, (1 << 16), (1 << 16)));
	/* set ODPG to functional */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS));
	/*
	 * Copy the result from the effective CS search to the real
	 * functional CS
	 */
	ddr3_tip_write_cs_result(dev_num, RL_PHY_REG(0));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* restore cs enable value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
		if (odt_config != 0) {
			CHECK_STATUS(ddr3_tip_write_additional_odt_setting
				     (dev_num, if_id));
		}
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (training_result[training_stage][if_id] == TEST_FAILED)
			return MV_FAIL;
	}

	return MV_OK;
}

int ddr3_tip_calc_cs_mask(u32 dev_num, u32 if_id, u32 effective_cs,
			  u32 *cs_mask)
{
	u32 all_bus_cs = 0, same_bus_cs;
	u32 bus_cnt;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	*cs_mask = same_bus_cs = CS_BIT_MASK;

	/*
	 * In some of the devices (such as BC2), the CS is per pup, and
	 * therefore mixed mode is valid, unlike other devices where the CS
	 * configuration is per interface.
	 * In order to know that, we do 'Or' and 'And' operations between all
	 * CS (of the pups).
	 * If they are not the same, then it's mixed mode, so all CS
	 * should be configured (when configuring the MRS).
	 */
	for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);

		all_bus_cs |= tm->interface_params[if_id].
			as_bus_params[bus_cnt].cs_bitmask;
		same_bus_cs &= tm->interface_params[if_id].
			as_bus_params[bus_cnt].cs_bitmask;

		/* cs enable is active low */
		*cs_mask &= ~tm->interface_params[if_id].
			as_bus_params[bus_cnt].cs_bitmask;
	}

	if (all_bus_cs == same_bus_cs)
		*cs_mask = (*cs_mask | (~(1 << effective_cs))) & CS_BIT_MASK;

	return MV_OK;
}
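/*
 * Worked example for ddr3_tip_calc_cs_mask() (illustrative, assuming
 * CS_BIT_MASK is 0xf): with two chip-selects on every pup (cs_bitmask 0x3,
 * homogeneous, so all_bus_cs == same_bus_cs) and effective_cs = 0, the
 * loop leaves *cs_mask = 0xf & ~0x3 = 0xc, and the final step yields
 * (0xc | ~(1 << 0)) & 0xf = 0xe: active-low, i.e. only CS0 participates.
 */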

/*
 * Dynamic write leveling
 */
int ddr3_tip_dynamic_write_leveling(u32 dev_num, int phase_remove)
{
	u32 reg_data = 0, temp = 0, iter, if_id, bus_cnt;
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 };
	u32 cs_mask[MAX_INTERFACE_NUM];
	u32 read_data_sample_delay_vals[MAX_INTERFACE_NUM] = { 0 };
	u32 read_data_ready_delay_vals[MAX_INTERFACE_NUM] = { 0 };
	/* nonzero marks a failed pup */
	u32 res_values[MAX_INTERFACE_NUM * MAX_BUS_NUM] = { 0 };
	u32 test_res = 0;	/* 0 - success for all pup */
	u32 data_read[MAX_INTERFACE_NUM];
	u8 wl_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u32 cs_mask0[MAX_INTERFACE_NUM] = { 0 };
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		training_result[training_stage][if_id] = TEST_SUCCESS;

		/* save Read Data Sample Delay */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      RD_DATA_SMPL_DLYS_REG,
			      read_data_sample_delay_vals, MASK_ALL_BITS));
		/* save Read Data Ready Delay */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      RD_DATA_RDY_DLYS_REG, read_data_ready_delay_vals,
			      MASK_ALL_BITS));
		/* save current cs reg val */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, cs_enable_reg_val, MASK_ALL_BITS));
	}

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) {
		/* Enable multi-CS */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, 0, (1 << 3)));
	}

	/*
	 * Phase 1: DRAM to write leveling mode
	 */
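	/*
	 * Note (illustrative): under the 0xf1f mask, the SDRAM_OP value
	 * below resolves to command 0x2 (refresh) in bits [4:0] with the
	 * active-low CS select bits [11:8] cleared, i.e. the refresh is
	 * issued to all chip-selects at once.
	 */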
	/* Assert 10 refresh commands to DRAM to all CS */
	for (iter = 0; iter < WL_ITERATION_NUM; iter++) {
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, SDRAM_OP_REG,
				      (u32)((~(0xf) << 8) | 0x2), 0xf1f));
		}
	}
	/* check controller back to normal */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
		     SDRAM_OP_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("WL: DDR3 poll failed(3)"));
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		/* enable write leveling to all cs - Q off, WL n */
		/* calculate interface cs mask */
		CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1,
						    0x1000, 0x1080));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* cs enable is active low */
			ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
					      &cs_mask[if_id]);
		}

		if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
			/* Enable Output buffer to relevant CS - Q on, WL on */
			CHECK_STATUS(ddr3_tip_write_mrs_cmd
				     (dev_num, cs_mask, MR_CMD1, 0x80, 0x1080));

			/* enable odt for relevant CS */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				      0x1498, (0x3 << (effective_cs * 2)), 0xf));
		} else {
			/* FIXME: should be the same as _CPU case */
			CHECK_STATUS(ddr3_tip_write_mrs_cmd
				     (dev_num, cs_mask, MR_CMD1, 0xc0, 0x12c4));
		}

		/*
		 * Phase 2: Set training IP to write leveling mode
		 */

		CHECK_STATUS(ddr3_tip_dynamic_write_leveling_seq(dev_num));

		/*
		 * Phase 3: Trigger training
		 */
		mv_ddr_training_enable();

		/* check for training done */
		if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, data_read) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n"));
		} else { /* check for training pass */
			reg_data = data_read[0];
			if (tm->bus_act_mask == 0xb) /* set data to 0 to skip the check */
				reg_data = 0;
			if (reg_data != PASS)
				DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n"));

			/* check for training completion per bus */
			for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
				/* training status */
				ddr3_tip_if_read(0, ACCESS_TYPE_UNICAST, 0,
						 mask_results_pup_reg_map[bus_cnt],
						 data_read, MASK_ALL_BITS);
				reg_data = data_read[0];
				DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("WL: IF %d BUS %d reg 0x%x\n",
								   0, bus_cnt, reg_data));
				if ((reg_data & (1 << 25)) == 0)
					res_values[bus_cnt] = 1;
				ddr3_tip_if_read(0, ACCESS_TYPE_UNICAST, 0,
						 mask_results_pup_reg_map[bus_cnt],
						 data_read, 0xff);
				/*
				 * Save the read value that should be
				 * written to the PHY register
				 */
				wl_values[effective_cs][bus_cnt][0] = (u8)data_read[0];
			}
		}

		/*
		 * Phase 3.5: Validate result
		 */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
				/*
				 * Read result control register according to subphy;
				 * "16" below is for a half-phase
				 */
				reg_data = wl_values[effective_cs][bus_cnt][if_id] + 16;
				/*
				 * Write to WL register: ADLL [4:0], Phase [8:6],
				 * Centralization ADLL [15:10] + 0x10
				 */
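				/*
				 * Worked example (illustrative): for
				 * reg_data = 0x3c and, hypothetically,
				 * phy_reg1_val = 0, the repack gives
				 * (0x3c & 0x1f) | (((0x3c & 0xe0) >> 5) << 6) |
				 * ((0x1c + 0) << 10) = 0x1c | 0x40 | 0x7000
				 * = 0x705c: ADLL 0x1c, phase 1,
				 * centralization ADLL 0x1c.
				 */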
				reg_data = (reg_data & 0x1f) |
					   (((reg_data & 0xe0) >> 5) << 6) |
					   (((reg_data & 0x1f) + phy_reg1_val) << 10);
				/* Search with WL CS0 subphy reg */
				ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
						   ACCESS_TYPE_UNICAST, bus_cnt,
						   DDR_PHY_DATA, WL_PHY_REG(0), reg_data);
				/*
				 * Check for change in data read from DRAM.
				 * If changed, fix the result
				 */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num,
					      ACCESS_TYPE_UNICAST,
					      if_id,
					      TRAINING_WL_REG,
					      data_read, MASK_ALL_BITS));
				if (((data_read[if_id] & (1 << (bus_cnt + 20))) >>
				     (bus_cnt + 20)) == 0) {
					DEBUG_LEVELING(
						DEBUG_LEVEL_ERROR,
						("WLValues was changed from 0x%X",
						 wl_values[effective_cs]
						 [bus_cnt][if_id]));
					wl_values[effective_cs]
						[bus_cnt][if_id] += 32;
					DEBUG_LEVELING(
						DEBUG_LEVEL_ERROR,
						("to 0x%X",
						 wl_values[effective_cs]
						 [bus_cnt][if_id]));
				}
			}
		}

		/*
		 * Phase 4: Exit write leveling mode
		 */

		/* disable DQs toggling */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      WL_DQS_PATTERN_REG, 0x0, 0x1));

		/* Update MRS 1 (WL off) */
		if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
			CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1,
							    0x1000, 0x1080));
		} else {
			/* FIXME: should be the same as _CPU case */
			CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1,
							    0x1000, 0x12c4));
		}

		/* Update MRS 1 (return to functional mode - Q on, WL off) */
		CHECK_STATUS(ddr3_tip_write_mrs_cmd
			     (dev_num, cs_mask0, MR_CMD1, 0x0, 0x1080));

		/* set phy to normal mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x5, 0x7));

		/* exit sw override mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x4, 0x7));
	}

	/*
	 * Phase 5: Load WL values to each PHY
	 */

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			test_res = 0;
			for (bus_cnt = 0;
			     bus_cnt < octets_per_if_num;
			     bus_cnt++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
				/* check if result == pass */
				if (res_values
				    [(if_id *
				      octets_per_if_num) +
				     bus_cnt] == 0) {
					/*
					 * read result control register
					 * according to pup
					 */
					reg_data =
						wl_values[effective_cs][bus_cnt]
						[if_id];
					/*
					 * Write into write leveling register
					 * ([4:0] ADLL, [8:6] Phase, [15:10]
					 * (centralization) ADLL + 0x10)
					 */
					reg_data =
						(reg_data & 0x1f) |
						(((reg_data & 0xe0) >> 5) << 6) |
						(((reg_data & 0x1f) +
						  phy_reg1_val) << 10);
					/*
					 * In case phase removal should be
					 * executed, more than one phase may
					 * need to be removed. This happens
					 * only at low frequency, where there
					 * can be more than one phase between
					 * sub-phys.
					 */
					if (phase_remove == 1) {
						temp = (reg_data >> WR_LVL_PH_SEL_OFFS) &
						       WR_LVL_PH_SEL_PHASE1;
						reg_data &= ~(WR_LVL_PH_SEL_MASK <<
							      WR_LVL_PH_SEL_OFFS);
						reg_data |= (temp << WR_LVL_PH_SEL_OFFS);
					}

					ddr3_tip_bus_write(
						dev_num,
						ACCESS_TYPE_UNICAST,
						if_id,
						ACCESS_TYPE_UNICAST,
						bus_cnt,
						DDR_PHY_DATA,
						WL_PHY_REG(effective_cs),
						reg_data);
				} else {
					test_res = 1;
					/*
					 * read result control register
					 * according to pup
					 */
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      mask_results_pup_reg_map
						      [bus_cnt], data_read,
						      0xff));
					reg_data = data_read[if_id];
					DEBUG_LEVELING(
						DEBUG_LEVEL_ERROR,
						("WL: IF %d BUS %d failed, reg 0x%x\n",
						 if_id, bus_cnt, reg_data));
				}
			}

			if (test_res != 0) {
				training_result[training_stage][if_id] =
					TEST_FAILED;
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being used */
	effective_cs = 0;

	/*
	 * Copy the result from the effective CS search to the real
	 * functional CS
	 */
	/* ddr3_tip_write_cs_result(dev_num, WL_PHY_REG(0); */
	/* restore saved values */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* restore Read Data Sample Delay */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      RD_DATA_SMPL_DLYS_REG,
			      read_data_sample_delay_vals[if_id],
			      MASK_ALL_BITS));

		/* restore Read Data Ready Delay */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      RD_DATA_RDY_DLYS_REG,
			      read_data_ready_delay_vals[if_id],
			      MASK_ALL_BITS));

		/* enable multi cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
	}

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		/*
		 * Disable modt0 for CS0 training - need to adjust for
		 * multi-CS; in case of DDR4 set 0xf, else 0.
		 */
		if (odt_config != 0) {
			CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
						       SDRAM_ODT_CTRL_HIGH_REG, 0x0, 0xf));
		} else {
			CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
						       SDRAM_ODT_CTRL_HIGH_REG, 0xf, 0xf));
		}
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (training_result[training_stage][if_id] == TEST_FAILED)
			return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Dynamic write leveling supplementary
 */
int ddr3_tip_dynamic_write_leveling_supp(u32 dev_num)
{
	int adll_offset;
	u32 if_id, bus_id, data, data_tmp;
	int is_if_fail = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

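	/*
	 * Search strategy (illustrative): per subphy, try phase alignment at
	 * the trained CTX ADLL value first, then retry with the ADLL shifted
	 * by +5 and by -5 taps; the first attempt that passes
	 * ddr3_tip_wl_supp_align_phase_shift() wins, and a subphy failing
	 * all three attempts marks its interface as failed.
	 */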
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		is_if_fail = 0;

		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			wr_supp_res[if_id][bus_id].is_pup_fail = 1;
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_id, DDR_PHY_DATA,
				      CTX_PHY_REG(effective_cs),
				      &data));
			DEBUG_LEVELING(
				DEBUG_LEVEL_TRACE,
				("WL Supp: adll_offset=0 data delay = %d\n",
				 data));
			if (ddr3_tip_wl_supp_align_phase_shift
			    (dev_num, if_id, bus_id) == MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_TRACE,
					("WL Supp: IF %d bus_id %d adll_offset=0 Success !\n",
					 if_id, bus_id));
				continue;
			}

			/* change adll */
			adll_offset = 5;
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
				      CTX_PHY_REG(effective_cs),
				      data + adll_offset));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_id, DDR_PHY_DATA,
				      CTX_PHY_REG(effective_cs),
				      &data_tmp));
			DEBUG_LEVELING(
				DEBUG_LEVEL_TRACE,
				("WL Supp: adll_offset= %d data delay = %d\n",
				 adll_offset, data_tmp));

			if (ddr3_tip_wl_supp_align_phase_shift
			    (dev_num, if_id, bus_id) == MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_TRACE,
					("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n",
					 if_id, bus_id, adll_offset));
				continue;
			}

			/* change adll */
			adll_offset = -5;
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
				      CTX_PHY_REG(effective_cs),
				      data + adll_offset));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_id, DDR_PHY_DATA,
				      CTX_PHY_REG(effective_cs),
				      &data_tmp));
			DEBUG_LEVELING(
				DEBUG_LEVEL_TRACE,
				("WL Supp: adll_offset= %d data delay = %d\n",
				 adll_offset, data_tmp));
			if (ddr3_tip_wl_supp_align_phase_shift
			    (dev_num, if_id, bus_id) == MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_TRACE,
					("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n",
					 if_id, bus_id, adll_offset));
				continue;
			} else {
				DEBUG_LEVELING(
					DEBUG_LEVEL_ERROR,
					("WL Supp: IF %d bus_id %d Failed !\n",
					 if_id, bus_id));
				is_if_fail = 1;
			}
		}

		if (is_if_fail == 1) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("WL Supp: CS# %d: IF %d failed\n",
					effective_cs, if_id));
			training_result[training_stage][if_id] = TEST_FAILED;
		} else {
			training_result[training_stage][if_id] = TEST_SUCCESS;
		}
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (training_result[training_stage][if_id] == TEST_FAILED)
			return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Phase Shift
 */
static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id,
					      u32 bus_id)
{
	u32 original_phase;
	u32 data, write_data;

	wr_supp_res[if_id][bus_id].stage = PHASE_SHIFT;
	if (ddr3_tip_xsb_compare_test
	    (dev_num, if_id, bus_id, 0) == MV_OK)
		return MV_OK;

	/* Read current phase */
	CHECK_STATUS(ddr3_tip_bus_read
		     (dev_num, if_id, ACCESS_TYPE_UNICAST, bus_id,
		      DDR_PHY_DATA, WL_PHY_REG(effective_cs), &data));
	original_phase = (data >> 6) & 0x7;

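	/*
	 * Try ladder (illustrative): with the current WL phase (bits [8:6])
	 * as the starting point, the compare test is repeated at phase
	 * offsets -2, +2, +4 and +6, each applied only when it keeps the
	 * 3-bit phase field in range; the first offset that passes is kept,
	 * otherwise the original WL result is restored and the pup is
	 * marked as failed.
	 */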
	/* Set phase (0x0[6-8]) -2 */
	if (original_phase >= 1) {
		if (original_phase == 1)
			write_data = data & ~0x1df;
		else
			write_data = (data & ~0x1c0) |
				     ((original_phase - 2) << 6);
		ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
				   ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
				   WL_PHY_REG(effective_cs), write_data);
		if (ddr3_tip_xsb_compare_test
		    (dev_num, if_id, bus_id, -2) == MV_OK)
			return MV_OK;
	}

	/* Set phase (0x0[6-8]) +2 */
	if (original_phase <= 5) {
		write_data = (data & ~0x1c0) |
			     ((original_phase + 2) << 6);
		ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
				   ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
				   WL_PHY_REG(effective_cs), write_data);
		if (ddr3_tip_xsb_compare_test
		    (dev_num, if_id, bus_id, 2) == MV_OK)
			return MV_OK;
	}

	/* Set phase (0x0[6-8]) +4 */
	if (original_phase <= 3) {
		write_data = (data & ~0x1c0) |
			     ((original_phase + 4) << 6);
		ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
				   ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
				   WL_PHY_REG(effective_cs), write_data);
		if (ddr3_tip_xsb_compare_test
		    (dev_num, if_id, bus_id, 4) == MV_OK)
			return MV_OK;
	}

	/* Set phase (0x0[6-8]) +6 */
	if (original_phase <= 1) {
		write_data = (data & ~0x1c0) |
			     ((original_phase + 6) << 6);
		ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
				   ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
				   WL_PHY_REG(effective_cs), write_data);
		if (ddr3_tip_xsb_compare_test
		    (dev_num, if_id, bus_id, 6) == MV_OK)
			return MV_OK;
	}

	/* Write original WL result back */
	ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
			   ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
			   WL_PHY_REG(effective_cs), data);
	wr_supp_res[if_id][bus_id].is_pup_fail = 1;

	return MV_FAIL;
}

/*
 * Compare Test
 */
static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id,
				     u32 edge_offset)
{
	u32 num_of_succ_byte_compare, word_in_pattern;
	u32 word_offset, i, num_of_word_mult;
	u32 read_pattern[TEST_PATTERN_LENGTH * 2];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u32 pattern_test_pattern_table[8];
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* 3 below for INTERFACE_BUS_MASK_16BIT */
	num_of_word_mult = (tm->bus_act_mask == 3) ? 1 : 2;

	for (i = 0; i < 8; i++) {
		pattern_test_pattern_table[i] =
			pattern_table_get_word(dev_num, PATTERN_TEST, (u8)i);
	}

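	/*
	 * Comparison granularity (illustrative): pup_mask_table[bus_id]
	 * selects the byte lane owned by the subphy under test, e.g.
	 * bus_id 1 -> mask 0x0000ff00, so each 32-bit word is compared
	 * against the expected pattern in that byte only.
	 */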
	/* External write, read and compare */
	CHECK_STATUS(ddr3_tip_load_pattern_to_mem(dev_num, PATTERN_TEST));

	CHECK_STATUS(ddr3_tip_reset_fifo_ptr(dev_num));

	CHECK_STATUS(ddr3_tip_ext_read
		     (dev_num, if_id,
		      ((pattern_table[PATTERN_TEST].start_addr << 3) +
		       ((SDRAM_CS_SIZE + 1) * effective_cs)), 1, read_pattern));

	DEBUG_LEVELING(
		DEBUG_LEVEL_TRACE,
		("XSB-compt CS#%d: IF %d bus_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
		 effective_cs, if_id, bus_id,
		 read_pattern[0], read_pattern[1],
		 read_pattern[2], read_pattern[3],
		 read_pattern[4], read_pattern[5],
		 read_pattern[6], read_pattern[7]));

	/* compare byte per pup */
	num_of_succ_byte_compare = 0;
	for (word_in_pattern = start_xsb_offset;
	     word_in_pattern < (TEST_PATTERN_LENGTH * num_of_word_mult);
	     word_in_pattern++) {
		word_offset = word_in_pattern;
		if ((word_offset > (TEST_PATTERN_LENGTH * 2 - 1)))
			continue;

		if ((read_pattern[word_in_pattern] & pup_mask_table[bus_id]) ==
		    (pattern_test_pattern_table[word_offset] &
		     pup_mask_table[bus_id]))
			num_of_succ_byte_compare++;
	}

	if ((TEST_PATTERN_LENGTH * num_of_word_mult - start_xsb_offset) ==
	    num_of_succ_byte_compare) {
		wr_supp_res[if_id][bus_id].stage = edge_offset;
		DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
			       ("supplementary: shift to %d for if %d pup %d success\n",
				edge_offset, if_id, bus_id));
		wr_supp_res[if_id][bus_id].is_pup_fail = 0;

		return MV_OK;
	} else {
		DEBUG_LEVELING(
			DEBUG_LEVEL_TRACE,
			("XSB-compt CS#%d: IF %d bus_id %d num_of_succ_byte_compare %d - Fail!\n",
			 effective_cs, if_id, bus_id, num_of_succ_byte_compare));

		DEBUG_LEVELING(
			DEBUG_LEVEL_TRACE,
			("XSB-compt: expected 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
			 pattern_test_pattern_table[0],
			 pattern_test_pattern_table[1],
			 pattern_test_pattern_table[2],
			 pattern_test_pattern_table[3],
			 pattern_test_pattern_table[4],
			 pattern_test_pattern_table[5],
			 pattern_test_pattern_table[6],
			 pattern_test_pattern_table[7]));
		DEBUG_LEVELING(
			DEBUG_LEVEL_TRACE,
			("XSB-compt: received 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
			 read_pattern[0], read_pattern[1],
			 read_pattern[2], read_pattern[3],
			 read_pattern[4], read_pattern[5],
			 read_pattern[6], read_pattern[7]));

		return MV_FAIL;
	}
}

/*
 * Dynamic write leveling sequence
 */
static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num)
{
	u32 bus_id, dq_id;
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_SW_2_REG, 0x1, 0x5));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_WL_REG, 0x50, 0xff));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_WL_REG, 0x5c, 0xff));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      GENERAL_TRAINING_OPCODE_REG, 0x381b82, 0x3c3faf));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      OPCODE_REG0_REG(1), (0x3 << 25), (0x3ffff << 9)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      OPCODE_REG1_REG(1), 0x80, 0xffff));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      WL_DONE_CNTR_REF_REG, 0x14, 0xff));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_WL_REG, 0xff5c, 0xffff));

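	/*
	 * Result masking note (illustrative): bit [24] of a mask-result
	 * register excludes that DQ/pup from result collection. The
	 * sequences below therefore mask every result register first and
	 * then clear bit [24] only for the subphys (or, in the per-bit
	 * variant, the individual DQs) that actually participate.
	 */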
	/* mask PBS */
	for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_dq_reg_map[dq_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Mask all results */
	for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Unmask only wanted */
	for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0, 0x1 << 24));
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      WL_DQS_PATTERN_REG, 0x1, 0x1));

	return MV_OK;
}

/*
 * Dynamic read leveling sequence
 */
static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num)
{
	u32 bus_id, dq_id;
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* mask PBS */
	for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_dq_reg_map[dq_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Mask all results */
	for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Unmask only wanted */
	for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0, 0x1 << 24));
	}

	return MV_OK;
}

/*
 * Dynamic per-bit read leveling sequence
 */
static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num)
{
	u32 bus_id, dq_id;
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* mask PBS */
	for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_dq_reg_map[dq_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Mask all results */
	for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0x1 << 24,
			      0x1 << 24));
	}

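	/*
	 * Per-bit note (illustrative): unlike the per-pup sequences above,
	 * this variant unmasks the individual DQ result registers, with
	 * dq_id / 8 mapping each DQ back to its subphy for the activity
	 * check.
	 */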
	/* Unmask only wanted */
	for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, dq_id / 8);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_dq_reg_map[dq_id], 0x0 << 24,
			      0x1 << 24));
	}

	return MV_OK;
}

/*
 * Print write leveling supplementary results
 */
int ddr3_tip_print_wl_supp_result(u32 dev_num)
{
	u32 bus_id = 0, if_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	DEBUG_LEVELING(DEBUG_LEVEL_INFO,
		       ("I/F0 PUP0 Result[0 - success, 1-fail] ...\n"));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			DEBUG_LEVELING(DEBUG_LEVEL_INFO,
				       ("%d ,", wr_supp_res[if_id]
					[bus_id].is_pup_fail));
		}
	}
	DEBUG_LEVELING(
		DEBUG_LEVEL_INFO,
		("I/F0 PUP0 Stage[0-phase_shift, 1-clock_shift, 2-align_shift] ...\n"));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			DEBUG_LEVELING(DEBUG_LEVEL_INFO,
				       ("%d ,", wr_supp_res[if_id]
					[bus_id].stage));
		}
	}

	return MV_OK;
}

#define RD_FIFO_PTR_LOW_STAT_INDIR_ADDR		0x9a
#define RD_FIFO_PTR_HIGH_STAT_INDIR_ADDR	0x9b
/* position of falling dqs edge in fifo; walking 1 */
#define RD_FIFO_DQS_FALL_EDGE_POS_0		0x1
#define RD_FIFO_DQS_FALL_EDGE_POS_1		0x2
#define RD_FIFO_DQS_FALL_EDGE_POS_2		0x4
#define RD_FIFO_DQS_FALL_EDGE_POS_3		0x8
#define RD_FIFO_DQS_FALL_EDGE_POS_4		0x10	/* lock */
/* position of rising dqs edge in fifo; walking 0 */
#define RD_FIFO_DQS_RISE_EDGE_POS_0		0x1fff
#define RD_FIFO_DQS_RISE_EDGE_POS_1		0x3ffe
#define RD_FIFO_DQS_RISE_EDGE_POS_2		0x3ffd
#define RD_FIFO_DQS_RISE_EDGE_POS_3		0x3ffb
#define RD_FIFO_DQS_RISE_EDGE_POS_4		0x3ff7	/* lock */
#define TEST_ADDR		0x8
#define TAPS_PER_UI		32
#define UI_PER_RD_SAMPLE	4
#define TAPS_PER_RD_SAMPLE	((UI_PER_RD_SAMPLE) * (TAPS_PER_UI))
#define MAX_RD_SAMPLES		32
#define MAX_RL_VALUE		((MAX_RD_SAMPLES) * (TAPS_PER_RD_SAMPLE))
#define RD_FIFO_DLY		8
#define STEP_SIZE		64
#define RL_JITTER_WIDTH_LMT	20
#define ADLL_TAPS_IN_CYCLE	64

enum rl_dqs_burst_state {
	RL_AHEAD = 0,
	RL_INSIDE,
	RL_BEHIND
};
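/*
 * Derived search range (illustrative): one read sample spans
 * UI_PER_RD_SAMPLE * TAPS_PER_UI = 4 * 32 = 128 taps, so the leveling
 * search in mv_ddr_rl_dqs_burst() below sweeps up to
 * MAX_RL_VALUE = 32 * 128 = 4096 taps, i.e. 32 read samples.
 */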

#if defined(CONFIG_DDR4)
static int mpr_rd_frmt_config(
	enum mv_ddr_mpr_ps ps,
	enum mv_ddr_mpr_op op,
	enum mv_ddr_mpr_rd_frmt rd_frmt,
	u8 cs_bitmask, u8 dis_auto_refresh)
{
	u32 val, mask;
	u8 cs_bitmask_inv;

	if (dis_auto_refresh == 1) {
		ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, ODPG_CTRL_CTRL_REG,
				  ODPG_CTRL_AUTO_REFRESH_DIS << ODPG_CTRL_AUTO_REFRESH_OFFS,
				  ODPG_CTRL_AUTO_REFRESH_MASK << ODPG_CTRL_AUTO_REFRESH_OFFS);
	} else {
		ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, ODPG_CTRL_CTRL_REG,
				  ODPG_CTRL_AUTO_REFRESH_ENA << ODPG_CTRL_AUTO_REFRESH_OFFS,
				  ODPG_CTRL_AUTO_REFRESH_MASK << ODPG_CTRL_AUTO_REFRESH_OFFS);
	}

	/* configure MPR location for MPR write and read accesses within the selected page */
	ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, DDR4_MPR_WR_REG,
			  DDR4_MPR_LOC3 << DDR4_MPR_LOC_OFFS,
			  DDR4_MPR_LOC_MASK << DDR4_MPR_LOC_OFFS);

	/* configure MPR page selection, operation and read format */
	val = ps << DDR4_MPR_PS_OFFS |
	      op << DDR4_MPR_OP_OFFS |
	      rd_frmt << DDR4_MPR_RF_OFFS;
	mask = DDR4_MPR_PS_MASK << DDR4_MPR_PS_OFFS |
	       DDR4_MPR_OP_MASK << DDR4_MPR_OP_OFFS |
	       DDR4_MPR_RF_MASK << DDR4_MPR_RF_OFFS;
	ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, DDR4_MR3_REG, val, mask);

	/* prepare cs bitmask in active low format */
	cs_bitmask_inv = ~cs_bitmask & SDRAM_OP_CMD_ALL_CS_MASK;
	ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, SDRAM_OP_REG,
			  CMD_DDR3_DDR4_MR3 << SDRAM_OP_CMD_OFFS |
			  cs_bitmask_inv << SDRAM_OP_CMD_CS_OFFS(0),
			  SDRAM_OP_CMD_MASK << SDRAM_OP_CMD_OFFS |
			  SDRAM_OP_CMD_ALL_CS_MASK << SDRAM_OP_CMD_CS_OFFS(0));

	if (ddr3_tip_if_polling(0, ACCESS_TYPE_UNICAST, 0,
				CMD_NORMAL, SDRAM_OP_CMD_MASK, SDRAM_OP_REG,
				MAX_POLLING_ITERATIONS)) {
		printf("error: %s failed\n", __func__);
		return -1;
	}

	return 0;
}
#endif /* CONFIG_DDR4 */

int mv_ddr_rl_dqs_burst(u32 dev_num, u32 if_id, u32 freq)
{
	enum rl_dqs_burst_state rl_state[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } };
	enum hws_ddr_phy subphy_type = DDR_PHY_DATA;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	int cl_val = tm->interface_params[0].cas_l;
	int rl_adll_val, rl_phase_val, sdr_cycle_incr, rd_sample, rd_ready;
	int final_rd_sample, final_rd_ready;
	int i, subphy_id, step;
	int pass_lock_num = 0;
	int init_pass_lock_num;
	int phase_delta;
	int min_phase, max_phase;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 rl_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } };
	u32 rl_min_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } };
	u32 rl_max_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } };
	u32 rl_val, rl_min_val[MAX_CS_NUM], rl_max_val[MAX_CS_NUM];
	u32 reg_val_low, reg_val_high;
	u32 reg_val, reg_mask;
	uintptr_t test_addr = TEST_ADDR;

#if defined(CONFIG_DDR4)
	int status;
	u8 cs_bitmask = tm->interface_params[0].as_bus_params[0].cs_bitmask;
	u8 curr_cs_bitmask_inv;

	/* enable MPR for all existing chip-selects */
	status = mpr_rd_frmt_config(DDR4_MPR_PAGE0,
				    DDR4_MPR_OP_ENA,
				    DDR4_MPR_RF_SERIAL,
				    cs_bitmask, 1);
	if (status)
		return status;
#endif /* CONFIG_DDR4 */

	/* initialization */
	if (mv_ddr_is_ecc_ena()) {
		ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, TRAINING_SW_2_REG,
				 &reg_val, MASK_ALL_BITS);
		reg_mask = (TRAINING_ECC_MUX_MASK << TRAINING_ECC_MUX_OFFS) |
			   (TRAINING_SW_OVRD_MASK << TRAINING_SW_OVRD_OFFS);
		reg_val &= ~reg_mask;
		reg_val |= (TRAINING_ECC_MUX_DIS << TRAINING_ECC_MUX_OFFS) |
			   (TRAINING_SW_OVRD_ENA << TRAINING_SW_OVRD_OFFS);
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, TRAINING_SW_2_REG,
				  reg_val, MASK_ALL_BITS);
		ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, TRAINING_REG,
				 &reg_val, MASK_ALL_BITS);
		reg_mask = (TRN_START_MASK << TRN_START_OFFS);
		reg_val &= ~reg_mask;
		reg_val |= TRN_START_ENA << TRN_START_OFFS;
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, TRAINING_REG,
				  reg_val, MASK_ALL_BITS);
	}
		reg_mask = (TRN_START_MASK << TRN_START_OFFS);
		reg_val &= ~reg_mask;
		reg_val |= TRN_START_ENA << TRN_START_OFFS;
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, TRAINING_REG,
				  reg_val, MASK_ALL_BITS);
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++)
		for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++)
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++)
				if (IS_BUS_ACTIVE(tm->bus_act_mask, subphy_id) == 0)
					pass_lock_num++; /* increment on inactive subphys */

	init_pass_lock_num = pass_lock_num / max_cs;
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			training_result[training_stage][if_id] = TEST_SUCCESS;
		}
	}

	/* search for dqs edges per subphy */
	if_id = 0;
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
#if defined(CONFIG_DDR4)
		/* enable read preamble training mode for chip-select under test */
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				  DDR4_MR4_REG,
				  DDR4_RPT_ENA << DDR4_RPT_OFFS,
				  DDR4_RPT_MASK << DDR4_RPT_OFFS);
		/* prepare current cs bitmask in active low format */
		curr_cs_bitmask_inv = ~(1 << effective_cs) & SDRAM_OP_CMD_ALL_CS_MASK;
		reg_val = curr_cs_bitmask_inv << SDRAM_OP_CMD_CS_OFFS(0) |
			  CMD_DDR4_MR4 << SDRAM_OP_CMD_OFFS;
		reg_mask = SDRAM_OP_CMD_ALL_CS_MASK << SDRAM_OP_CMD_CS_OFFS(0) |
			   SDRAM_OP_CMD_MASK << SDRAM_OP_CMD_OFFS;
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				  SDRAM_OP_REG, reg_val, reg_mask);
		if (ddr3_tip_if_polling(0, ACCESS_TYPE_UNICAST, 0,
					CMD_NORMAL, SDRAM_OP_CMD_MASK, SDRAM_OP_REG,
					MAX_POLLING_ITERATIONS)) {
			printf("error: %s failed\n", __func__);
			return -1;
		}

		/* disable preamble training mode for existing chip-selects not under test */
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				  DDR4_MR4_REG,
				  DDR4_RPT_DIS << DDR4_RPT_OFFS,
				  DDR4_RPT_MASK << DDR4_RPT_OFFS);
		/* prepare bitmask for existing chip-selects not under test in active low format */
		reg_val = ((~(curr_cs_bitmask_inv & cs_bitmask) & SDRAM_OP_CMD_ALL_CS_MASK) <<
			   SDRAM_OP_CMD_CS_OFFS(0)) |
			  CMD_DDR4_MR4 << SDRAM_OP_CMD_OFFS;
		reg_mask = SDRAM_OP_CMD_ALL_CS_MASK << SDRAM_OP_CMD_CS_OFFS(0) |
			   SDRAM_OP_CMD_MASK << SDRAM_OP_CMD_OFFS;
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				  SDRAM_OP_REG, reg_val, reg_mask);
		if (ddr3_tip_if_polling(0, ACCESS_TYPE_UNICAST, 0,
					CMD_NORMAL, SDRAM_OP_CMD_MASK, SDRAM_OP_REG,
					MAX_POLLING_ITERATIONS)) {
			printf("error: %s failed\n", __func__);
			return -1;
		}

#endif /* CONFIG_DDR4 */

		pass_lock_num = init_pass_lock_num;
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ODPG_DATA_CTRL_REG,
				  effective_cs << ODPG_DATA_CS_OFFS,
				  ODPG_DATA_CS_MASK << ODPG_DATA_CS_OFFS);
		rl_min_val[effective_cs] = MAX_RL_VALUE;
		rl_max_val[effective_cs] = 0;
		step = STEP_SIZE;
		for (i = 0; i < MAX_RL_VALUE; i += step) {
			rl_val = 0;
			sdr_cycle_incr = i / TAPS_PER_RD_SAMPLE; /* sdr cycle increment */
			rd_sample = cl_val + 2 * sdr_cycle_incr;
			/* fifo out to in delay in search is constant */
			rd_ready = rd_sample + RD_FIFO_DLY;

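			/*
			 * Worked example (illustrative numbers only): with
			 * cl_val = 11 and i = 2 * TAPS_PER_RD_SAMPLE taps,
			 * sdr_cycle_incr = 2, so rd_sample = 11 + 2 * 2 = 15
			 * and rd_ready = 15 + RD_FIFO_DLY.
			 */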
			ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_SMPL_DLYS_REG,
					  rd_sample << RD_SMPL_DLY_CS_OFFS(effective_cs),
					  RD_SMPL_DLY_CS_MASK << RD_SMPL_DLY_CS_OFFS(effective_cs));
			ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_RDY_DLYS_REG,
					  rd_ready << RD_RDY_DLY_CS_OFFS(effective_cs),
					  RD_RDY_DLY_CS_MASK << RD_RDY_DLY_CS_OFFS(effective_cs));

			/* one sdr (single data rate) cycle incremented on every four phases of ddr clock */
			sdr_cycle_incr = i % TAPS_PER_RD_SAMPLE;
			rl_adll_val = sdr_cycle_incr % MAX_RD_SAMPLES;
			rl_phase_val = sdr_cycle_incr / MAX_RD_SAMPLES;
			rl_val = ((rl_adll_val & RL_REF_DLY_MASK) << RL_REF_DLY_OFFS) |
				 ((rl_phase_val & RL_PH_SEL_MASK) << RL_PH_SEL_OFFS);

			/* write to all subphys (even those not connected or already locked) */
			ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
					   0, DDR_PHY_DATA, RL_PHY_REG(effective_cs), rl_val);

			/* reset read fifo assertion */
			ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
					  DATA_PUP_RD_RESET_ENA << DATA_PUP_RD_RESET_OFFS,
					  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

			/* reset read fifo deassertion */
			ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
					  DATA_PUP_RD_RESET_DIS << DATA_PUP_RD_RESET_OFFS,
					  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

			/* perform one read burst */
			if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask))
				readq(test_addr);
			else
				readl(test_addr);

			/* progress read ptr; decide on rl state per byte */
			for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
				if (rl_state[effective_cs][subphy_id][if_id] == RL_BEHIND)
					continue; /* skip locked subphys */
				ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST, subphy_id, DDR_PHY_DATA,
						  RD_FIFO_PTR_LOW_STAT_INDIR_ADDR, &reg_val_low);
				ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST, subphy_id, DDR_PHY_DATA,
						  RD_FIFO_PTR_HIGH_STAT_INDIR_ADDR, &reg_val_high);
				DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
					       ("%s: cs %d, step %d, subphy %d, state %d, low 0x%04x, high 0x%04x; move to ",
						__func__, effective_cs, i, subphy_id,
						rl_state[effective_cs][subphy_id][if_id],
						reg_val_low, reg_val_high));
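				/*
				 * Sweep state per subphy (a sketch of the search):
				 * RL_AHEAD  - before the dqs window; keep sweeping and
				 *             shrink the step as the fifo pointers
				 *             approach the expected edge position.
				 * RL_INSIDE - inside the window; track its width and
				 *             take its center as the solution.
				 * RL_BEHIND - solution locked; subphy is skipped.
				 */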
				switch (rl_state[effective_cs][subphy_id][if_id]) {
				case RL_AHEAD:
					/* improve search resolution getting closer to the window */
					if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_4 &&
					    reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_4) {
						rl_state[effective_cs][subphy_id][if_id] = RL_INSIDE;
						rl_values[effective_cs][subphy_id][if_id] = i;
						rl_min_values[effective_cs][subphy_id][if_id] = i;
						DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
							       ("new state %d\n",
								rl_state[effective_cs][subphy_id][if_id]));
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_3 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_3) {
						step = (step < 2) ? step : 2;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_2 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_2) {
						step = (step < 16) ? step : 16;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_1 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_1) {
						step = (step < 32) ? step : 32;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_0 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_0) {
						step = (step < 64) ? step : 64;
					} else {
						/* otherwise, step is unchanged */
					}
					break;
				case RL_INSIDE:
					if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_4 &&
					    reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_4) {
						rl_max_values[effective_cs][subphy_id][if_id] = i;
						if ((rl_max_values[effective_cs][subphy_id][if_id] -
						     rl_min_values[effective_cs][subphy_id][if_id]) >
						    ADLL_TAPS_IN_CYCLE) {
							rl_state[effective_cs][subphy_id][if_id] = RL_BEHIND;
							rl_values[effective_cs][subphy_id][if_id] =
								(i + rl_values[effective_cs][subphy_id][if_id]) / 2;
							pass_lock_num++;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new lock %d\n", pass_lock_num));
							if (rl_min_val[effective_cs] >
							    rl_values[effective_cs][subphy_id][if_id])
								rl_min_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							if (rl_max_val[effective_cs] <
							    rl_values[effective_cs][subphy_id][if_id])
								rl_max_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							step = 2;
						}
					}
					if (reg_val_low != RD_FIFO_DQS_FALL_EDGE_POS_4 ||
					    reg_val_high != RD_FIFO_DQS_RISE_EDGE_POS_4) {
						if ((i - rl_values[effective_cs][subphy_id][if_id]) <
						    RL_JITTER_WIDTH_LMT) {
							/* inside the jitter; not valid segment */
							rl_state[effective_cs][subphy_id][if_id] = RL_AHEAD;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new state %d; jitter on mask\n",
									rl_state[effective_cs][subphy_id][if_id]));
						} else { /* finished valid segment */
							rl_state[effective_cs][subphy_id][if_id] = RL_BEHIND;
							rl_values[effective_cs][subphy_id][if_id] =
								(i + rl_values[effective_cs][subphy_id][if_id]) / 2;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new state %d, solution %d\n",
									rl_state[effective_cs][subphy_id][if_id],
									rl_values[effective_cs][subphy_id][if_id]));
							pass_lock_num++;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new lock %d\n", pass_lock_num));
							if (rl_min_val[effective_cs] >
							    rl_values[effective_cs][subphy_id][if_id])
								rl_min_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							if (rl_max_val[effective_cs] <
							    rl_values[effective_cs][subphy_id][if_id])
								rl_max_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							step = 2;
						}
					}
					break;
				case RL_BEHIND: /* do nothing */
					break;
				}
				DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("\n"));
			}
			DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("pass_lock_num %d\n", pass_lock_num));
			/* exit condition */
			if (pass_lock_num == MAX_BUS_NUM)
				break;
		} /* for-loop on i */

		if (pass_lock_num != MAX_BUS_NUM) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("%s: cs %d, pass_lock_num %d, max_bus_num %d, init_pass_lock_num %d\n",
					__func__, effective_cs, pass_lock_num, MAX_BUS_NUM, init_pass_lock_num));
			for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, subphy_id);
				DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
					       ("%s: subphy %d %s\n",
						__func__, subphy_id,
						(rl_state[effective_cs][subphy_id][if_id] == RL_BEHIND) ?
						"locked" : "not locked"));
			}
		}
	} /* for-loop on effective_cs */

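	/*
	 * Each locked solution above is the center of the detected dqs window,
	 * expressed in adll taps. The post-processing below splits it back into
	 * a coarse read-sample delay (sdr cycles, from the minimal solution per
	 * chip-select), plus per-subphy phase-select and fine adll values.
	 */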
	/* post-processing read leveling results */
	if_id = 0;
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		phase_delta = 0;
		i = rl_min_val[effective_cs];
		sdr_cycle_incr = i / TAPS_PER_RD_SAMPLE; /* sdr cycle increment */
		rd_sample = cl_val + 2 * sdr_cycle_incr;
		rd_ready = rd_sample + RD_FIFO_DLY;
		min_phase = (rl_min_val[effective_cs] - (sdr_cycle_incr * TAPS_PER_RD_SAMPLE)) % MAX_RD_SAMPLES;
		max_phase = (rl_max_val[effective_cs] - (sdr_cycle_incr * TAPS_PER_RD_SAMPLE)) % MAX_RD_SAMPLES;
		final_rd_sample = rd_sample;
		final_rd_ready = rd_ready;

		ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_SMPL_DLYS_REG,
				  rd_sample << RD_SMPL_DLY_CS_OFFS(effective_cs),
				  RD_SMPL_DLY_CS_MASK << RD_SMPL_DLY_CS_OFFS(effective_cs));
		ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_RDY_DLYS_REG,
				  rd_ready << RD_RDY_DLY_CS_OFFS(effective_cs),
				  RD_RDY_DLY_CS_MASK << RD_RDY_DLY_CS_OFFS(effective_cs));
		DEBUG_LEVELING(DEBUG_LEVEL_INFO,
			       ("%s: cs %d, min phase %d, max phase %d, read sample %d\n",
				__func__, effective_cs, min_phase, max_phase, rd_sample));

		for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, subphy_id);
			/* reduce sdr cycle per cs; extract rl adll and phase values */
			i = rl_values[effective_cs][subphy_id][if_id] - (sdr_cycle_incr * TAPS_PER_RD_SAMPLE);
			rl_adll_val = i % MAX_RD_SAMPLES;
			rl_phase_val = i / MAX_RD_SAMPLES;
			rl_phase_val -= phase_delta;
			DEBUG_LEVELING(DEBUG_LEVEL_INFO,
				       ("%s: final results: cs %d, subphy %d, read sample %d, read ready %d, rl_phase_val %d, rl_adll_val %d\n",
					__func__, effective_cs, subphy_id, final_rd_sample,
					final_rd_ready, rl_phase_val, rl_adll_val));

			rl_val = ((rl_adll_val & RL_REF_DLY_MASK) << RL_REF_DLY_OFFS) |
				 ((rl_phase_val & RL_PH_SEL_MASK) << RL_PH_SEL_OFFS);
			ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_UNICAST,
					   subphy_id, subphy_type, RL_PHY_REG(effective_cs), rl_val);
		}
	} /* for-loop on effective cs */

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (odt_config != 0)
			CHECK_STATUS(ddr3_tip_write_additional_odt_setting(dev_num, if_id));
	}

#if defined(CONFIG_DDR4)
	/* disable read preamble training mode for all existing chip-selects */
	ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			  DDR4_MR4_REG,
			  DDR4_RPT_DIS << DDR4_RPT_OFFS,
			  DDR4_RPT_MASK << DDR4_RPT_OFFS);
	reg_val = (~cs_bitmask & SDRAM_OP_CMD_ALL_CS_MASK) << SDRAM_OP_CMD_CS_OFFS(0) |
		  CMD_DDR4_MR4 << SDRAM_OP_CMD_OFFS;
	reg_mask = SDRAM_OP_CMD_ALL_CS_MASK << SDRAM_OP_CMD_CS_OFFS(0) |
		   SDRAM_OP_CMD_MASK << SDRAM_OP_CMD_OFFS;
	ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			  SDRAM_OP_REG, reg_val, reg_mask);
	if (ddr3_tip_if_polling(0, ACCESS_TYPE_UNICAST, 0,
				CMD_NORMAL, SDRAM_OP_CMD_MASK, SDRAM_OP_REG,
				MAX_POLLING_ITERATIONS)) {
		printf("error: %s failed\n", __func__);
		return -1;
	}

	/* disable MPR for all existing chip-selects */
	status = mpr_rd_frmt_config(DDR4_MPR_PAGE0,
				    DDR4_MPR_OP_DIS,
				    DDR4_MPR_RF_SERIAL,
				    cs_bitmask, 0);
	if (status)
		return status;
#endif /* CONFIG_DDR4 */

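	/*
	 * Pulse the read fifo reset (assert, then deassert) so the fifo
	 * pointers leave training from a known-clean state.
	 */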
	/* reset read fifo assertion */
	ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
			  DATA_PUP_RD_RESET_ENA << DATA_PUP_RD_RESET_OFFS,
			  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

	/* reset read fifo deassertion */
	ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
			  DATA_PUP_RD_RESET_DIS << DATA_PUP_RD_RESET_OFFS,
			  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

	return MV_OK;
}
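/*
 * Usage note (illustrative; the call sites are outside this excerpt): the
 * routine trains all chip-selects in a single pass and returns MV_OK on
 * success, so a caller would typically do:
 *
 *	if (mv_ddr_rl_dqs_burst(0, 0, freq) != MV_OK)
 *		DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("rl dqs burst training failed\n"));
 */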