// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 * Copyright (c) 2022 Edgeble AI Technologies Pvt. Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/arch-rockchip/cru_rv1126.h>
#include <asm/arch-rockchip/grf_rv1126.h>
#include <asm/arch-rockchip/sdram_common.h>
#include <asm/arch-rockchip/sdram_rv1126.h>
#include <linux/delay.h>

/* define training flag */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)
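/*
 * These flags are OR-ed together to form the training_flag argument of
 * data_training() at the end of this file; FULL_TRAINING sets every bit
 * and so requests all steps that apply to the DRAM type in use.
 */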

#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

struct dram_info {
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	u32 sr_idle;
	u32 pd_idle;
#endif
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
};

#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))

#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;

struct rv1126_sdram_params sdram_configs[] = {
#if defined(CONFIG_RAM_ROCKCHIP_LPDDR4)
# include	"sdram-rv1126-lpddr4-detect-328.inc"
# include	"sdram-rv1126-lpddr4-detect-396.inc"
# include	"sdram-rv1126-lpddr4-detect-528.inc"
# include	"sdram-rv1126-lpddr4-detect-664.inc"
# include	"sdram-rv1126-lpddr4-detect-784.inc"
# include	"sdram-rv1126-lpddr4-detect-924.inc"
# include	"sdram-rv1126-lpddr4-detect-1056.inc"
#elif defined(CONFIG_RAM_ROCKCHIP_DDR4)
# include	"sdram-rv1126-ddr4-detect-328.inc"
# include	"sdram-rv1126-ddr4-detect-396.inc"
# include	"sdram-rv1126-ddr4-detect-528.inc"
# include	"sdram-rv1126-ddr4-detect-664.inc"
# include	"sdram-rv1126-ddr4-detect-784.inc"
# include	"sdram-rv1126-ddr4-detect-924.inc"
# include	"sdram-rv1126-ddr4-detect-1056.inc"
#else
# include	"sdram-rv1126-ddr3-detect-328.inc"
# include	"sdram-rv1126-ddr3-detect-396.inc"
# include	"sdram-rv1126-ddr3-detect-528.inc"
# include	"sdram-rv1126-ddr3-detect-664.inc"
# include	"sdram-rv1126-ddr3-detect-784.inc"
# include	"sdram-rv1126-ddr3-detect-924.inc"
# include	"sdram-rv1126-ddr3-detect-1056.inc"
#endif
};

u32 common_info[] = {
#include	"sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

/* DDR configuration 0-9 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
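/*
 * Field layout of each ddr_cfg_2_rbc entry, inferred from the encoding
 * built in calculate_ddrconfig() below (bit[4] is compared but never set
 * there, so its meaning is not derived in this file):
 *   bit[8]     rank - 1 (set when the config can map two chip selects)
 *   bits[7:5]  row bits - 13
 *   bit[3]     1: 8 banks, 0: 4 banks
 *   bits[2:0]  (bus-width code + column bits) - 10
 */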

/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
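/*
 * Field layout of each ddr4_cfg_2_rbc entry, again inferred from the
 * encoding in calculate_ddrconfig():
 *   bit[7]     rank - 1
 *   bits[6:4]  row bits - 13
 *   bit[3]     set only for the two-rank, equal-row special case
 *   bits[2:1]  bus-width code
 *   bit[0]     die bus width (dbw)
 */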

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};

u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
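/*
 * Each pair maps a DDR4 config number (10..21) to the DDR3-style config
 * that shares the same controller address mapping: calculate_ddrconfig()
 * translates forward through this table, and set_ctl_address_map()
 * translates back to recover the DDR4 numbering.
 */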

u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};
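/*
 * Each addrmap row holds the nine words that set_ctl_address_map() copies
 * into the consecutive ADDRMAP0..ADDRMAP8 registers; the first word goes
 * to ADDRMAP0 (the chip-select bit position, judging by the 0x1f field
 * set_ctl_address_map() forces for single-rank parts). Row indices match
 * the ddrconf numbers in the tables above.
 */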

static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
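/*
 * The divider choice above follows the usual Rockchip integer-PLL
 * relation (a sketch, assuming the 24 MHz crystal reference):
 *
 *   FOUT = 24 MHz * fbdiv / (refdiv * postdiv1 * postdiv2)
 *
 * e.g. a 528 MHz request selects postdiv1 = 4, postdiv2 = 1, refdiv = 1,
 * so fbdiv = 528 * 4 / 24 = 88 and FOUT = 24 * 88 / 4 = 528 MHz.
 */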

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* the inno ddr phy needs freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHz / 2);
}

static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
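/*
 * Worked example (illustrative only): a one-rank DDR3 part with 15 row
 * bits, 10 column bits, 8 banks and bus-width code 2 encodes to
 * tmp = (2 << 5) | (1 << 3) | 2 = 0x4a, whose first match in the scan is
 * ddr_cfg_2_rbc[1], so ddrconf = 1.
 */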

static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done=1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait programming done */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
				PCTL2_SW_DONE_ACK)
			break;
	}
}
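/*
 * sw_set_req()/sw_set_ack() bracket every quasi-dynamic register update:
 * clear SWCTL.sw_done, modify the register, then set sw_done again and
 * poll SWSTAT for the acknowledge. A typical caller in this file looks
 * like:
 *
 *	sw_set_req(dram);
 *	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(fsp) + DDR_PCTL2_INIT6,
 *			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT, val);
 *	sw_set_ack(dram);
 */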

static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
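/*
 * The divider table above roughly halves fbdiv each time the target
 * frequency doubles, which keeps the PHY PLL VCO inside its working
 * range; the exact output relation of this Inno PHY PLL is not derived
 * in this file.
 */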

static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_85ohm, 85},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}
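/*
 * lp4_odt_calc() rounds a requested ODT resistance up to the nearest
 * strength the LPDDR4 mode registers can express, e.g. 50 ohm maps to
 * LPDDR4_DQODT_60 and anything above 120 ohm to LPDDR4_DQODT_240.
 */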

static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
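/*
 * The numbers above read as VREF levels in 0.1%-of-VDDQ units. For
 * LPDDR4 the clamping and the (v - 100) / 4 vs. (v - 220) / 4 conversion
 * match the JEDEC MR12/MR14 encoding: range 0 covers 10.0%..30.0% and
 * range 1 covers 22.0%..42.0%, both in 0.4% steps, with bit 6 selecting
 * the range. The * 11 / 6 branch presumably rescales for LPDDR4X, whose
 * I/O rail differs (an interpretation, not stated in this file).
 */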

static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* the dram odt enable freq selects phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* the phy odt enable freq selects dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4) {
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	if (dramtype != LPDDR4) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				(phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}
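	/*
	 * The divider math above places vref at the DC mid-point of the
	 * received signal, in 1/256 steps of VDDQ: with pull-down-only ODT
	 * the high level is VDDQ * Rodt / (Rodt + Rdrv), half of which
	 * gives 128 * Rodt / (Rodt + Rdrv); the pull-up-only case mirrors
	 * it. 0x80 is the unterminated 50% default.
	 */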

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else {	/* for lpddr4 and lpddr4x */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHz, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return temp;
}
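/*
 * For LPDDR2/LPDDR3 the MR read data comes back on whichever DQ lines
 * the board wired to the controller, so read_mr() re-assembles it bit by
 * bit: nibble i of dqmap holds the destination bit position for
 * controller bit i, e.g. dqmap = 0x76543210 is the identity mapping.
 */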

/* autorefresh should be disabled before calling this function */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for differential signal: clk
 * delta_sig: value for single signal: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if (dramtype == LPDDR4 &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}

static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}
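/*
 * low_power_update(dram, 0) disables the PWRCTL low-power features and
 * returns the previous enable bits, so a caller can later pass the saved
 * value back in as 'en' to restore them.
 */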

/*
 * signal:
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for differential signal: dqs
 * delta_sig: value for single signal: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
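/*
 * data_training_rg() returns the PHY's gate-training status (reg 0x91,
 * including its bit 5, presumably an error flag) XOR-ed with the active
 * byte-lane mask from reg 0xf: 0 means every enabled byte lane trained,
 * and any set bit flags a failing lane.
 */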

static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
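/*
 * During write leveling on a two-rank DDR3/DDR4 system the other rank's
 * MR1 is rewritten with bit 12 set, which is the Qoff output-disable bit
 * in both JEDEC MR1 definitions, so only the rank under training drives
 * the DQ bus; the bit is cleared again once leveling finishes.
 */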

char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
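/*
 * Alternating 0xaa/0x55 data used as a training pattern. A plausible
 * reading, given that data_training_wr() below clears the
 * wrtrain_check_data_value_random_gen bit, is that training can compare
 * against fixed data like this instead of randomly generated data.
 */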
1662
1663static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
1664			    u32 mhz)
1665{
1666	void __iomem *pctl_base = dram->pctl;
1667	void __iomem *phy_base = dram->phy;
1668	u32 trefi_1x, trfc_1x;
1669	u32 dis_auto_zq = 0;
1670	u32 timeout_us = 1000;
1671	u32 dqs_default;
1672	u32 cur_fsp;
1673	u32 vref_inner;
1674	u32 i;
1675	struct sdram_head_info_index_v2 *index =
1676		(struct sdram_head_info_index_v2 *)common_info;
1677	struct dq_map_info *map_info;
1678
1679	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
1680	if (dramtype == DDR3 && vref_inner == 0x80) {
1681		for (i = 0; i < 4; i++)
1682			writel(vref_inner - 0xa,
1683			       PHY_REG(phy_base, 0x118 + i * 0x10));
1684
1685		/* reg_rx_vref_value_update */
1686		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1687		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1688	}
1689
1690	map_info = (struct dq_map_info *)((void *)common_info +
1691		index->dq_map_index.offset * 4);
1692	/* only 1cs a time, 0:cs0 1 cs1 */
1693	if (cs > 1)
1694		return -1;
1695
1696	dqs_default = 0xf;
1697	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1698
1699	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1700	/* config refresh timing */
1701	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1702			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1703	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1704			DDR_PCTL2_RFSHTMG) & 0x3ff;
1705	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1706	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1707	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1708	/* reg_phy_trfc */
1709	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1710	/* reg_max_refi_cnt */
1711	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1712
1713	/* choose training cs */
1714	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);
1715
1716	/* set dq map for ddr4 */
1717	if (dramtype == DDR4) {
1718		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
1719		for (i = 0; i < 4; i++) {
1720			writel((map_info->ddr4_dq_map[cs * 2] >>
1721				((i % 4) * 8)) & 0xff,
1722				PHY_REG(phy_base, 0x238 + i));
1723			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
1724				((i % 4) * 8)) & 0xff,
1725				PHY_REG(phy_base, 0x2b8 + i));
1726		}
1727	}
1728
1729	/* cha_l reg_l_rd_train_dqs_default[5:0] */
1730	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
1731	/* cha_h reg_h_rd_train_dqs_default[5:0] */
1732	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
1733	/* chb_l reg_l_rd_train_dqs_default[5:0] */
1734	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
1735	/* chb_h reg_h_rd_train_dqs_default[5:0] */
1736	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);
1737
1738	/* Choose the read train auto mode */
1739	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
1740	/* Enable the auto train of the read train */
1741	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);
1742
1743	/* Wait the train done. */
1744	while (1) {
1745		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
1746			break;
1747
1748		udelay(1);
1749		if (timeout_us-- == 0) {
1750			printascii("error: read training timeout\n");
1751			return -1;
1752		}
1753	}
1754
1755	/* Check the read train state */
1756	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
1757	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
1758		printascii("error: read training error\n");
1759		return -1;
1760	}
1761
1762	/* Exit the Read Training by setting */
1763	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));
1764
1765	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1766
1767	if (dramtype == DDR3 && vref_inner == 0x80) {
1768		for (i = 0; i < 4; i++)
1769			writel(vref_inner,
1770			       PHY_REG(phy_base, 0x118 + i * 0x10));
1771
1772		/* reg_rx_vref_value_update */
1773		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1774		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1775	}
1776
1777	return 0;
1778}
1779
1780static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1781			    u32 mhz, u32 dst_fsp)
1782{
1783	void __iomem *pctl_base = dram->pctl;
1784	void __iomem *phy_base = dram->phy;
1785	u32 trefi_1x, trfc_1x;
1786	u32 dis_auto_zq = 0;
1787	u32 timeout_us = 1000;
1788	u32 cur_fsp;
1789	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1790
1791	if (dramtype == LPDDR3 && mhz <= 400) {
1792		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1793		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1794		cl = readl(PHY_REG(phy_base, offset));
1795		cwl = readl(PHY_REG(phy_base, offset + 2));
1796
1797		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1798		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1799		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1800	}
1801
1802	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1803
1804	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1805	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1806	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1807	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1808	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1809	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1810	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1811	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1812	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1813	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1814
1815	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1816	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1817
1818	/* config refresh timing */
1819	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1820	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1821			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1822	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1823			DDR_PCTL2_RFSHTMG) & 0x3ff;
1824	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1825	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1826	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1827	/* reg_phy_trfc */
1828	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1829	/* reg_max_refi_cnt */
1830	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1831
	/* choose training cs (0x2 >> cs: rank 0 -> 0x2, rank 1 -> 0x1) */
1833	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1834
	/*
	 * PHY_0x7a[4] reg_wr_train_dqs_default_bypass
	 * 0: use the write-leveling value
	 * 1: use regs 0x233/0x237/0x2b3/0x2b7
	 */
1838	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1839
1840	/* PHY_0x7a [0] reg_dq_wr_train_auto */
1841	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1842
1843	/* PHY_0x7a [1] reg_dq_wr_train_en */
1844	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1845
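	/* the refresh command appears to kick off the write-training engine */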
1846	send_a_refresh(dram);
1847
1848	while (1) {
1849		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1850			break;
1851
1852		udelay(1);
1853		if (timeout_us-- == 0) {
1854			printascii("error: write training timeout\n");
			return -1;
1857		}
1858	}
1859
1860	/* Check the write train state */
1861	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1862		printascii("error: write training error\n");
1863		return -1;
1864	}
1865
1866	/* PHY_0x7a [1] reg_dq_wr_train_en */
1867	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1868
1869	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1870
1871	/* save LPDDR4 write vref to fsp_param for dfs */
1872	if (dramtype == LPDDR4) {
1873		fsp_param[dst_fsp].vref_dq[cs] =
1874			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1875			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1876		/* add range info */
1877		fsp_param[dst_fsp].vref_dq[cs] |=
1878			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1879	}
1880
1881	if (dramtype == LPDDR3 && mhz <= 400) {
1882		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1883		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1884		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1885			       DDR_PCTL2_INIT3);
1886		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1887			      dramtype);
1888	}
1889
1890	return 0;
1891}
1892
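/*
 * Dispatch the requested training steps in a fixed order: write leveling,
 * read-gate training, read training, then write training. FULL_TRAINING
 * expands to these four; CA training is not dispatched here. Any failing
 * step aborts the rest.
 */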
1893static int data_training(struct dram_info *dram, u32 cs,
1894			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1895			 u32 training_flag)
1896{
	int ret = 0;
1898
1899	if (training_flag == FULL_TRAINING)
1900		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1901				WRITE_TRAINING | READ_TRAINING;
1902
1903	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1904		ret = data_training_wl(dram, cs,
1905				       sdram_params->base.dramtype,
1906				       sdram_params->ch.cap_info.rank);
1907		if (ret != 0)
1908			goto out;
1909	}
1910
1911	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1912		ret = data_training_rg(dram, cs,
1913				       sdram_params->base.dramtype);
1914		if (ret != 0)
1915			goto out;
1916	}
1917
1918	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1919		ret = data_training_rd(dram, cs,
1920				       sdram_params->base.dramtype,
1921				       sdram_params->base.ddr_freq);
1922		if (ret != 0)
1923			goto out;
1924	}
1925
1926	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1927		ret = data_training_wr(dram, cs,
1928				       sdram_params->base.dramtype,
1929				       sdram_params->base.ddr_freq, dst_fsp);
1930		if (ret != 0)
1931			goto out;
1932	}
1933
1934out:
1935	return ret;
1936}
1937
1938static int get_wrlvl_val(struct dram_info *dram,
1939			 struct rv1126_sdram_params *sdram_params)
1940{
1941	int i, j, clk_skew;
1942	void __iomem *phy_base = dram->phy;
1943	u32 lp_stat;
1944	int ret;
1945
1946	lp_stat = low_power_update(dram, 0);
1947
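	/*
	 * Bias the CA/clock deskew to 0x1f so the write-leveling results read
	 * back below can be stored as signed offsets from that baseline.
	 */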
1948	clk_skew = 0x1f;
1949	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1950			 sdram_params->base.dramtype);
1951
1952	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1953	if (sdram_params->ch.cap_info.rank == 2)
1954		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1955
1956	for (j = 0; j < 2; j++)
1957		for (i = 0; i < 4; i++)
1958			wrlvl_result[j][i] =
1959				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1960				clk_skew;
1961
1962	low_power_update(dram, lp_stat);
1963
1964	return ret;
1965}
1966
1967#if defined(CONFIG_CMD_DDR_TEST_TOOL)
1968static void init_rw_trn_result_struct(struct rw_trn_result *result,
1969				      void __iomem *phy_base, u8 cs_num)
1970{
1971	int i;
1972
1973	result->cs_num = cs_num;
1974	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1975			  PHY_DQ_WIDTH_MASK;
1976	for (i = 0; i < FSP_NUM; i++)
1977		result->fsp_mhz[i] = 0;
1978}
1979
1980static void save_rw_trn_min_max(void __iomem *phy_base,
1981				struct cs_rw_trn_result *rd_result,
1982				struct cs_rw_trn_result *wr_result,
1983				u8 byte_en)
1984{
1985	u16 phy_ofs;
1986	u8 dqs;
1987	u8 dq;
1988
1989	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
1990		if ((byte_en & BIT(dqs)) == 0)
1991			continue;
1992
1993		/* Channel A or B (low or high 16 bit) */
1994		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
1995		/* low or high 8 bit */
1996		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
1997		for (dq = 0; dq < 8; dq++) {
1998			rd_result->dqs[dqs].dq_min[dq] =
1999				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2000			rd_result->dqs[dqs].dq_max[dq] =
2001				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2002			wr_result->dqs[dqs].dq_min[dq] =
2003				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2004			wr_result->dqs[dqs].dq_max[dq] =
2005				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2006		}
2007	}
2008}
2009
2010static void save_rw_trn_deskew(void __iomem *phy_base,
2011			       struct fsp_rw_trn_result *result, u8 cs_num,
2012			       int min_val, bool rw)
2013{
2014	u16 phy_ofs;
2015	u8 cs;
2016	u8 dq;
2017
2018	result->min_val = min_val;
2019
2020	for (cs = 0; cs < cs_num; cs++) {
2021		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2022		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2023		for (dq = 0; dq < 8; dq++) {
2024			result->cs[cs].dqs[0].dq_deskew[dq] =
2025				readb(PHY_REG(phy_base, phy_ofs + dq));
2026			result->cs[cs].dqs[1].dq_deskew[dq] =
2027				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2028			result->cs[cs].dqs[2].dq_deskew[dq] =
2029				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2030			result->cs[cs].dqs[3].dq_deskew[dq] =
2031				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2032		}
2033
2034		result->cs[cs].dqs[0].dqs_deskew =
2035			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2036		result->cs[cs].dqs[1].dqs_deskew =
2037			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2038		result->cs[cs].dqs[2].dqs_deskew =
2039			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2040		result->cs[cs].dqs[3].dqs_deskew =
2041			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2042	}
2043}
2044
2045static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2046{
2047	result->flag = DDR_DQ_EYE_FLAG;
2048	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2049}
2050#endif
2051
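/*
 * Retrain at the target frequency: seed the per-byte DQS defaults and the
 * clock/CA skew from the write-leveling results, run read/write training
 * per rank, then centre the RX and TX deskew windows and redo gate training.
 */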
2052static int high_freq_training(struct dram_info *dram,
2053			      struct rv1126_sdram_params *sdram_params,
2054			      u32 fsp)
2055{
2056	u32 i, j;
2057	void __iomem *phy_base = dram->phy;
2058	u32 dramtype = sdram_params->base.dramtype;
2059	int min_val;
2060	int dqs_skew, clk_skew, ca_skew;
2061	u8 byte_en;
2062	int ret;
2063
2064	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2065	dqs_skew = 0;
2066	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2067		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2068			if ((byte_en & BIT(i)) != 0)
2069				dqs_skew += wrlvl_result[j][i];
2070		}
2071	}
2072	dqs_skew = dqs_skew /
2073		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));
2074
2075	clk_skew = 0x20 - dqs_skew;
2076	dqs_skew = 0x20;
2077
2078	if (dramtype == LPDDR4) {
2079		min_val = 0xff;
2080		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2081			for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
2082				min_val = MIN(wrlvl_result[j][i], min_val);
2083
2084		if (min_val < 0) {
2085			clk_skew = -min_val;
2086			ca_skew = -min_val;
2087		} else {
2088			clk_skew = 0;
2089			ca_skew = 0;
2090		}
2091	} else if (dramtype == LPDDR3) {
2092		ca_skew = clk_skew - 4;
2093	} else {
2094		ca_skew = clk_skew;
2095	}
2096	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2097			 dramtype);
2098
2099	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2100	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2101	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2102	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2103	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2104			    READ_TRAINING | WRITE_TRAINING);
2105#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2106	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2107	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2108			    &rw_trn_result.wr_fsp[fsp].cs[0],
2109			    rw_trn_result.byte_en);
2110#endif
2111	if (sdram_params->ch.cap_info.rank == 2) {
2112		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2113		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2114		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2115		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2116		ret |= data_training(dram, 1, sdram_params, fsp,
2117				     READ_GATE_TRAINING | READ_TRAINING |
2118				     WRITE_TRAINING);
2119#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2120		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2121				    &rw_trn_result.wr_fsp[fsp].cs[1],
2122				    rw_trn_result.byte_en);
2123#endif
2124	}
2125	if (ret)
2126		goto out;
2127
2128	record_dq_prebit(dram);
2129
2130	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2131				sdram_params->ch.cap_info.rank) * -1;
2132	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2133			 min_val, min_val, sdram_params->ch.cap_info.rank);
2134#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2135	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2136			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2137			   SKEW_RX_SIGNAL);
2138#endif
2139
2140	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2141				    sdram_params->ch.cap_info.rank),
2142		      get_min_value(dram, SKEW_CA_SIGNAL,
2143				    sdram_params->ch.cap_info.rank)) * -1;
2144
2145	/* clk = 0, rx all skew -7, tx - min_value */
2146	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2147			 dramtype);
2148
2149	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2150			 min_val, min_val, sdram_params->ch.cap_info.rank);
2151#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2152	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2153			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2154			   SKEW_TX_SIGNAL);
2155#endif
2156
2157	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2158	if (sdram_params->ch.cap_info.rank == 2)
2159		ret |= data_training(dram, 1, sdram_params, 0,
2160				     READ_GATE_TRAINING);
2161out:
2162	return ret;
2163}
2164
2165static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2166{
2167	writel(ddrconfig, &dram->msch->deviceconf);
2168	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2169}
2170
2171static void update_noc_timing(struct dram_info *dram,
2172			      struct rv1126_sdram_params *sdram_params)
2173{
2174	void __iomem *pctl_base = dram->pctl;
2175	u32 bw, bl;
2176
2177	bw = 8 << sdram_params->ch.cap_info.bw;
2178	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2179
2180	/* update the noc timing related to data bus width */
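	/*
	 * (bw / 8 * bl) is bytes per burst: e.g. a 32-bit bus (bw = 32) with
	 * BL8 moves 32 bytes and gets burstsize code 1 below.
	 */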
2181	if ((bw / 8 * bl) <= 16)
2182		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2183	else if ((bw / 8 * bl) == 32)
2184		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2185	else if ((bw / 8 * bl) == 64)
2186		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2187	else
2188		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2189
2190	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2191		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2192
2193	if (sdram_params->base.dramtype == LPDDR4) {
2194		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2195			(bw == 16) ? 0x1 : 0x2;
2196		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2197			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2198	}
2199
2200	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2201	       &dram->msch->ddrtiminga0);
2202	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2203	       &dram->msch->ddrtimingb0);
2204	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2205	       &dram->msch->ddrtimingc0);
2206	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2207	       &dram->msch->devtodev0);
2208	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2209	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2210	       &dram->msch->ddr4timing);
2211}
2212
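/*
 * Configure the address split: when the high half of the bus backs less
 * capacity than the low half, addresses below split_size (cap in 16 MiB
 * units, from cap >> 24) use the full width, while anything above is served
 * by the low half only.
 */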
2213static int split_setup(struct dram_info *dram,
2214		       struct rv1126_sdram_params *sdram_params)
2215{
2216	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2217	u32 dramtype = sdram_params->base.dramtype;
2218	u32 split_size, split_mode;
2219	u64 cs_cap[2], cap;
2220
2221	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2222	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only the case where the larger capacity sits in the low 16 bits is supported */
2224	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2225		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2226		cap_info->cs0_high16bit_row));
2227	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2228		   (cap_info->rank == 2)) {
2229		if (!cap_info->cs1_high16bit_row)
2230			cap = cs_cap[0];
2231		else
2232			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2233				cap_info->cs1_high16bit_row));
2234	} else {
2235		goto out;
2236	}
2237	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2238	if (cap_info->bw == 2)
2239		split_mode = SPLIT_MODE_32_L16_VALID;
2240	else
2241		split_mode = SPLIT_MODE_16_L8_VALID;
2242
2243	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2244		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2245		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2246		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2247		     (split_mode << SPLIT_MODE_OFFSET) |
2248		     (0x0 << SPLIT_BYPASS_OFFSET) |
2249		     (split_size << SPLIT_SIZE_OFFSET));
2250
2251	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2252		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2253		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2254
2255out:
2256	return 0;
2257}
2258
2259static void split_bypass(struct dram_info *dram)
2260{
2261	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2262	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2263		return;
2264
2265	/* bypass split */
2266	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2267		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2268		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2269		     (0x1 << SPLIT_BYPASS_OFFSET) |
2270		     (0x0 << SPLIT_SIZE_OFFSET));
2271}
2272
2273static void dram_all_config(struct dram_info *dram,
2274			    struct rv1126_sdram_params *sdram_params)
2275{
2276	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2277	u32 dram_type = sdram_params->base.dramtype;
2278	void __iomem *pctl_base = dram->pctl;
2279	u32 sys_reg2 = 0;
2280	u32 sys_reg3 = 0;
2281	u64 cs_cap[2];
2282	u32 cs_pst;
2283
2284	set_ddrconfig(dram, cap_info->ddrconfig);
2285	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2286			 &sys_reg3, 0);
2287	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2288	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2289
2290	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2291	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2292
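	/*
	 * With two ranks, cs0's effective size is fixed by the CS decode bit
	 * position from ADDRMAP0; when that bit sits above bit 28, force
	 * cs0_cap to 2^cs_pst.
	 */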
2293	if (cap_info->rank == 2) {
2294		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2295			6 + 2;
2296		if (cs_pst > 28)
2297			cs_cap[0] = 1llu << cs_pst;
2298	}
2299
2300	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2301			(((cs_cap[0] >> 20) / 64) & 0xff),
2302			&dram->msch->devicesize);
2303	update_noc_timing(dram, sdram_params);
2304}
2305
2306static void enable_low_power(struct dram_info *dram,
2307			     struct rv1126_sdram_params *sdram_params)
2308{
2309	void __iomem *pctl_base = dram->pctl;
2310	u32 grf_lp_con;
2311
2312	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2313
2314	if (sdram_params->base.dramtype == DDR4)
2315		grf_lp_con = (0x7 << 16) | (1 << 1);
2316	else if (sdram_params->base.dramtype == DDR3)
2317		grf_lp_con = (0x7 << 16) | (1 << 0);
2318	else
2319		grf_lp_con = (0x7 << 16) | (1 << 2);
2320
	/* enable lpckdis_en */
2322	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2323	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2324
2325	/* enable sr, pd */
2326	if (dram->pd_idle == 0)
2327		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2328	else
2329		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2330	if (dram->sr_idle == 0)
2331		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2332	else
2333		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2334	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2335}
2336
2337static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2338{
2339	u32 split;
2340
2341	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2342	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2343		split = 0;
2344	else
2345		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2346			SPLIT_SIZE_MASK;
2347
2348	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2349			     &sdram_params->base, split);
2350}
2351
2352static int sdram_init_(struct dram_info *dram,
2353		       struct rv1126_sdram_params *sdram_params, u32 post_init)
2354{
2355	void __iomem *pctl_base = dram->pctl;
2356	void __iomem *phy_base = dram->phy;
2357	u32 ddr4_vref;
2358	u32 mr_tmp;
2359
2360	rkclk_configure_ddr(dram, sdram_params);
2361
2362	rkclk_ddr_reset(dram, 1, 1, 1, 1);
2363	udelay(10);
2364
2365	rkclk_ddr_reset(dram, 1, 1, 1, 0);
2366	phy_cfg(dram, sdram_params);
2367
2368	rkclk_ddr_reset(dram, 1, 1, 0, 0);
2369	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2370
2371	rkclk_ddr_reset(dram, 1, 0, 0, 0);
2372	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2373		 dram->sr_idle, dram->pd_idle);
2374
2375	if (sdram_params->ch.cap_info.bw == 2) {
2376		/* 32bit interface use pageclose */
2377		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1 with pageclose_timer = 0 misbehaves with LPDDR4 at 328 MHz */
2379		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2380	} else {
2381		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2382	}
2383
2384#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2385	u32 tmp, trefi;
2386
2387	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2388	trefi = (tmp >> 16) & 0xfff;
2389	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2390	       pctl_base + DDR_PCTL2_RFSHTMG);
2391#endif
2392
2393	/* set frequency_mode */
2394	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2395	/* set target_frequency to Frequency 0 */
2396	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2397
2398	set_ds_odt(dram, sdram_params, 0);
2399	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2400	set_ctl_address_map(dram, sdram_params);
2401
2402	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2403
2404	rkclk_ddr_reset(dram, 0, 0, 0, 0);
2405
2406	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
2407		continue;
2408
2409	if (sdram_params->base.dramtype == LPDDR3) {
2410		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2411	} else if (sdram_params->base.dramtype == LPDDR4) {
2412		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2413		/* MR11 */
2414		pctl_write_mr(dram->pctl, 3, 11,
2415			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2416			      LPDDR4);
2417		/* MR12 */
2418		pctl_write_mr(dram->pctl, 3, 12,
2419			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2420			      LPDDR4);
2421
2422		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2423		/* MR22 */
2424		pctl_write_mr(dram->pctl, 3, 22,
2425			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2426			      LPDDR4);
2427	}
2428
2429	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
2430		if (post_init != 0)
2431			printascii("DTT cs0 error\n");
2432		return -1;
2433	}
2434
	if (sdram_params->base.dramtype == LPDDR4) {
		/*
		 * Sanity-check the MR read path: 0x4d is presumably the
		 * LPDDR4 MR14 reset default.
		 */
		mr_tmp = read_mr(dram, 1, 14, LPDDR4);
		if (mr_tmp != 0x4d)
			return -1;

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}
2449	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2450		if (data_training(dram, 1, sdram_params, 0,
2451				  READ_GATE_TRAINING) != 0) {
2452			printascii("DTT cs1 error\n");
2453			return -1;
2454		}
2455	}
2456
2457	if (sdram_params->base.dramtype == DDR4) {
2458		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2459		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2460				  sdram_params->base.dramtype);
2461	}
2462
2463	dram_all_config(dram, sdram_params);
2464	enable_low_power(dram, sdram_params);
2465
2466	return 0;
2467}
2468
2469static u64 dram_detect_cap(struct dram_info *dram,
2470			   struct rv1126_sdram_params *sdram_params,
2471			   unsigned char channel)
2472{
2473	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2474	void __iomem *pctl_base = dram->pctl;
2475	void __iomem *phy_base = dram->phy;
2476	u32 mr8;
2477
2478	u32 bktmp;
2479	u32 coltmp;
2480	u32 rowtmp;
2481	u32 cs;
2482	u32 dram_type = sdram_params->base.dramtype;
2483	u32 pwrctl;
2484	u32 i, dq_map;
2485	u32 byte1 = 0, byte0 = 0;
2486	u32 tmp, byte;
2487	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2488	struct dq_map_info *map_info = (struct dq_map_info *)
2489				       ((void *)common_info + index->dq_map_index.offset * 4);
2490
2491	cap_info->bw = dram_type == DDR3 ? 0 : 1;
2492	if (dram_type != LPDDR4) {
2493		if (dram_type != DDR4) {
2494			coltmp = 12;
2495			bktmp = 3;
2496			if (dram_type == LPDDR2)
2497				rowtmp = 15;
2498			else
2499				rowtmp = 16;
2500
2501			if (sdram_detect_col(cap_info, coltmp) != 0)
2502				goto cap_err;
2503
2504			sdram_detect_bank(cap_info, coltmp, bktmp);
2505			if (dram_type != LPDDR3)
2506				sdram_detect_dbw(cap_info, dram_type);
2507		} else {
2508			coltmp = 10;
2509			bktmp = 4;
2510			rowtmp = 17;
2511
2512			cap_info->col = 10;
2513			cap_info->bk = 2;
2514			sdram_detect_bg(cap_info, coltmp);
2515		}
2516
2517		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2518			goto cap_err;
2519
2520		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2521	} else {
2522		cap_info->col = 10;
2523		cap_info->bk = 3;
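		/*
		 * LPDDR4 MR8: OP[7:6] is the I/O width (00b = x16) and
		 * OP[5:2] the density; row count and the 3/4-row flag are
		 * derived from these below.
		 */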
2524		mr8 = read_mr(dram, 1, 8, dram_type);
2525		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2526		mr8 = (mr8 >> 2) & 0xf;
		if (mr8 <= 6) {
2528			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2529		} else if (mr8 == 0xc) {
2530			cap_info->cs0_row = 13;
2531		} else {
2532			printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2533			goto cap_err;
2534		}
2535		if (cap_info->dbw == 0)
2536			cap_info->cs0_row++;
2537		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2538		if (cap_info->cs0_row >= 17) {
2539			printascii("Cap ERR: ");
2540			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2541			goto cap_err;
			/*
			 * cap_info->cs0_row = 16;
			 * cap_info->row_3_4 = 0;
			 */
2544		}
2545	}
2546
2547	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2548	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2549
2550	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2551		cs = 1;
2552	else
2553		cs = 0;
2554	cap_info->rank = cs + 1;
2555
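	/*
	 * Enable all four DQ byte lanes, then let gate training report which
	 * ones respond to deduce the actual bus width and byte mapping.
	 */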
2556	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2557
2558	tmp = data_training_rg(dram, 0, dram_type) & 0xf;
2559
2560	if (tmp == 0) {
2561		cap_info->bw = 2;
2562	} else {
2563		if (dram_type == DDR3 || dram_type == DDR4) {
2564			dq_map = 0;
2565			byte = 0;
2566			for (i = 0; i < 4; i++) {
2567				if ((tmp & BIT(i)) == 0) {
2568					dq_map |= byte << (i * 2);
2569					byte++;
2570				}
2571			}
2572			cap_info->bw = byte / 2;
2573			for (i = 0; i < 4; i++) {
2574				if ((tmp & BIT(i)) != 0) {
2575					dq_map |= byte << (i * 2);
2576					byte++;
2577				}
2578			}
2579			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24);
2580		} else {
2581			dq_map = readl(PHY_REG(phy_base, 0x4f));
2582			for (i = 0; i < 4; i++) {
2583				if (((dq_map >> (i * 2)) & 0x3) == 0)
2584					byte0 = i;
2585				if (((dq_map >> (i * 2)) & 0x3) == 1)
2586					byte1 = i;
2587			}
2588			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2589					BIT(byte0) | BIT(byte1));
2590			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2591				cap_info->bw = 1;
2592			else
2593				cap_info->bw = 0;
2594		}
2595	}
2596	if (cap_info->bw > 0)
2597		cap_info->dbw = 1;
2598
2599	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2600
2601	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2602	if (cs) {
2603		cap_info->cs1_row = cap_info->cs0_row;
2604		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2605	} else {
2606		cap_info->cs1_row = 0;
2607		cap_info->cs1_high16bit_row = 0;
2608	}
2609
2610	if (dram_type == LPDDR3)
2611		sdram_detect_dbw(cap_info, dram_type);
2612
2613	return 0;
2614cap_err:
2615	return -1;
2616}
2617
2618static int dram_detect_cs1_row(struct dram_info *dram,
2619			       struct rv1126_sdram_params *sdram_params,
2620			       unsigned char channel)
2621{
2622	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2623	void __iomem *pctl_base = dram->pctl;
2624	u32 ret = 0;
2625	void __iomem *test_addr;
2626	u32 row, bktmp, coltmp, bw;
2627	u64 cs0_cap;
2628	u32 byte_mask;
2629	u32 cs_pst;
2630	u32 cs_add = 0;
2631	u32 max_row;
2632
2633	if (cap_info->rank == 2) {
2634		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2635			6 + 2;
2636		if (cs_pst < 28)
2637			cs_add = 1;
2638
		cs0_cap = 1llu << cs_pst;
2640
2641		if (sdram_params->base.dramtype == DDR4) {
2642			if (cap_info->dbw == 0)
2643				bktmp = cap_info->bk + 2;
2644			else
2645				bktmp = cap_info->bk + 1;
2646		} else {
2647			bktmp = cap_info->bk;
2648		}
2649		bw = cap_info->bw;
2650		coltmp = cap_info->col;
2651
2652		if (bw == 2)
2653			byte_mask = 0xFFFF;
2654		else
2655			byte_mask = 0xFF;
2656
2657		max_row = (cs_pst == 31) ? 30 : 31;
2658
2659		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2660
2661		row = (cap_info->cs0_row > max_row) ? max_row :
2662			cap_info->cs0_row;
2663
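		/*
		 * Walk candidate row counts downward: a row count that is too
		 * large makes test_addr alias back onto the cs1 base, so the
		 * pattern and the zero written there would clash.
		 */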
2664		for (; row > 12; row--) {
2665			test_addr = (void __iomem *)(CFG_SYS_SDRAM_BASE +
2666				    (u32)cs0_cap +
2667				    (1ul << (row + bktmp + coltmp +
2668					     cs_add + bw - 1ul)));
2669
2670			writel(0, CFG_SYS_SDRAM_BASE + (u32)cs0_cap);
2671			writel(PATTERN, test_addr);
2672
2673			if (((readl(test_addr) & byte_mask) ==
2674			     (PATTERN & byte_mask)) &&
2675			    ((readl(CFG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2676			      byte_mask) == 0)) {
2677				ret = row;
2678				break;
2679			}
2680		}
2681	}
2682
2683	return ret;
2684}
2685
2686/* return: 0 = success, other = fail */
2687static int sdram_init_detect(struct dram_info *dram,
2688			     struct rv1126_sdram_params *sdram_params)
2689{
2690	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	int ret;
2692	u32 sys_reg = 0;
2693	u32 sys_reg3 = 0;
2694	struct sdram_head_info_index_v2 *index =
2695		(struct sdram_head_info_index_v2 *)common_info;
2696	struct dq_map_info *map_info;
2697
2698	map_info = (struct dq_map_info *)((void *)common_info +
2699		index->dq_map_index.offset * 4);
2700
2701	if (sdram_init_(dram, sdram_params, 0)) {
2702		if (sdram_params->base.dramtype == DDR3) {
2703			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
2704					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
2705					(0x0 << 0)) << 24);
2706			if (sdram_init_(dram, sdram_params, 0))
2707				return -1;
2708		} else {
2709			return -1;
2710		}
2711	}
2712
2713	if (sdram_params->base.dramtype == DDR3) {
2714		writel(PATTERN, CFG_SYS_SDRAM_BASE);
2715		if (readl(CFG_SYS_SDRAM_BASE) != PATTERN)
2716			return -1;
2717	}
2718
2719	split_bypass(dram);
2720	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2721		return -1;
2722
2723	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2724				   sdram_params->base.dramtype);
2725	ret = sdram_init_(dram, sdram_params, 1);
2726	if (ret != 0)
2727		goto out;
2728
2729	cap_info->cs1_row =
2730		dram_detect_cs1_row(dram, sdram_params, 0);
2731	if (cap_info->cs1_row) {
2732		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2733		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2734		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2735				    sys_reg, sys_reg3, 0);
2736		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2737		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2738	}
2739
2740	sdram_detect_high_row(cap_info);
2741	split_setup(dram, sdram_params);
2742out:
2743	return ret;
2744}
2745
2746struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2747{
2748	u32 i;
2749	u32 offset = 0;
2750	struct ddr2_3_4_lp2_3_info *ddr_info;
2751
2752	if (!freq_mhz) {
2753		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2754		if (ddr_info)
2755			freq_mhz =
2756				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2757				DDR_FREQ_MASK;
2758		else
2759			freq_mhz = 0;
2760	}
2761
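	/* pick the fastest config whose ddr_freq does not exceed freq_mhz */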
2762	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2763		if (sdram_configs[i].base.ddr_freq == 0 ||
2764		    freq_mhz < sdram_configs[i].base.ddr_freq)
2765			break;
2766	}
2767	offset = i == 0 ? 0 : i - 1;
2768
2769	return &sdram_configs[offset];
2770}
2771
2772static const u16 pctl_need_update_reg[] = {
2773	DDR_PCTL2_RFSHTMG,
2774	DDR_PCTL2_INIT3,
2775	DDR_PCTL2_INIT4,
2776	DDR_PCTL2_INIT6,
2777	DDR_PCTL2_INIT7,
2778	DDR_PCTL2_DRAMTMG0,
2779	DDR_PCTL2_DRAMTMG1,
2780	DDR_PCTL2_DRAMTMG2,
2781	DDR_PCTL2_DRAMTMG3,
2782	DDR_PCTL2_DRAMTMG4,
2783	DDR_PCTL2_DRAMTMG5,
2784	DDR_PCTL2_DRAMTMG6,
2785	DDR_PCTL2_DRAMTMG7,
2786	DDR_PCTL2_DRAMTMG8,
2787	DDR_PCTL2_DRAMTMG9,
2788	DDR_PCTL2_DRAMTMG12,
2789	DDR_PCTL2_DRAMTMG13,
2790	DDR_PCTL2_DRAMTMG14,
2791	DDR_PCTL2_ZQCTL0,
2792	DDR_PCTL2_DFITMG0,
2793	DDR_PCTL2_ODTCFG
2794};
2795
2796static const u16 phy_need_update_reg[] = {
2797	0x14,
2798	0x18,
2799	0x1c
2800};
2801
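/*
 * Stage a frequency set point: copy the relevant timing registers into the
 * controller's dst_fsp register set, mirror the LPDDR4 mode registers into
 * the PHY's per-FSP copies, and refresh the NoC timings.
 */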
2802static void pre_set_rate(struct dram_info *dram,
2803			 struct rv1126_sdram_params *sdram_params,
2804			 u32 dst_fsp, u32 dst_fsp_lp4)
2805{
2806	u32 i, j, find;
2807	void __iomem *pctl_base = dram->pctl;
2808	void __iomem *phy_base = dram->phy;
2809	u32 phy_offset;
2810	u32 mr_tmp;
2811	u32 dramtype = sdram_params->base.dramtype;
2812
2813	sw_set_req(dram);
2814	/* pctl timing update */
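	/*
	 * Both tables are assumed to list registers in the same order, so the
	 * inner scan can resume from the previous match.
	 */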
2815	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2816		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2817		     j++) {
2818			if (sdram_params->pctl_regs.pctl[j][0] ==
2819			    pctl_need_update_reg[i]) {
2820				writel(sdram_params->pctl_regs.pctl[j][1],
2821				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2822				       pctl_need_update_reg[i]);
2823				find = j;
2824				break;
2825			}
2826		}
2827	}
2828
2829#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2830	u32 tmp, trefi;
2831
2832	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2833	trefi = (tmp >> 16) & 0xfff;
2834	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2835	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2836#endif
2837
2838	sw_set_ack(dram);
2839
2840	/* phy timing update */
2841	if (dst_fsp == 0)
2842		phy_offset = 0;
2843	else
2844		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2845	/* cl cwl al update */
2846	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2847		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2848		     j++) {
2849			if (sdram_params->phy_regs.phy[j][0] ==
2850			    phy_need_update_reg[i]) {
2851				writel(sdram_params->phy_regs.phy[j][1],
2852				       phy_base + phy_offset +
2853				       phy_need_update_reg[i]);
2854				find = j;
2855				break;
2856			}
2857		}
2858	}
2859
2860	set_ds_odt(dram, sdram_params, dst_fsp);
2861	if (dramtype == LPDDR4) {
2862		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2863			       DDR_PCTL2_INIT4);
2864		/* MR13 */
2865		pctl_write_mr(dram->pctl, 3, 13,
2866			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2867			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2868			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
2869		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2870				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2871				      ((0x2 << 6) >> dst_fsp_lp4),
2872				       PHY_REG(phy_base, 0x1b));
2873		/* MR3 */
2874		pctl_write_mr(dram->pctl, 3, 3,
2875			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
2876			      PCTL2_MR_MASK,
2877			      dramtype);
2878		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
2879		       PHY_REG(phy_base, 0x19));
2880
2881		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2882			       DDR_PCTL2_INIT3);
2883		/* MR1 */
2884		pctl_write_mr(dram->pctl, 3, 1,
2885			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
2886			      PCTL2_MR_MASK,
2887			      dramtype);
2888		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
2889		       PHY_REG(phy_base, 0x17));
2890		/* MR2 */
2891		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
2892			      dramtype);
2893		writel(mr_tmp & PCTL2_MR_MASK,
2894		       PHY_REG(phy_base, 0x18));
2895
2896		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2897			       DDR_PCTL2_INIT6);
2898		/* MR11 */
2899		pctl_write_mr(dram->pctl, 3, 11,
2900			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2901			      dramtype);
2902		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2903		       PHY_REG(phy_base, 0x1a));
2904		/* MR12 */
2905		pctl_write_mr(dram->pctl, 3, 12,
2906			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2907			      dramtype);
2908
2909		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2910			       DDR_PCTL2_INIT7);
2911		/* MR22 */
2912		pctl_write_mr(dram->pctl, 3, 22,
2913			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2914			      dramtype);
2915		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2916		       PHY_REG(phy_base, 0x1d));
2917		/* MR14 */
2918		pctl_write_mr(dram->pctl, 3, 14,
2919			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2920			      dramtype);
2921		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2922		       PHY_REG(phy_base, 0x1c));
2923	}
2924
2925	update_noc_timing(dram, sdram_params);
2926}
2927
2928static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
2929			   struct rv1126_sdram_params *sdram_params)
2930{
2931	void __iomem *pctl_base = dram->pctl;
2932	void __iomem *phy_base = dram->phy;
2933	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
2934	u32 temp, temp1;
2935	struct ddr2_3_4_lp2_3_info *ddr_info;
2936
2937	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
2938
2939	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
2940
2941	if (sdram_params->base.dramtype == LPDDR4) {
2942		p_fsp_param->rd_odt_up_en = 0;
2943		p_fsp_param->rd_odt_down_en = 1;
2944	} else {
2945		p_fsp_param->rd_odt_up_en =
2946			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
2947		p_fsp_param->rd_odt_down_en =
2948			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
2949	}
2950
2951	if (p_fsp_param->rd_odt_up_en)
2952		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
2953	else if (p_fsp_param->rd_odt_down_en)
2954		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
2955	else
2956		p_fsp_param->rd_odt = 0;
2957	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
2958	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
2959	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
2960	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
2961	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
2962
2963	if (sdram_params->base.dramtype == DDR3) {
2964		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2965			     DDR_PCTL2_INIT3);
2966		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2967		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
2968		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
2969		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2970	} else if (sdram_params->base.dramtype == DDR4) {
2971		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2972			     DDR_PCTL2_INIT3);
2973		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2974		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
2975		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
2976		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2977	} else if (sdram_params->base.dramtype == LPDDR3) {
2978		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2979			     DDR_PCTL2_INIT4);
2980		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2981		p_fsp_param->ds_pdds = temp & 0xf;
2982
2983		p_fsp_param->dq_odt = lp3_odt_value;
2984		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2985	} else if (sdram_params->base.dramtype == LPDDR4) {
2986		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2987			     DDR_PCTL2_INIT4);
2988		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2989		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
2990
2991		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2992			     DDR_PCTL2_INIT6);
2993		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
2994		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
2995		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
2996
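		/*
		 * Take the CA vref midpoint between the extreme passing
		 * bounds apparently recorded for the two channels; the range
		 * bit from PHY_0x1e is OR-ed in below.
		 */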
2997		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
2998			   readl(PHY_REG(phy_base, 0x3ce)));
2999		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
3000			    readl(PHY_REG(phy_base, 0x3de)));
3001		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
3002		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
3003			   readl(PHY_REG(phy_base, 0x3cf)));
3004		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
3005			    readl(PHY_REG(phy_base, 0x3df)));
3006		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
3007		p_fsp_param->vref_ca[0] |=
3008			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3009		p_fsp_param->vref_ca[1] |=
3010			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3011
3012		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3013					      3) & 0x1;
3014	}
3015
3016	p_fsp_param->noc_timings.ddrtiminga0 =
3017		sdram_params->ch.noc_timings.ddrtiminga0;
3018	p_fsp_param->noc_timings.ddrtimingb0 =
3019		sdram_params->ch.noc_timings.ddrtimingb0;
3020	p_fsp_param->noc_timings.ddrtimingc0 =
3021		sdram_params->ch.noc_timings.ddrtimingc0;
3022	p_fsp_param->noc_timings.devtodev0 =
3023		sdram_params->ch.noc_timings.devtodev0;
3024	p_fsp_param->noc_timings.ddrmode =
3025		sdram_params->ch.noc_timings.ddrmode;
3026	p_fsp_param->noc_timings.ddr4timing =
3027		sdram_params->ch.noc_timings.ddr4timing;
3028	p_fsp_param->noc_timings.agingx0 =
3029		sdram_params->ch.noc_timings.agingx0;
3030	p_fsp_param->noc_timings.aging0 =
3031		sdram_params->ch.noc_timings.aging0;
3032	p_fsp_param->noc_timings.aging1 =
3033		sdram_params->ch.noc_timings.aging1;
3034	p_fsp_param->noc_timings.aging2 =
3035		sdram_params->ch.noc_timings.aging2;
3036	p_fsp_param->noc_timings.aging3 =
3037		sdram_params->ch.noc_timings.aging3;
3038
3039	p_fsp_param->flag = FSP_FLAG;
3040}
3041
3042static void copy_fsp_param_to_ddr(void)
3043{
3044	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3045	       sizeof(fsp_param));
3046}
3047
3048static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3049			     struct sdram_cap_info *cap_info, u32 dram_type,
3050			     u32 freq)
3051{
3052	u64 cs0_cap;
3053	u32 die_cap;
3054	u32 trfc_ns, trfc4_ns;
3055	u32 trfc, txsnr;
3056	u32 txs_abort_fast = 0;
3057	u32 tmp;
3058
3059	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3060	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3061
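	/*
	 * tRFC (and tRFC4 for DDR4) below are nanosecond values selected by
	 * die density; txsnr adds a 10 ns margin and rounds up to clocks.
	 */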
3062	switch (dram_type) {
3063	case DDR3:
3064		if (die_cap <= DIE_CAP_512MBIT)
3065			trfc_ns = 90;
3066		else if (die_cap <= DIE_CAP_1GBIT)
3067			trfc_ns = 110;
3068		else if (die_cap <= DIE_CAP_2GBIT)
3069			trfc_ns = 160;
3070		else if (die_cap <= DIE_CAP_4GBIT)
3071			trfc_ns = 260;
3072		else
3073			trfc_ns = 350;
3074		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3075		break;
3076
3077	case DDR4:
3078		if (die_cap <= DIE_CAP_2GBIT) {
3079			trfc_ns = 160;
3080			trfc4_ns = 90;
3081		} else if (die_cap <= DIE_CAP_4GBIT) {
3082			trfc_ns = 260;
3083			trfc4_ns = 110;
3084		} else if (die_cap <= DIE_CAP_8GBIT) {
3085			trfc_ns = 350;
3086			trfc4_ns = 160;
3087		} else {
3088			trfc_ns = 550;
3089			trfc4_ns = 260;
3090		}
3091		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3092		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3093		break;
3094
3095	case LPDDR3:
3096		if (die_cap <= DIE_CAP_4GBIT)
3097			trfc_ns = 130;
3098		else
3099			trfc_ns = 210;
3100		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3101		break;
3102
3103	case LPDDR4:
3104		if (die_cap <= DIE_CAP_2GBIT)
3105			trfc_ns = 130;
3106		else if (die_cap <= DIE_CAP_4GBIT)
3107			trfc_ns = 180;
3108		else if (die_cap <= DIE_CAP_8GBIT)
3109			trfc_ns = 280;
3110		else
3111			trfc_ns = 380;
3112		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3113		break;
3114
3115	default:
3116		return;
3117	}
3118	trfc = (trfc_ns * freq + 999) / 1000;
3119
3120	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3121		switch (pctl_regs->pctl[i][0]) {
3122		case DDR_PCTL2_RFSHTMG:
3123			tmp = pctl_regs->pctl[i][1];
3124			/* t_rfc_min */
3125			tmp &= ~((u32)0x3ff);
3126			tmp |= ((trfc + 1) / 2) & 0x3ff;
3127			pctl_regs->pctl[i][1] = tmp;
3128			break;
3129
3130		case DDR_PCTL2_DRAMTMG8:
3131			if (dram_type == DDR3 || dram_type == DDR4) {
3132				tmp = pctl_regs->pctl[i][1];
3133				/* t_xs_x32 */
3134				tmp &= ~((u32)0x7f);
3135				tmp |= ((txsnr + 63) / 64) & 0x7f;
3136
3137				if (dram_type == DDR4) {
3138					/* t_xs_abort_x32 */
3139					tmp &= ~((u32)(0x7f << 16));
3140					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3141					/* t_xs_fast_x32 */
3142					tmp &= ~((u32)(0x7f << 24));
3143					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3144				}
3145
3146				pctl_regs->pctl[i][1] = tmp;
3147			}
3148			break;
3149
3150		case DDR_PCTL2_DRAMTMG14:
3151			if (dram_type == LPDDR3 ||
3152			    dram_type == LPDDR4) {
3153				tmp = pctl_regs->pctl[i][1];
3154				/* t_xsr */
3155				tmp &= ~((u32)0xfff);
3156				tmp |= ((txsnr + 1) / 2) & 0xfff;
3157				pctl_regs->pctl[i][1] = tmp;
3158			}
3159			break;
3160
3161		default:
3162			break;
3163		}
3164	}
3165}
3166
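/*
 * Switch to a new frequency set point: stage the timings, put the DRAM in
 * self-refresh, retarget the DPLL and PHY PLL with the PHY held in reset,
 * point the controller at dst_fsp, exit self-refresh, rewrite the mode
 * registers, then retrain and save the FSP parameters.
 */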
3167void ddr_set_rate(struct dram_info *dram,
3168		  struct rv1126_sdram_params *sdram_params,
3169		  u32 freq, u32 cur_freq, u32 dst_fsp,
3170		  u32 dst_fsp_lp4, u32 training_en)
3171{
3172	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3173	u32 mr_tmp;
3174	u32 lp_stat;
3175	u32 dramtype = sdram_params->base.dramtype;
3176	struct rv1126_sdram_params *sdram_params_new;
3177	void __iomem *pctl_base = dram->pctl;
3178	void __iomem *phy_base = dram->phy;
3179
3180	lp_stat = low_power_update(dram, 0);
3181	sdram_params_new = get_default_sdram_config(freq);
3182	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3183	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3184
3185	pctl_modify_trfc(&sdram_params_new->pctl_regs,
3186			 &sdram_params->ch.cap_info, dramtype, freq);
3187	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3188
3189	while ((readl(pctl_base + DDR_PCTL2_STAT) &
3190			 PCTL2_OPERATING_MODE_MASK) ==
3191			 PCTL2_OPERATING_MODE_SR)
3192		continue;
3193
3194	dest_dll_off = 0;
3195	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3196			  DDR_PCTL2_INIT3);
3197	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
3198	    (dramtype == DDR4 && !(dst_init3 & 1)))
3199		dest_dll_off = 1;
3200
3201	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
3202	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
3203			  DDR_PCTL2_INIT3);
3204	cur_init3 &= PCTL2_MR_MASK;
3205	cur_dll_off = 1;
3206	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
3207	    (dramtype == DDR4 && (cur_init3 & 1)))
3208		cur_dll_off = 0;
3209
3210	if (!cur_dll_off) {
3211		if (dramtype == DDR3)
3212			cur_init3 |= 1;
3213		else
3214			cur_init3 &= ~1;
3215		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
3216	}
3217
3218	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3219		     PCTL2_DIS_AUTO_REFRESH);
3220	update_refresh_reg(dram);
3221
3222	enter_sr(dram, 1);
3223
3224	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3225	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
3226	       &dram->pmugrf->soc_con[0]);
3227	sw_set_req(dram);
3228	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
3229		     PCTL2_DFI_INIT_COMPLETE_EN);
3230	sw_set_ack(dram);
3231
3232	sw_set_req(dram);
3233	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
3234		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3235	else
3236		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3237
3238	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
3239		     PCTL2_DIS_SRX_ZQCL);
3240	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
3241		     PCTL2_DIS_SRX_ZQCL);
3242	sw_set_ack(dram);
3243
3244	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
3245	       &dram->cru->clkgate_con[21]);
3246	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3247					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
3248					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
3249			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3250
3251	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3252	rkclk_set_dpll(dram, freq * MHz / 2);
3253	phy_pll_set(dram, freq * MHz, 0);
3254	phy_pll_set(dram, freq * MHz, 1);
3255	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3256
3257	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3258			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
3259			&dram->pmugrf->soc_con[0]);
3260	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
3261	       &dram->cru->clkgate_con[21]);
3262	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3263					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
3264					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
3265			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3266	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
3267	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
3268		continue;
3269
3270	sw_set_req(dram);
3271	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
3272	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
3273	sw_set_ack(dram);
3274	update_refresh_reg(dram);
3275	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
3276
3277	enter_sr(dram, 0);
3278
3279	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3280	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3281
3282	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
3283	if (dramtype == LPDDR3) {
3284		pctl_write_mr(dram->pctl, 3, 1,
3285			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
3286			      PCTL2_MR_MASK,
3287			      dramtype);
3288		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
3289			      dramtype);
3290		pctl_write_mr(dram->pctl, 3, 3,
3291			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
3292			      PCTL2_MR_MASK,
3293			      dramtype);
3294		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
3295	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
3296		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
3297			      dramtype);
3298		if (!dest_dll_off) {
3299			pctl_write_mr(dram->pctl, 3, 0,
3300				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3301				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
3302				      dramtype);
3303			udelay(2);
3304		}
3305		pctl_write_mr(dram->pctl, 3, 0,
3306			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
3307			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
3308			      dramtype);
3309		pctl_write_mr(dram->pctl, 3, 2,
3310			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
3311			       PCTL2_MR_MASK), dramtype);
3312		if (dramtype == DDR4) {
3313			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
3314				      dramtype);
3315			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3316				       DDR_PCTL2_INIT6);
3317			pctl_write_mr(dram->pctl, 3, 4,
3318				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
3319				       PCTL2_MR_MASK,
3320				      dramtype);
3321			pctl_write_mr(dram->pctl, 3, 5,
3322				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
3323				      PCTL2_MR_MASK,
3324				      dramtype);
3325
3326			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3327				       DDR_PCTL2_INIT7);
3328			pctl_write_mr(dram->pctl, 3, 6,
3329				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
3330				      PCTL2_MR_MASK,
3331				      dramtype);
3332		}
3333	} else if (dramtype == LPDDR4) {
3334		pctl_write_mr(dram->pctl, 3, 13,
3335			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3336			       PCTL2_MR_MASK) & (~(BIT(7)))) |
3337			      dst_fsp_lp4 << 7, dramtype);
3338	}
3339	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3340		     PCTL2_DIS_AUTO_REFRESH);
3341	update_refresh_reg(dram);
3342
3343	/* training */
3344	high_freq_training(dram, sdram_params_new, dst_fsp);
3345	low_power_update(dram, lp_stat);
3346
3347	save_fsp_param(dram, dst_fsp, sdram_params_new);
3348}
3349
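/*
 * Cycle through f1, f2 and f3 so every frequency set point gets trained and
 * its parameters saved, then settle on f0 as the final rate.
 */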
3350static void ddr_set_rate_for_fsp(struct dram_info *dram,
3351				 struct rv1126_sdram_params *sdram_params)
3352{
3353	struct ddr2_3_4_lp2_3_info *ddr_info;
3354	u32 f0;
3355	u32 dramtype = sdram_params->base.dramtype;
3356	u32 f1, f2, f3;
3357
3358	ddr_info = get_ddr_drv_odt_info(dramtype);
3359	if (!ddr_info)
3360		return;
3361
3362	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3363	     DDR_FREQ_MASK;
3364
3365	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3366	memset((void *)&fsp_param, 0, sizeof(fsp_param));
3367
3368	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3369	     DDR_FREQ_MASK;
3370	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3371	     DDR_FREQ_MASK;
3372	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3373	     DDR_FREQ_MASK;
3374
3375	if (get_wrlvl_val(dram, sdram_params))
3376		printascii("get wrlvl value fail\n");
3377
3378	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3379		printascii("change to: ");
3380		printdec(f1);
3381		printascii("MHz\n");
3382	}
3383	ddr_set_rate(&dram_info, sdram_params, f1,
3384		     sdram_params->base.ddr_freq, 1, 1, 1);
3385
3386	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3387		printascii("change to: ");
3388		printdec(f2);
3389		printascii("MHz\n");
3390	}
3391	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3392
3393	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3394		printascii("change to: ");
3395		printdec(f3);
3396		printascii("MHz\n");
3397	}
3398	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3399
3400	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3401		printascii("change to: ");
3402		printdec(f0);
3403		printascii("MHz(final freq)\n");
3404	}
3405	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3406}
3407
3408int get_uart_config(void)
3409{
3410	struct sdram_head_info_index_v2 *index =
3411		(struct sdram_head_info_index_v2 *)common_info;
3412	struct global_info *gbl_info;
3413
3414	gbl_info = (struct global_info *)((void *)common_info +
3415		index->global_index.offset * 4);
3416
3417	return gbl_info->uart_info;
3418}
3419
3420/* return: 0 = success, other = fail */
3421static int rv1126_dmc_init(struct udevice *dev)
3422{
3423	struct rv1126_sdram_params *sdram_params;
3424	int ret = 0;
3425	struct sdram_head_info_index_v2 *index =
3426		(struct sdram_head_info_index_v2 *)common_info;
3427	struct global_info *gbl_info;
3428
3429	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3430	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3431	dram_info.grf = (void *)GRF_BASE_ADDR;
3432	dram_info.cru = (void *)CRU_BASE_ADDR;
3433	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3434	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3435	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3436
3437#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3438	printascii("extended temp support\n");
3439#endif
3440	if (index->version_info != 2 ||
3441	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3442	    (index->ddr3_index.size !=
3443		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3444	    (index->ddr4_index.size !=
3445		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3446	    (index->lp3_index.size !=
3447		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3448	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3449	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3450	    index->global_index.offset == 0 ||
3451	    index->ddr3_index.offset == 0 ||
3452	    index->ddr4_index.offset == 0 ||
3453	    index->lp3_index.offset == 0 ||
3454	    index->lp4_index.offset == 0 ||
3455	    index->lp4x_index.offset == 0) {
3456		printascii("common info error\n");
3457		goto error;
3458	}
3459
3460	gbl_info = (struct global_info *)((void *)common_info +
3461		index->global_index.offset * 4);
3462
3463	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3464	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3465
3466	sdram_params = &sdram_configs[0];
3467	if (sdram_params->base.dramtype == DDR3 ||
3468	    sdram_params->base.dramtype == DDR4) {
3469		if (DDR_2T_INFO(gbl_info->info_2t))
3470			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3471		else
3472			sdram_params->pctl_regs.pctl[0][1] &=
3473				~(0x1 << 10);
3474	}
3475	ret = sdram_init_detect(&dram_info, sdram_params);
3476	if (ret) {
3477		sdram_print_dram_type(sdram_params->base.dramtype);
3478		printascii(", ");
3479		printdec(sdram_params->base.ddr_freq);
3480		printascii("MHz\n");
3481		goto error;
3482	}
3483	print_ddr_info(sdram_params);
3484#if defined(CONFIG_CMD_DDR_TEST_TOOL)
3485	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3486				  (u8)sdram_params->ch.cap_info.rank);
3487#endif
3488
3489	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3490	copy_fsp_param_to_ddr();
3491
3492#if defined(CONFIG_CMD_DDR_TEST_TOOL)
3493	save_rw_trn_result_to_ddr(&rw_trn_result);
3494#endif
3495
3496	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG))
3497		printascii("out\n");
3498
3499	return ret;
3500error:
3501	printascii("error\n");
3502	return (-1);
3503}
3504
3505#endif
3506
3507static int rv1126_dmc_probe(struct udevice *dev)
3508{
3509#if defined(CONFIG_TPL_BUILD) || \
3510	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
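	/* note: an init failure is reported but not propagated to the caller */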
3511	if (rv1126_dmc_init(dev))
3512		return 0;
3513#else
3514	struct dram_info *priv = dev_get_priv(dev);
3515
3516	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
3517	debug("%s: grf=%p\n", __func__, priv->pmugrf);
3518	priv->info.base = CFG_SYS_SDRAM_BASE;
3519	priv->info.size =
3520		rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);
3521#endif
3522	return 0;
3523}
3524
3525static int rv1126_dmc_get_info(struct udevice *dev, struct ram_info *info)
3526{
3527	struct dram_info *priv = dev_get_priv(dev);
3528
3529	*info = priv->info;
3530
3531	return 0;
3532}
3533
3534static struct ram_ops rv1126_dmc_ops = {
3535	.get_info = rv1126_dmc_get_info,
3536};
3537
3538static const struct udevice_id rv1126_dmc_ids[] = {
3539	{ .compatible = "rockchip,rv1126-dmc" },
3540	{ }
3541};
3542
3543U_BOOT_DRIVER(dmc_rv1126) = {
3544	.name = "rockchip_rv1126_dmc",
3545	.id = UCLASS_RAM,
3546	.of_match = rv1126_dmc_ids,
3547	.ops = &rv1126_dmc_ops,
3548	.probe = rv1126_dmc_probe,
3549	.priv_auto = sizeof(struct dram_info),
3550};
3551