1// SPDX-License-Identifier: GPL-2.0
2/*
3 * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
4 */
5
6#include <common.h>
7#include <debug_uart.h>
8#include <dm.h>
9#include <init.h>
10#include <log.h>
11#include <ram.h>
12#include <syscon.h>
13#include <asm/arch-rockchip/clock.h>
14#include <asm/arch-rockchip/cru_px30.h>
15#include <asm/arch-rockchip/grf_px30.h>
16#include <asm/arch-rockchip/hardware.h>
17#include <asm/arch-rockchip/sdram.h>
18#include <asm/arch-rockchip/sdram_px30.h>
19#include <linux/delay.h>
20
/*
 * Driver state: register-block pointers plus the probed RAM info.
 * The controller/PHY/clock handles are only needed in the TPL build,
 * where this code performs the actual DRAM init; U-Boot proper only
 * reads the size back through pmugrf.
 */
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct ddr_pctl_regs *pctl;	/* DDR controller (upctl2) */
	struct ddr_phy_regs *phy;	/* inno DDR PHY */
	struct px30_cru *cru;		/* clock & soft-reset unit */
	struct msch_regs *msch;		/* NoC memory scheduler */
	struct px30_ddr_grf_regs *ddr_grf;
	struct px30_grf *grf;
#endif
	struct ram_info info;		/* base/size reported to the RAM uclass */
	struct px30_pmugrf *pmugrf;	/* os_reg[2]/[3] hold the encoded DRAM cap */
};
33
34#ifdef CONFIG_TPL_BUILD
35
/*
 * Candidate row/bank/col geometries for DDR3/LPDDR2/LPDDR3; the array
 * index is the "ddrconfig" value returned by calculate_ddrconfig().
 */
u8 ddr_cfg_2_rbc[] = {
	/*
	 * [6:4] max row: 13+n
	 * [3]  bank(0:4bank,1:8bank)
	 * [2:0]    col(10+n)
	 */
	((5 << 4) | (1 << 3) | 0), /* 0 */
	((5 << 4) | (1 << 3) | 1), /* 1 */
	((4 << 4) | (1 << 3) | 2), /* 2 */
	((3 << 4) | (1 << 3) | 3), /* 3 */
	((2 << 4) | (1 << 3) | 4), /* 4 */
	((5 << 4) | (0 << 3) | 2), /* 5 */
	((4 << 4) | (1 << 3) | 2), /* 6 */
	/*((0<<3)|3),*/	 /* 12 for ddr4 */
	/*((1<<3)|1),*/  /* 13 B,C exchange for rkvdec */
};
52
53/*
54 * for ddr4 if ddrconfig=7, upctl should set 7 and noc should
55 * set to 1 for more efficient.
56 * noc ddrconf, upctl addrmap
57 * 1  7
58 * 2  8
59 * 3  9
60 * 12 10
61 * 5  11
62 */
/*
 * DDR4 translation table: index is (upctl ddrconf - 7), value is the
 * NoC/scheduler ddrconf index (see the mapping table in the comment
 * above).
 */
u8 d4_rbc_2_d3_rbc[] = {
	1, /* 7 */
	2, /* 8 */
	3, /* 9 */
	12, /* 10 */
	5, /* 11 */
};
70
71/*
72 * row higher than cs should be disabled by set to 0xf
73 * rank addrmap calculate by real cap.
74 */
/*
 * row higher than cs should be disabled by set to 0xf
 * rank addrmap calculate by real cap.
 *
 * One row per ddrconfig index; the 8 words are written verbatim to
 * ADDRMAP1..ADDRMAP8 by set_ctl_address_map().
 */
u32 addrmap[][8] = {
	/* map0 map1,   map2,       map3,       map4,      map5
	 * map6,        map7,       map8
	 * -------------------------------------------------------
	 * bk2-0       col 5-2     col 9-6    col 11-10   row 11-0
	 * row 15-12   row 17-16   bg1,0
	 * -------------------------------------------------------
	 * 4,3,2       5-2         9-6                    6
	 *                         3,2
	 */
	{0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000505, 0x3f3f}, /* 0 */
	{0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x06060606, 0x3f3f}, /* 1 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 2 */
	{0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 3 */
	{0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000606, 0x3f3f}, /* 5 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 6 */
	{0x003f0808, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000606, 0x0600}, /* 7 */
	{0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 8 */
	{0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 9 */
	{0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 10 */
	{0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000606, 0x3f00}, /* 11 */
	/* when ddr4 12 map to 10, when ddr3 12 unused */
	{0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 12 (same values as 10) */
	{0x00070706, 0x00000000, 0x1f010000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000606, 0x3f3f}, /* 13 */
};
115
/* PX30 physical base addresses of the blocks touched during DRAM init */
#define PMUGRF_BASE_ADDR		0xFF010000
#define CRU_BASE_ADDR			0xFF2B0000
#define GRF_BASE_ADDR			0xFF140000
#define DDRC_BASE_ADDR			0xFF600000
#define DDR_PHY_BASE_ADDR		0xFF2A0000
#define SERVER_MSCH0_BASE_ADDR		0xFF530000
#define DDR_GRF_BASE_ADDR		0xff630000

/* TPL-time driver state; filled in by sdram_init() before use */
struct dram_info dram_info;
125
/*
 * Built-in parameter set for the selected DRAM type; the "-detect-333"
 * .inc files provide a 333 MHz probe configuration whose geometry is
 * corrected at runtime by sdram_init_detect().
 */
struct px30_sdram_params sdram_configs[] = {
#if defined(CONFIG_RAM_ROCKCHIP_DDR4)
#include	"sdram-px30-ddr4-detect-333.inc"
#elif defined(CONFIG_RAM_ROCKCHIP_LPDDR2)
#include	"sdram-px30-lpddr2-detect-333.inc"
#elif defined(CONFIG_RAM_ROCKCHIP_LPDDR3)
#include	"sdram-px30-lpddr3-detect-333.inc"
#else
#include	"sdram-px30-ddr3-detect-333.inc"
#endif
};
137
/* PHY de-skew table, attached to the config by get_default_sdram_config() */
struct ddr_phy_skew skew = {
#include	"sdram-px30-ddr_skew.inc"
};
141
/*
 * Assert (1) or release (0) the DDR controller and PHY soft resets:
 *   ctl_srstn/ctl_psrstn - controller core (and AXI) / APB resets
 *   phy_srstn/phy_psrstn - PHY core / APB resets
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	/* the controller core and AXI resets share one request value */
	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
	       upctl2_asrstn_req(ctl_srstn),
	       &dram->cru->softrst_con[1]);
	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[2]);
}
152
153static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
154{
155	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
156	int delay = 1000;
157	u32 mhz = hz / MHz;
158
159	refdiv = 1;
160	if (mhz <= 300) {
161		postdiv1 = 4;
162		postdiv2 = 2;
163	} else if (mhz <= 400) {
164		postdiv1 = 6;
165		postdiv2 = 1;
166	} else if (mhz <= 600) {
167		postdiv1 = 4;
168		postdiv2 = 1;
169	} else if (mhz <= 800) {
170		postdiv1 = 3;
171		postdiv2 = 1;
172	} else if (mhz <= 1600) {
173		postdiv1 = 2;
174		postdiv2 = 1;
175	} else {
176		postdiv1 = 1;
177		postdiv2 = 1;
178	}
179	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
180
181	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
182
183	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
184	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
185	       &dram->cru->pll[1].con1);
186
187	while (delay > 0) {
188		udelay(1);
189		if (LOCK(readl(&dram->cru->pll[1].con1)))
190			break;
191		delay--;
192	}
193
194	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
195}
196
197static void rkclk_configure_ddr(struct dram_info *dram,
198				struct px30_sdram_params *sdram_params)
199{
200	/* for inno ddr phy need 2*freq */
201	rkclk_set_dpll(dram,  sdram_params->base.ddr_freq * MHz * 2);
202}
203
204/* return ddrconfig value
205 *       (-1), find ddrconfig fail
206 *       other, the ddrconfig value
207 * only support cs0_row >= cs1_row
208 */
209static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
210{
211	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
212	u32 bw, die_bw, col, bank;
213	u32 i, tmp;
214	u32 ddrconf = -1;
215
216	bw = cap_info->bw;
217	die_bw = cap_info->dbw;
218	col = cap_info->col;
219	bank = cap_info->bk;
220
221	if (sdram_params->base.dramtype == DDR4) {
222		if (die_bw == 0)
223			ddrconf = 7 + bw;
224		else
225			ddrconf = 12 - bw;
226		ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
227	} else {
228		tmp = ((bank - 2) << 3) | (col + bw - 10);
229		for (i = 0; i < 7; i++)
230			if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
231				ddrconf = i;
232				break;
233			}
234		if (i > 6)
235			printascii("calculate ddrconfig error\n");
236	}
237
238	return ddrconf;
239}
240
241/*
242 * calculate controller dram address map, and setting to register.
243 * argument sdram_params->ch.ddrconf must be right value before
244 * call this function.
245 */
246static void set_ctl_address_map(struct dram_info *dram,
247				struct px30_sdram_params *sdram_params)
248{
249	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
250	void __iomem *pctl_base = dram->pctl;
251	u32 cs_pst, bg, max_row, ddrconf;
252	u32 i;
253
254	if (sdram_params->base.dramtype == DDR4)
255		/*
256		 * DDR4 8bit dram BG = 2(4bank groups),
257		 * 16bit dram BG = 1 (2 bank groups)
258		 */
259		bg = (cap_info->dbw == 0) ? 2 : 1;
260	else
261		bg = 0;
262
263	cs_pst = cap_info->bw + cap_info->col +
264		bg + cap_info->bk + cap_info->cs0_row;
265	if (cs_pst >= 32 || cap_info->rank == 1)
266		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);
267	else
268		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);
269
270	ddrconf = cap_info->ddrconfig;
271	if (sdram_params->base.dramtype == DDR4) {
272		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
273			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
274				ddrconf = 7 + i;
275				break;
276			}
277		}
278	}
279
280	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
281			  &addrmap[ddrconf][0], 8 * 4);
282	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);
283
284	if (max_row < 12)
285		printascii("set addrmap fail\n");
286	/* need to disable row ahead of rank by set to 0xf */
287	for (i = 17; i > max_row; i--)
288		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
289			((i - 12) * 8 / 32) * 4,
290			0xf << ((i - 12) * 8 % 32),
291			0xf << ((i - 12) * 8 % 32));
292
293	if ((sdram_params->base.dramtype == LPDDR3 ||
294	     sdram_params->base.dramtype == LPDDR2) &&
295		 cap_info->row_3_4)
296		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
297	if (sdram_params->base.dramtype == DDR4 && cap_info->bw != 0x2)
298		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
299}
300
301/*
302 * rank = 1: cs0
303 * rank = 2: cs1
304 */
/* Issue a mode-register read and return the 8-bit result from DDR GRF */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
{
	void __iomem *ddr_grf_base = dram->ddr_grf;

	pctl_read_mr(dram->pctl, rank, mr_num);

	/* result is latched in the low byte of DDR_GRF_STATUS(0) */
	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
}
313
314#define MIN(a, b)	(((a) > (b)) ? (b) : (a))
315#define MAX(a, b)	(((a) > (b)) ? (a) : (b))
316static u32 check_rd_gate(struct dram_info *dram)
317{
318	void __iomem *phy_base = dram->phy;
319
320	u32 max_val = 0;
321	u32 min_val = 0xff;
322	u32 gate[4];
323	u32 i, bw;
324
325	bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
326	switch (bw) {
327	case 0x1:
328		bw = 1;
329		break;
330	case 0x3:
331		bw = 2;
332		break;
333	case 0xf:
334	default:
335		bw = 4;
336		break;
337	}
338
339	for (i = 0; i < bw; i++) {
340		gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
341		max_val = MAX(max_val, gate[i]);
342		min_val = MIN(min_val, gate[i]);
343	}
344
345	if (max_val > 0x80 || min_val < 0x20)
346		return -1;
347	else
348		return 0;
349}
350
351static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
352{
353	void __iomem *pctl_base = dram->pctl;
354	u32 dis_auto_zq = 0;
355	u32 pwrctl;
356	u32 ret;
357
358	/* disable auto low-power */
359	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
360	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
361
362	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
363
364	ret = phy_data_training(dram->phy, cs, dramtype);
365
366	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
367
368	/* restore auto low-power */
369	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
370
371	return ret;
372}
373
/* Configure the PHY data-bus width (bw as used in cap_info->bw: 1 or 2) */
static void dram_set_bw(struct dram_info *dram, u32 bw)
{
	phy_dram_set_bw(dram->phy, bw);
}
378
/*
 * Publish the chosen ddrconfig to the memory scheduler.  The value is
 * written to both byte fields of deviceconf (presumably one field per
 * port/channel - confirm against the TRM).
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
	/* clear soc_noc_con[1] bits [15:14] (hi-halfword write-enable form) */
	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
}
384
/*
 * Program the NoC memory scheduler: per-rank capacity plus the
 * scheduler timing set from the selected configuration.
 */
static void sdram_msch_config(struct msch_regs *msch,
			      struct sdram_msch_timings *noc_timings,
			      struct sdram_cap_info *cap_info,
			      struct sdram_base_params *base)
{
	u64 cs_cap[2];

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, base->dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, base->dramtype);
	/* devicesize: each rank's capacity in units of 64 MiB (>>20 / 64) */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&msch->devicesize);

	writel(noc_timings->ddrtiminga0.d32,
	       &msch->ddrtiminga0);
	writel(noc_timings->ddrtimingb0.d32,
	       &msch->ddrtimingb0);
	writel(noc_timings->ddrtimingc0.d32,
	       &msch->ddrtimingc0);
	writel(noc_timings->devtodev0.d32,
	       &msch->devtodev0);
	writel(noc_timings->ddrmode.d32, &msch->ddrmode);
	writel(noc_timings->ddr4timing.d32,
	       &msch->ddr4timing);
	/* the single agingx0 value is replicated into aging0..aging3 */
	writel(noc_timings->agingx0, &msch->agingx0);
	writel(noc_timings->agingx0, &msch->aging0);
	writel(noc_timings->agingx0, &msch->aging1);
	writel(noc_timings->agingx0, &msch->aging2);
	writel(noc_timings->agingx0, &msch->aging3);
}
415
/*
 * Final configuration pass: program the scheduler and record the
 * detected DRAM organization in pmugrf os_reg[2]/[3], where later boot
 * stages (see px30_dmc_probe()) read it back.
 */
static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	sdram_msch_config(dram->msch, &sdram_params->ch.noc_timings, cap_info,
			  &sdram_params->base);
}
431
/*
 * Enable the controller's automatic clock gating and low-power entry
 * (self-refresh / power-down), gated by the SR_IDLE / PD_IDLE policy.
 */
static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1 enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1 ungated axi,core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1 enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0 disable core clk gating when type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1
	 *       ungating coreclk when c_sysreq assert
	 * bit8-11: grf_auto_sr_dly = 6
	 */
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	/* upper halfword (0x7 << 16) is the write-enable mask for bits [2:0] */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* off digit module clock when enter power down */
	setbits_le32(PHY_REG(phy_base, 7), 1 << 7);

	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
476
477/*
478 * pre_init: 0: pre init for dram cap detect
479 * 1: detect correct cap(except cs1 row)info, than reinit
480 * 2: after reinit, we detect cs1_row, if cs1_row not equal
481 *    to cs0_row and cs is in middle on ddrconf map, we need
482 *    to reinit dram, than set the correct ddrconf.
483 */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	/* assert every controller/PHY reset before touching the clocks */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram->phy);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs, SR_IDLE, PD_IDLE);
	cap_info->ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram->phy, &sdram_params->phy_regs, sdram_params->skew,
		&sdram_params->base, cap_info->bw);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3)
		pctl_write_mr(dram->pctl, 3, 11, 3, LPDDR3);

	/* do ddr gate training */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->base.dramtype) != 0) {
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	if (check_rd_gate(dram)) {
		printascii("re training cs0");
		goto redo_cs0_training;
	}

	/* sanity-check LPDDR MR8 (expected value differs per LPDDR type) */
	if (sdram_params->base.dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->base.dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: when 2cs, both 2 cs should be training */
	if (pre_init != 0 && cap_info->rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->base.dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re training cs1");
			goto redo_cs1_training;
		}
	}

	if (sdram_params->base.dramtype == DDR4)
		pctl_write_vrefdq(dram->pctl, 0x3, 5670,
				  sdram_params->base.dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
564
/*
 * Probe the real DRAM geometry (col/bank/row/bw/rank) via test accesses.
 * @channel is unused here (single-channel SoC).
 * Returns 0 on success, -1 when col or row detection fails.
 */
static int dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		if (dram_type == LPDDR2)
			rowtmp = 15;
		else
			rowtmp = 16;

		if (sdram_detect_col(cap_info, coltmp) != 0)
			goto cap_err;
		sdram_detect_bank(cap_info, coltmp, bktmp);
		sdram_detect_dbw(cap_info, dram_type);
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		/* DDR4 col/bank are fixed; only the bank-group count varies */
		col = 10;
		bk = 2;
		cap_info->col = col;
		cap_info->bk = bk;
		sdram_detect_bg(cap_info, coltmp);
	}

	/* detect row */
	if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
		goto cap_err;

	/* detect row_3_4 */
	sdram_detect_row_3_4(cap_info, coltmp, bktmp);

	/* bw and cs detect using data training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	dram_set_bw(dram, 2);
	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;
	cap_info->bw = bw;

	/* assume cs1 mirrors cs0 for now; cs1 row is re-detected later */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
644
645/* return: 0 = success, other = fail */
646static int sdram_init_detect(struct dram_info *dram,
647			     struct px30_sdram_params *sdram_params)
648{
649	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
650	u32 ret;
651	u32 sys_reg = 0;
652	u32 sys_reg3 = 0;
653
654	if (sdram_init_(dram, sdram_params, 0) != 0)
655		return -1;
656
657	if (dram_detect_cap(dram, sdram_params, 0) != 0)
658		return -1;
659
660	/* modify bw, cs related timing */
661	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
662				   sdram_params->base.dramtype);
663	/* reinit sdram by real dram cap */
664	ret = sdram_init_(dram, sdram_params, 1);
665	if (ret != 0)
666		goto out;
667
668	/* redetect cs1 row */
669	sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
670	if (cap_info->cs1_row) {
671		sys_reg = readl(&dram->pmugrf->os_reg[2]);
672		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
673		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
674				    sys_reg, sys_reg3, 0);
675		writel(sys_reg, &dram->pmugrf->os_reg[2]);
676		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
677	}
678
679	ret = sdram_detect_high_row(cap_info);
680
681out:
682	return ret;
683}
684
685struct px30_sdram_params
686		*get_default_sdram_config(void)
687{
688	sdram_configs[0].skew = &skew;
689
690	return &sdram_configs[0];
691}
692
693/* return: 0 = success, other = fail */
694int sdram_init(void)
695{
696	struct px30_sdram_params *sdram_params;
697	int ret = 0;
698
699	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
700	dram_info.pctl = (void *)DDRC_BASE_ADDR;
701	dram_info.grf = (void *)GRF_BASE_ADDR;
702	dram_info.cru = (void *)CRU_BASE_ADDR;
703	dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
704	dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
705	dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;
706
707	sdram_params = get_default_sdram_config();
708	ret = sdram_init_detect(&dram_info, sdram_params);
709
710	if (ret)
711		goto error;
712
713	sdram_print_ddr_info(&sdram_params->ch.cap_info, &sdram_params->base, 0);
714
715	printascii("out\n");
716	return ret;
717error:
718	return (-1);
719}
720#else
721
/*
 * U-Boot-proper probe: recover the RAM size that TPL encoded into
 * pmugrf os_reg[2] during sdram_init().
 */
static int px30_dmc_probe(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	debug("%s: grf=%p\n", __func__, priv->pmugrf);
	priv->info.base = CFG_SYS_SDRAM_BASE;
	/* decode the size from the os_reg encoding written by TPL */
	priv->info.size =
		rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);

	return 0;
}
734
735static int px30_dmc_get_info(struct udevice *dev, struct ram_info *info)
736{
737	struct dram_info *priv = dev_get_priv(dev);
738
739	*info = priv->info;
740
741	return 0;
742}
743
static struct ram_ops px30_dmc_ops = {
	.get_info = px30_dmc_get_info,
};

static const struct udevice_id px30_dmc_ids[] = {
	{ .compatible = "rockchip,px30-dmc" },
	{ }	/* sentinel */
};

/* U-Boot-proper driver: only reports the RAM layout probed by TPL */
U_BOOT_DRIVER(dmc_px30) = {
	.name = "rockchip_px30_dmc",
	.id = UCLASS_RAM,
	.of_match = px30_dmc_ids,
	.ops = &px30_dmc_ops,
	.probe = px30_dmc_probe,
	.priv_auto	= sizeof(struct dram_info),
};
761#endif /* CONFIG_TPL_BUILD */
762