// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2016-2022 Intel Corporation <www.intel.com>
 *
 */

#include <common.h>
#include <cpu_func.h>
#include <dm.h>
#include <errno.h>
#include <div64.h>
#include <fdtdec.h>
#include <hang.h>
#include <init.h>
#include <log.h>
#include <ram.h>
#include <reset.h>
#include <asm/global_data.h>
#include "sdram_s10.h"
#include <wait_bit.h>
#include <asm/arch/firewall.h>
#include <asm/arch/reset_manager.h>
#include <asm/io.h>
#include <linux/sizes.h>

DECLARE_GLOBAL_DATA_PTR;

#define DDR_CONFIG(A, B, C, R)	(((A) << 24) | ((B) << 16) | ((C) << 8) | (R))
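
/*
 * DDR_CONFIG() packs one supported configuration into a single word:
 * bits[31:24] = address order, bits[23:16] = bank (+ bank group) address
 * width, bits[15:8] = column address width, bits[7:0] = row address width.
 */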

/* The following are the supported configurations */
u32 ddr_config[] = {
	/* DDR_CONFIG(Address order,Bank,Column,Row) */
	/* List for DDR3 or LPDDR3 (pinout order > chip, row, bank, column) */
	DDR_CONFIG(0, 3, 10, 12),
	DDR_CONFIG(0, 3,  9, 13),
	DDR_CONFIG(0, 3, 10, 13),
	DDR_CONFIG(0, 3,  9, 14),
	DDR_CONFIG(0, 3, 10, 14),
	DDR_CONFIG(0, 3, 10, 15),
	DDR_CONFIG(0, 3, 11, 14),
	DDR_CONFIG(0, 3, 11, 15),
	DDR_CONFIG(0, 3, 10, 16),
	DDR_CONFIG(0, 3, 11, 16),
	DDR_CONFIG(0, 3, 12, 15),	/* 0xa */
	/* List for DDR4 only (pinout order > chip, bank, row, column) */
	DDR_CONFIG(1, 3, 10, 14),
	DDR_CONFIG(1, 4, 10, 14),
	DDR_CONFIG(1, 3, 10, 15),
	DDR_CONFIG(1, 4, 10, 15),
	DDR_CONFIG(1, 3, 10, 16),
	DDR_CONFIG(1, 4, 10, 16),
	DDR_CONFIG(1, 3, 10, 17),
	DDR_CONFIG(1, 4, 10, 17),
};

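/**
 * match_ddr_conf() - Look up a packed DDR configuration
 * @ddr_conf: DDR_CONFIG() value derived from the HMC registers
 *
 * Return: index of the matching ddr_config[] entry, or 0 when no entry
 * matches. Index 0 doubles as "no match"; the caller only reprograms the
 * scheduler DDRCONF register for a non-zero result.
 */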
int match_ddr_conf(u32 ddr_conf)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(ddr_config); i++) {
		if (ddr_conf == ddr_config[i])
			return i;
	}
	return 0;
}

/**
 * sdram_mmr_init_full() - Initialize the SDRAM memory-mapped registers
 * @dev: SDRAM controller device
 *
 * Return: 0 on success, -1 on failure.
 */
int sdram_mmr_init_full(struct udevice *dev)
{
	struct altera_sdram_plat *plat = dev_get_plat(dev);
	struct altera_sdram_priv *priv = dev_get_priv(dev);
	u32 update_value, io48_value, ddrioctl;
	u32 i;
	int ret;
	phys_size_t hw_size;
	struct bd_info bd = {0};

	/* Enable access to DDR from CPU master */
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_DDRREG),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE0),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1A),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1B),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1C),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1D),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_CPU0_MPRT_ADBASE_MEMSPACE1E),
		     CCU_ADBASE_DI_MASK);

	/* Enable access to DDR from IO master */
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE0),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1A),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1B),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1C),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1D),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_IOM_MPRT_ADBASE_MEMSPACE1E),
		     CCU_ADBASE_DI_MASK);

	/* Enable access to DDR from TCU */
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE0),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1A),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1B),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1C),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1D),
		     CCU_ADBASE_DI_MASK);
	clrbits_le32(CCU_REG_ADDR(CCU_TCU_MPRT_ADBASE_MEMSPACE1E),
		     CCU_ADBASE_DI_MASK);

	/* this enables nonsecure access to DDR */
	/* mpuregion0addr_limit */
	FW_MPU_DDR_SCR_WRITEL(0xFFFF0000, FW_MPU_DDR_SCR_MPUREGION0ADDR_LIMIT);
	FW_MPU_DDR_SCR_WRITEL(0x1F, FW_MPU_DDR_SCR_MPUREGION0ADDR_LIMITEXT);

	/* nonmpuregion0addr_limit */
	FW_MPU_DDR_SCR_WRITEL(0xFFFF0000,
			      FW_MPU_DDR_SCR_NONMPUREGION0ADDR_LIMIT);
	FW_MPU_DDR_SCR_WRITEL(0x1F, FW_MPU_DDR_SCR_NONMPUREGION0ADDR_LIMITEXT);

	/* Enable mpuregion0enable and nonmpuregion0enable */
	FW_MPU_DDR_SCR_WRITEL(MPUREGION0_ENABLE | NONMPUREGION0_ENABLE,
			      FW_MPU_DDR_SCR_EN_SET);
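
	/*
	 * With the region 0 address limits programmed above and region 0
	 * enabled for both MPU and non-MPU transactions, the DDR firewall
	 * no longer blocks nonsecure masters.
	 */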

	/* Ensure HMC clock is running */
	if (poll_hmc_clock_status()) {
		puts("DDR: Error: HMC clock is not running\n");
		return -1;
	}

	/* Try up to 3 times to calibrate */
	for (i = 0; i < 3; i++) {
		ret = wait_for_bit_le32((const void *)(plat->hmc +
					DDRCALSTAT),
					DDR_HMC_DDRCALSTAT_CAL_MSK, true, 1000,
					false);
		if (!ret)
			break;

		emif_reset(plat);
	}

	if (ret) {
		puts("DDR: Error: SDRAM calibration failed\n");
		return -1;
	}
	debug("DDR: Calibration success\n");

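	/*
	 * Snapshot the HMC configuration and calibration timing registers
	 * that are used below to derive the scheduler settings.
	 */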
	u32 ctrlcfg0 = hmc_readl(plat, CTRLCFG0);
	u32 ctrlcfg1 = hmc_readl(plat, CTRLCFG1);
	u32 dramaddrw = hmc_readl(plat, DRAMADDRW);
	u32 dramtim0 = hmc_readl(plat, DRAMTIMING0);
	u32 caltim0 = hmc_readl(plat, CALTIMING0);
	u32 caltim1 = hmc_readl(plat, CALTIMING1);
	u32 caltim2 = hmc_readl(plat, CALTIMING2);
	u32 caltim3 = hmc_readl(plat, CALTIMING3);
	u32 caltim4 = hmc_readl(plat, CALTIMING4);
	u32 caltim9 = hmc_readl(plat, CALTIMING9);

	/*
	 * Configure the DDR IO size [0xFFCFB008]
	 * niosreserve0: Used to indicate DDR width &
	 *	bit[7:0] = Number of data bits (bit[6:5] 0x01=32bit, 0x10=64bit)
	 *	bit[8]   = 1 if user-mode OCT is present
	 *	bit[9]   = 1 if warm reset compiled into EMIF Cal Code
	 *	bit[10]  = 1 if warm reset is on during generation in EMIF Cal
	 * niosreserve1: IP ADCDS version encoded as 16 bit value
	 *	bit[2:0] = Variant (0=not special,1=FAE beta, 2=Customer beta,
	 *			    3=EAP, 4-6 are reserved)
	 *	bit[5:3] = Service Pack # (e.g. 1)
	 *	bit[9:6] = Minor Release #
	 *	bit[14:10] = Major Release #
	 */
	update_value = hmc_readl(plat, NIOSRESERVED0);
	hmc_ecc_writel(plat, ((update_value & 0xFF) >> 5), DDRIOCTRL);
	ddrioctl = hmc_ecc_readl(plat, DDRIOCTRL);
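	/*
	 * ddrioctl now holds the DDR IO width encoding (niosreserve0
	 * bits [7:5]); it selects the bus-width ratio fields written to
	 * the scheduler below.
	 */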

	/* enable HPS interface to HMC */
	hmc_ecc_writel(plat, DDR_HMC_HPSINTFCSEL_ENABLE_MASK, HPSINTFCSEL);

	/* Set the DDR Configuration */
	io48_value = DDR_CONFIG(CTRLCFG1_CFG_ADDR_ORDER(ctrlcfg1),
				(DRAMADDRW_CFG_BANK_ADDR_WIDTH(dramaddrw) +
				 DRAMADDRW_CFG_BANK_GRP_ADDR_WIDTH(dramaddrw)),
				DRAMADDRW_CFG_COL_ADDR_WIDTH(dramaddrw),
				DRAMADDRW_CFG_ROW_ADDR_WIDTH(dramaddrw));

	update_value = match_ddr_conf(io48_value);
	if (update_value)
		ddr_sch_writel(plat, update_value, DDR_SCH_DDRCONF);

	/* Configure HMC dramaddrw */
	hmc_ecc_writel(plat, hmc_readl(plat, DRAMADDRW), DRAMADDRWIDTH);

	/*
	 * Configure DDR timing
	 *  RDTOMISS = tRTP + tRP + tRCD - BL/2
	 *  WRTOMISS = WL + tWR + tRP + tRCD and
	 *    WL = RL + BL/2 + 2 - rd-to-wr ; tWR = 15ns so...
	 *  First part of equation is in memory clock units so divide by 2
	 *  for HMC clock units. 1066MHz is close to 1ns so use 15 directly.
	 *  WRTOMISS = ((RL + BL/2 + 2 + tWR) >> 1) - rd-to-wr + tRP + tRCD
	 */
	u32 burst_len = CTRLCFG0_CFG_CTRL_BURST_LEN(ctrlcfg0);

	update_value = CALTIMING2_CFG_RD_TO_WR_PCH(caltim2) +
		       CALTIMING4_CFG_PCH_TO_VALID(caltim4) +
		       CALTIMING0_CFG_ACT_TO_RDWR(caltim0) -
		       (burst_len >> 2);
	io48_value = (((DRAMTIMING0_CFG_TCL(dramtim0) + 2 + DDR_TWR +
		       (burst_len >> 1)) >> 1) -
		      /* Up to here was in memory cycles so divide by 2 */
		      CALTIMING1_CFG_RD_TO_WR(caltim1) +
		      CALTIMING0_CFG_ACT_TO_RDWR(caltim0) +
		      CALTIMING4_CFG_PCH_TO_VALID(caltim4));
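	/* update_value = RDTOMISS, io48_value = WRTOMISS (HMC clock cycles) */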

	ddr_sch_writel(plat, ((CALTIMING0_CFG_ACT_TO_ACT(caltim0) <<
			 DDR_SCH_DDRTIMING_ACTTOACT_OFF) |
			(update_value << DDR_SCH_DDRTIMING_RDTOMISS_OFF) |
			(io48_value << DDR_SCH_DDRTIMING_WRTOMISS_OFF) |
			((burst_len >> 2) << DDR_SCH_DDRTIMING_BURSTLEN_OFF) |
			(CALTIMING1_CFG_RD_TO_WR(caltim1) <<
			 DDR_SCH_DDRTIMING_RDTOWR_OFF) |
			(CALTIMING3_CFG_WR_TO_RD(caltim3) <<
			 DDR_SCH_DDRTIMING_WRTORD_OFF) |
			(((ddrioctl == 1) ? 1 : 0) <<
			 DDR_SCH_DDRTIMING_BWRATIO_OFF)),
			DDR_SCH_DDRTIMING);

	/* Configure DDR mode [precharge = 0] */
	ddr_sch_writel(plat, ((ddrioctl ? 0 : 1) <<
			 DDR_SCH_DDRMOD_BWRATIOEXTENDED_OFF),
			DDR_SCH_DDRMODE);

	/* Configure the read latency */
	ddr_sch_writel(plat, (DRAMTIMING0_CFG_TCL(dramtim0) >> 1) +
			DDR_READ_LATENCY_DELAY,
			DDR_SCH_READ_LATENCY);

	/*
	 * Configuring timing values concerning activate commands
	 * [FAWBANK is always 1 because the DDR always has 4 banks]
	 */
	ddr_sch_writel(plat, ((CALTIMING0_CFG_ACT_TO_ACT_DB(caltim0) <<
			 DDR_SCH_ACTIVATE_RRD_OFF) |
			(CALTIMING9_CFG_4_ACT_TO_ACT(caltim9) <<
			 DDR_SCH_ACTIVATE_FAW_OFF) |
			(DDR_ACTIVATE_FAWBANK <<
			 DDR_SCH_ACTIVATE_FAWBANK_OFF)),
			DDR_SCH_ACTIVATE);

	/*
	 * Configuring timing values concerning device to device data bus
	 * ownership change
	 */
	ddr_sch_writel(plat, ((CALTIMING1_CFG_RD_TO_RD_DC(caltim1) <<
			 DDR_SCH_DEVTODEV_BUSRDTORD_OFF) |
			(CALTIMING1_CFG_RD_TO_WR_DC(caltim1) <<
			 DDR_SCH_DEVTODEV_BUSRDTOWR_OFF) |
			(CALTIMING3_CFG_WR_TO_RD_DC(caltim3) <<
			 DDR_SCH_DEVTODEV_BUSWRTORD_OFF)),
			DDR_SCH_DEVTODEV);

	/* Assign the SDRAM size */
	phys_size_t size = sdram_calculate_size(plat);
	/* If the size is invalid, fall back to the default config size */
	if (size <= 0)
		hw_size = PHYS_SDRAM_1_SIZE;
	else
		hw_size = size;

	/* Get bank configuration from devicetree */
	ret = fdtdec_decode_ram_size(gd->fdt_blob, NULL, 0, NULL,
				     (phys_size_t *)&gd->ram_size, &bd);
	if (ret) {
		puts("DDR: Failed to decode memory node\n");
		return -1;
	}

	if (gd->ram_size != hw_size)
		printf("DDR: Warning: DRAM size from device tree does not match the hardware size.\n");

	printf("DDR: %lld MiB\n", gd->ram_size >> 20);

	/* Enable or disable the SDRAM ECC */
	if (CTRLCFG1_CFG_CTRL_EN_ECC(ctrlcfg1)) {
		setbits_le32(plat->hmc + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_ECC_EN_SET_MSK));
		clrbits_le32(plat->hmc + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK));
		setbits_le32(plat->hmc + ECCCTRL2,
			     (DDR_HMC_ECCCTL2_RMW_EN_SET_MSK |
			      DDR_HMC_ECCCTL2_AWB_EN_SET_MSK));
		hmc_ecc_writel(plat, DDR_HMC_ERRINTEN_INTMASK, ERRINTENS);

		/* Initialize memory content if not from warm reset */
		if (!cpu_has_been_warmreset())
			sdram_init_ecc_bits(&bd);
	} else {
		clrbits_le32(plat->hmc + ECCCTRL1,
			     (DDR_HMC_ECCCTL_AWB_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_CNT_RST_SET_MSK |
			      DDR_HMC_ECCCTL_ECC_EN_SET_MSK));
		clrbits_le32(plat->hmc + ECCCTRL2,
			     (DDR_HMC_ECCCTL2_RMW_EN_SET_MSK |
			      DDR_HMC_ECCCTL2_AWB_EN_SET_MSK));
	}

	/* Enable non-secure reads/writes to HMC Adapter for SDRAM ECC */
	writel(FW_HMC_ADAPTOR_MPU_MASK, FW_HMC_ADAPTOR_REG_ADDR);

	sdram_size_check(&bd);

	priv->info.base = bd.bi_dram[0].start;
	priv->info.size = gd->ram_size;

	debug("DDR: HMC init success\n");
	return 0;
}