// SPDX-License-Identifier: GPL-2.0+
/*
 * sunxi DRAM controller initialization
 *
 * Based on sun4i Linux kernel sources mach-sunxi/pm/standby/dram*.c
 * and earlier U-Boot Allwinner A10 SPL work
 *
 * (C) Copyright 2007-2012
 * Allwinner Technology Co., Ltd. <www.allwinnertech.com>
 */

/*
 * Unfortunately the only documentation we have on the sun7i DRAM
 * controller is Allwinner boot0 + boot1 code, and that code uses
 * magic numbers & shifts with no explanations. Hence this code is
 * rather undocumented and full of magic.
 */
26 #include <asm/arch/clock.h>
27 #include <asm/arch/dram.h>
28 #include <asm/arch/timer.h>
29 #include <asm/arch/sys_proto.h>
30 #include <linux/delay.h>
32 #define CPU_CFG_CHIP_VER(n) ((n) << 6)
33 #define CPU_CFG_CHIP_VER_MASK CPU_CFG_CHIP_VER(0x3)
34 #define CPU_CFG_CHIP_REV_A 0x0
35 #define CPU_CFG_CHIP_REV_C1 0x1
36 #define CPU_CFG_CHIP_REV_C2 0x2
37 #define CPU_CFG_CHIP_REV_B 0x3
40 * Wait up to 1s for mask to be clear in given reg.
42 static inline void await_bits_clear(u32 *reg, u32 mask)
44 mctl_await_completion(reg, mask, 0);
48 * Wait up to 1s for mask to be set in given reg.
50 static inline void await_bits_set(u32 *reg, u32 mask)
52 mctl_await_completion(reg, mask, mask);
56 * This performs the external DRAM reset by driving the RESET pin low and
57 * then high again. According to the DDR3 spec, the RESET pin needs to be
58 * kept low for at least 200 us.
60 static void mctl_ddr3_reset(void)
62 struct sunxi_dram_reg *dram =
63 (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
65 #ifdef CONFIG_MACH_SUN4I
66 struct sunxi_timer_reg *timer =
67 (struct sunxi_timer_reg *)SUNXI_TIMER_BASE;
70 writel(0, &timer->cpu_cfg);
71 reg_val = readl(&timer->cpu_cfg);
73 if ((reg_val & CPU_CFG_CHIP_VER_MASK) !=
74 CPU_CFG_CHIP_VER(CPU_CFG_CHIP_REV_A)) {
75 setbits_le32(&dram->mcr, DRAM_MCR_RESET);
77 clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
81 clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
83 setbits_le32(&dram->mcr, DRAM_MCR_RESET);
85 /* After the RESET pin is de-asserted, the DDR3 spec requires to wait
86 * for additional 500 us before driving the CKE pin (Clock Enable)
87 * high. The duration of this delay can be configured in the SDR_IDCR
88 * (Initialization Delay Configuration Register) and applied
89 * automatically by the DRAM controller during the DDR3 initialization
90 * step. But SDR_IDCR has limited range on sun4i/sun5i hardware and
91 * can't provide sufficient delay at DRAM clock frequencies higher than
92 * 524 MHz (while Allwinner A13 supports DRAM clock frequency up to
93 * 533 MHz according to the datasheet). Additionally, there is no
94 * official documentation for the SDR_IDCR register anywhere, and
95 * there is always a chance that we are interpreting it wrong.
96 * Better be safe than sorry, so add an explicit delay here. */
100 static void mctl_set_drive(void)
102 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
104 #ifdef CONFIG_MACH_SUN7I
105 clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3) | (0x3 << 28),
107 clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3),
109 DRAM_MCR_MODE_EN(0x3) |
113 static void mctl_itm_disable(void)
115 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
117 clrsetbits_le32(&dram->ccr, DRAM_CCR_INIT, DRAM_CCR_ITM_OFF);
120 static void mctl_itm_enable(void)
122 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
124 clrbits_le32(&dram->ccr, DRAM_CCR_ITM_OFF);
/* Pulse the ITM off and back on to reset it */
static void mctl_itm_reset(void)
{
	mctl_itm_disable();
	udelay(1); /* ITM reset needs a bit of delay */
	mctl_itm_enable();
	udelay(1);
}
135 static void mctl_enable_dll0(u32 phase)
137 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
139 clrsetbits_le32(&dram->dllcr[0], 0x3f << 6,
140 ((phase >> 16) & 0x3f) << 6);
141 clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET, DRAM_DLLCR_DISABLE);
144 clrbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET | DRAM_DLLCR_DISABLE);
147 clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_DISABLE, DRAM_DLLCR_NRESET);
151 /* Get the number of DDR byte lanes */
152 static u32 mctl_get_number_of_lanes(void)
154 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
155 if ((readl(&dram->dcr) & DRAM_DCR_BUS_WIDTH_MASK) ==
156 DRAM_DCR_BUS_WIDTH(DRAM_DCR_BUS_WIDTH_32BIT))
163 * Note: This differs from pm/standby in that it checks the bus width
165 static void mctl_enable_dllx(u32 phase)
167 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
168 u32 i, number_of_lanes;
170 number_of_lanes = mctl_get_number_of_lanes();
172 for (i = 1; i <= number_of_lanes; i++) {
173 clrsetbits_le32(&dram->dllcr[i], 0xf << 14,
174 (phase & 0xf) << 14);
175 clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET,
181 for (i = 1; i <= number_of_lanes; i++)
182 clrbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET |
186 for (i = 1; i <= number_of_lanes; i++)
187 clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_DISABLE,
/* Per-host-port HPCR (host port configuration register) setup values,
 * one entry per port. The numbers are undocumented magic taken from the
 * Allwinner boot0 bootloader — do not "fix" individual entries without
 * hardware verification.
 * NOTE(review): this copy of the table looks truncated — upstream U-Boot
 * has 32 entries (8 rows of 4) per SoC and closes the initializer with
 * "};" plus #endif lines; confirm the missing rows against upstream
 * before building. */
static u32 hpcr_value[32] = {
#ifdef CONFIG_MACH_SUN5I
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0,
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0
#ifdef CONFIG_MACH_SUN4I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0, 0,
	0x1031, 0x1031, 0x0735, 0x5031,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x1031, 0x0301, 0x0301, 0x0731
#ifdef CONFIG_MACH_SUN7I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0x0301,
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x0001, 0x1031, 0, 0x1031
	/* last row differs from boot0 source table
	 * 0x1031, 0x0301, 0x0301, 0x0731
	 * but boot0 code skips #28 and #30, and sets #29 and #31 to the
	 * value from #28 entry (0x1031)
	 */
230 static void mctl_configure_hostport(void)
232 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
235 for (i = 0; i < 32; i++)
236 writel(hpcr_value[i], &dram->hpcr[i]);
239 static void mctl_setup_dram_clock(u32 clk, u32 mbus_clk)
242 struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
243 u32 pll5p_clk, pll6x_clk;
244 u32 pll5p_div, pll6x_div;
245 u32 pll5p_rate, pll6x_rate;
248 reg_val = readl(&ccm->pll5_cfg);
249 reg_val &= ~CCM_PLL5_CTRL_M_MASK; /* set M to 0 (x1) */
250 reg_val &= ~CCM_PLL5_CTRL_K_MASK; /* set K to 0 (x1) */
251 reg_val &= ~CCM_PLL5_CTRL_N_MASK; /* set N to 0 (x0) */
252 reg_val &= ~CCM_PLL5_CTRL_P_MASK; /* set P to 0 (x1) */
253 #ifdef CONFIG_OLD_SUNXI_KERNEL_COMPAT
254 /* Old kernels are hardcoded to P=1 (divide by 2) */
255 reg_val |= CCM_PLL5_CTRL_P(1);
257 if (clk >= 540 && clk < 552) {
259 reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
260 reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
261 reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(15));
262 } else if (clk >= 512 && clk < 528) {
264 reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
265 reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(4));
266 reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(16));
267 } else if (clk >= 496 && clk < 504) {
269 reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
270 reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
271 reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(31));
272 } else if (clk >= 468 && clk < 480) {
274 reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
275 reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
276 reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(13));
277 } else if (clk >= 396 && clk < 408) {
279 reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
280 reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
281 reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(11));
283 /* any other frequency that is a multiple of 24 */
284 reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
285 reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
286 reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(clk / 24));
288 reg_val &= ~CCM_PLL5_CTRL_VCO_GAIN; /* PLL VCO Gain off */
289 reg_val |= CCM_PLL5_CTRL_EN; /* PLL On */
290 writel(reg_val, &ccm->pll5_cfg);
293 setbits_le32(&ccm->pll5_cfg, CCM_PLL5_CTRL_DDR_CLK);
295 #if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN7I)
297 clrbits_le32(&ccm->gps_clk_cfg, CCM_GPS_CTRL_RESET | CCM_GPS_CTRL_GATE);
298 setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
300 clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
303 /* setup MBUS clock */
307 /* PLL5P and PLL6 are the potential clock sources for MBUS */
308 pll6x_clk = clock_get_pll6() / 1000000;
309 #ifdef CONFIG_MACH_SUN7I
310 pll6x_clk *= 2; /* sun7i uses PLL6*2, sun5i uses just PLL6 */
312 pll5p_clk = clock_get_pll5p() / 1000000;
313 pll6x_div = DIV_ROUND_UP(pll6x_clk, mbus_clk);
314 pll5p_div = DIV_ROUND_UP(pll5p_clk, mbus_clk);
315 pll6x_rate = pll6x_clk / pll6x_div;
316 pll5p_rate = pll5p_clk / pll5p_div;
318 if (pll6x_div <= 16 && pll6x_rate > pll5p_rate) {
319 /* use PLL6 as the MBUS clock source */
320 reg_val = CCM_MBUS_CTRL_GATE |
321 CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL6) |
322 CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
323 CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll6x_div));
324 } else if (pll5p_div <= 16) {
325 /* use PLL5P as the MBUS clock source */
326 reg_val = CCM_MBUS_CTRL_GATE |
327 CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL5) |
328 CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
329 CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll5p_div));
331 panic("Bad mbus_clk\n");
333 writel(reg_val, &ccm->mbus_clk_cfg);
336 * open DRAMC AHB & DLL register clock
339 #if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
340 clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
342 clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
347 #if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
348 setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
350 setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
356 * The data from rslrX and rdgrX registers (X=rank) is stored
357 * in a single 32-bit value using the following format:
358 * bits [31:26] - DQS gating system latency for byte lane 3
359 * bits [25:24] - DQS gating phase select for byte lane 3
360 * bits [23:18] - DQS gating system latency for byte lane 2
361 * bits [17:16] - DQS gating phase select for byte lane 2
362 * bits [15:10] - DQS gating system latency for byte lane 1
363 * bits [ 9:8 ] - DQS gating phase select for byte lane 1
364 * bits [ 7:2 ] - DQS gating system latency for byte lane 0
365 * bits [ 1:0 ] - DQS gating phase select for byte lane 0
367 static void mctl_set_dqs_gating_delay(int rank, u32 dqs_gating_delay)
369 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
370 u32 lane, number_of_lanes = mctl_get_number_of_lanes();
371 /* rank0 gating system latency (3 bits per lane: cycles) */
372 u32 slr = readl(rank == 0 ? &dram->rslr0 : &dram->rslr1);
373 /* rank0 gating phase select (2 bits per lane: 90, 180, 270, 360) */
374 u32 dgr = readl(rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
375 for (lane = 0; lane < number_of_lanes; lane++) {
376 u32 tmp = dqs_gating_delay >> (lane * 8);
377 slr &= ~(7 << (lane * 3));
378 slr |= ((tmp >> 2) & 7) << (lane * 3);
379 dgr &= ~(3 << (lane * 2));
380 dgr |= (tmp & 3) << (lane * 2);
382 writel(slr, rank == 0 ? &dram->rslr0 : &dram->rslr1);
383 writel(dgr, rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
386 static int dramc_scan_readpipe(void)
388 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
391 /* data training trigger */
392 clrbits_le32(&dram->csr, DRAM_CSR_FAILED);
393 setbits_le32(&dram->ccr, DRAM_CCR_DATA_TRAINING);
395 /* check whether data training process has completed */
396 await_bits_clear(&dram->ccr, DRAM_CCR_DATA_TRAINING);
398 /* check data training result */
399 reg_val = readl(&dram->csr);
400 if (reg_val & DRAM_CSR_FAILED)
406 static void dramc_clock_output_en(u32 on)
408 #if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
409 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
412 setbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
414 clrbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
416 #ifdef CONFIG_MACH_SUN4I
417 struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
419 setbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
421 clrbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
425 /* tRFC in nanoseconds for different densities (from the DDR3 spec) */
426 static const u16 tRFC_DDR3_table[6] = {
427 /* 256Mb 512Mb 1Gb 2Gb 4Gb 8Gb */
428 90, 90, 110, 160, 300, 350
431 static void dramc_set_autorefresh_cycle(u32 clk, u32 density)
433 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
436 tRFC = (tRFC_DDR3_table[density] * clk + 999) / 1000;
437 tREFI = (7987 * clk) >> 10; /* <= 7.8us */
439 writel(DRAM_DRR_TREFI(tREFI) | DRAM_DRR_TRFC(tRFC), &dram->drr);
442 /* Calculate the value for A11, A10, A9 bits in MR0 (write recovery) */
443 static u32 ddr3_write_recovery(u32 clk)
445 u32 twr_ns = 15; /* DDR3 spec says that it is 15ns for all speed bins */
446 u32 twr_ck = (twr_ns * clk + 999) / 1000;
449 else if (twr_ck <= 8)
451 else if (twr_ck <= 10)
458 * If the dram->ppwrsctl (SDR_DPCR) register has the lowest bit set to 1, this
459 * means that DRAM is currently in self-refresh mode and retaining the old
460 * data. Since we have no idea what to do in this situation yet, just set this
461 * register to 0 and initialize DRAM in the same way as on any normal reboot
462 * (discarding whatever was stored there).
464 * Note: on sun7i hardware, the highest 16 bits need to be set to 0x1651 magic
465 * value for this write operation to have any effect. On sun5i hadware this
466 * magic value is not necessary. And on sun4i hardware the writes to this
467 * register seem to have no effect at all.
469 static void mctl_disable_power_save(void)
471 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
472 writel(0x16510000, &dram->ppwrsctl);
476 * After the DRAM is powered up or reset, the DDR3 spec requires to wait at
477 * least 500 us before driving the CKE pin (Clock Enable) high. The dram->idct
478 * (SDR_IDCR) register appears to configure this delay, which gets applied
479 * right at the time when the DRAM initialization is activated in the
480 * 'mctl_ddr3_initialize' function.
482 static void mctl_set_cke_delay(void)
484 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
486 /* The CKE delay is represented in DRAM clock cycles, multiplied by N
487 * (where N=2 for sun4i/sun5i and N=3 for sun7i). Here it is set to
488 * the maximum possible value 0x1ffff, just like in the Allwinner's
489 * boot0 bootloader. The resulting delay value is somewhere between
490 * ~0.4 ms (sun5i with 648 MHz DRAM clock speed) and ~1.1 ms (sun7i
491 * with 360 MHz DRAM clock speed). */
492 setbits_le32(&dram->idcr, 0x1ffff);
496 * This triggers the DRAM initialization. It performs sending the mode registers
497 * to the DRAM among other things. Very likely the ZQCL command is also getting
498 * executed (to do the initial impedance calibration on the DRAM side of the
499 * wire). The memory controller and the PHY must be already configured before
500 * calling this function.
502 static void mctl_ddr3_initialize(void)
504 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
505 setbits_le32(&dram->ccr, DRAM_CCR_INIT);
506 await_bits_clear(&dram->ccr, DRAM_CCR_INIT);
510 * Perform impedance calibration on the DRAM controller side of the wire.
512 static void mctl_set_impedance(u32 zq, bool odt_en)
514 struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
516 u32 zprog = zq & 0xFF, zdata = (zq >> 8) & 0xFFFFF;
518 #ifndef CONFIG_MACH_SUN7I
519 /* Appears that some kind of automatically initiated default
520 * ZQ calibration is already in progress at this point on sun4i/sun5i
521 * hardware, but not on sun7i. So it is reasonable to wait for its
522 * completion before doing anything else. */
523 await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
526 /* ZQ calibration is not really useful unless ODT is enabled */
530 #ifdef CONFIG_MACH_SUN7I
531 /* Enabling ODT in SDR_IOCR on sun7i hardware results in a deadlock
532 * unless bit 24 is set in SDR_ZQCR1. Not much is known about the
533 * SDR_ZQCR1 register, but there are hints indicating that it might
534 * be related to periodic impedance re-calibration. This particular
535 * magic value is borrowed from the Allwinner boot0 bootloader, and
536 * using it helps to avoid troubles */
537 writel((1 << 24) | (1 << 1), &dram->zqcr1);
540 /* Needed at least for sun5i, because it does not self clear there */
541 clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);
544 /* Set the user supplied impedance data */
545 reg_val = DRAM_ZQCR0_ZDEN | zdata;
546 writel(reg_val, &dram->zqcr0);
547 /* no need to wait, this takes effect immediately */
549 /* Do the calibration using the external resistor */
550 reg_val = DRAM_ZQCR0_ZCAL | DRAM_ZQCR0_IMP_DIV(zprog);
551 writel(reg_val, &dram->zqcr0);
552 /* Wait for the new impedance configuration to settle */
553 await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
556 /* Needed at least for sun5i, because it does not self clear there */
557 clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);
559 /* Set I/O configure register */
560 writel(DRAM_IOCR_ODT_EN, &dram->iocr);
/*
 * Initialize the DRAM controller and the external DDR3 memory according to
 * the settings in 'para', then probe and return the usable DRAM size via
 * get_ram_size().
 * NOTE(review): this copy looks truncated — the opening/closing braces,
 * declarations of 'reg_val'/'density'/'ret_val', several udelay() calls,
 * error-handling returns and some #else/#endif lines appear to be missing.
 * Compare against upstream U-Boot dram_sun4i.c before building; comments
 * below only describe what the visible lines do.
 */
static unsigned long dramc_init_helper(struct dram_para *para)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	/*
	 * only single rank DDR3 is supported by this code even though the
	 * hardware can theoretically support DDR2 and up to two ranks
	 */
	if (para->type != DRAM_MEMORY_TYPE_DDR3 || para->rank_num != 1)

	/* setup DRAM relative clock */
	mctl_setup_dram_clock(para->clock, para->mbus_clock);

	/* Disable any pad power save control */
	mctl_disable_power_save();

	/* keep the DRAM clock output gated while configuring */
	dramc_clock_output_en(0);

#ifdef CONFIG_MACH_SUN4I
	/* select dram controller 1 */
	writel(DRAM_CSEL_MAGIC, &dram->csel);

	/* command/address DLL phase comes from bits [21:16] of tpr3 */
	mctl_enable_dll0(para->tpr3);

	/* configure external DRAM */
	reg_val = DRAM_DCR_TYPE_DDR3;
	reg_val |= DRAM_DCR_IO_WIDTH(para->io_width >> 3);

	/* map the chip density in Mbit to its DCR field encoding */
	if (para->density == 256)
		density = DRAM_DCR_CHIP_DENSITY_256M;
	else if (para->density == 512)
		density = DRAM_DCR_CHIP_DENSITY_512M;
	else if (para->density == 1024)
		density = DRAM_DCR_CHIP_DENSITY_1024M;
	else if (para->density == 2048)
		density = DRAM_DCR_CHIP_DENSITY_2048M;
	else if (para->density == 4096)
		density = DRAM_DCR_CHIP_DENSITY_4096M;
	else if (para->density == 8192)
		density = DRAM_DCR_CHIP_DENSITY_8192M;
		density = DRAM_DCR_CHIP_DENSITY_256M; /* fallback for unknown density */

	reg_val |= DRAM_DCR_CHIP_DENSITY(density);
	reg_val |= DRAM_DCR_BUS_WIDTH((para->bus_width >> 3) - 1);
	reg_val |= DRAM_DCR_RANK_SEL(para->rank_num - 1);
	reg_val |= DRAM_DCR_CMD_RANK_ALL;
	reg_val |= DRAM_DCR_MODE(DRAM_DCR_MODE_INTERLEAVE);
	writel(reg_val, &dram->dcr);

	/* re-enable the DRAM clock output */
	dramc_clock_output_en(1);

	mctl_set_impedance(para->zq, para->odt_en);

	mctl_set_cke_delay();

	/* wait for any pending initialization trigger to finish */
	await_bits_clear(&dram->ccr, DRAM_CCR_INIT);

	/* per-byte-lane DLL phases come from the low nibbles of tpr3 */
	mctl_enable_dllx(para->tpr3);

	/* set refresh period */
	dramc_set_autorefresh_cycle(para->clock, density);

	/* set timing parameters */
	writel(para->tpr0, &dram->tpr0);
	writel(para->tpr1, &dram->tpr1);
	writel(para->tpr2, &dram->tpr2);

	/* compose the DDR3 mode register (MR0) value */
	reg_val = DRAM_MR_BURST_LENGTH(0x0);
#if (defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I))
	reg_val |= DRAM_MR_POWER_DOWN;
	reg_val |= DRAM_MR_CAS_LAT(para->cas - 4);
	reg_val |= DRAM_MR_WRITE_RECOVERY(ddr3_write_recovery(para->clock));
	writel(reg_val, &dram->mr);

	writel(para->emr1, &dram->emr);
	writel(para->emr2, &dram->emr2);
	writel(para->emr3, &dram->emr3);

	/* disable drift compensation and set passive DQS window mode */
	clrsetbits_le32(&dram->ccr, DRAM_CCR_DQS_DRIFT_COMP, DRAM_CCR_DQS_GATE);

#ifdef CONFIG_MACH_SUN7I
	/* Command rate timing mode 2T & 1T */
	if (para->tpr4 & 0x1)
		setbits_le32(&dram->ccr, DRAM_CCR_COMMAND_RATE_1T);

	/* initialize external DRAM */
	mctl_ddr3_initialize();

	/* scan read pipe value */
	/* Hardware DQS gate training */
	ret_val = dramc_scan_readpipe();

	/* allow to override the DQS training results with a custom delay */
	if (para->dqs_gating_delay)
		mctl_set_dqs_gating_delay(0, para->dqs_gating_delay);

	/* set the DQS gating window type */
	if (para->active_windowing)
		clrbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);
		setbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE); /* NOTE(review): likely the 'else' branch */

	/* configure all host port */
	mctl_configure_hostport();

	return get_ram_size((long *)PHYS_SDRAM_0, PHYS_SDRAM_0_SIZE);
694 unsigned long dramc_init(struct dram_para *para)
696 unsigned long dram_size, actual_density;
698 /* If the dram configuration is not provided, use a default */
702 /* if everything is known, then autodetection is not necessary */
703 if (para->io_width && para->bus_width && para->density)
704 return dramc_init_helper(para);
706 /* try to autodetect the DRAM bus width and density */
708 para->bus_width = 32;
709 #if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN5I)
710 /* only A0-A14 address lines on A10/A13, limiting max density to 4096 */
711 para->density = 4096;
713 /* all A0-A15 address lines on A20, which allow density 8192 */
714 para->density = 8192;
717 dram_size = dramc_init_helper(para);
719 /* if 32-bit bus width failed, try 16-bit bus width instead */
720 para->bus_width = 16;
721 dram_size = dramc_init_helper(para);
723 /* if 16-bit bus width also failed, then bail out */
728 /* check if we need to adjust the density */
729 actual_density = (dram_size >> 17) * para->io_width / para->bus_width;
731 if (actual_density != para->density) {
732 /* update the density and re-initialize DRAM again */
733 para->density = actual_density;
734 dram_size = dramc_init_helper(para);