// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
12 #include <linux/delay.h>
14 #include "ddr3_hw_training.h"
19 #define DEBUG_DFS_C(s, d, l) \
20 DEBUG_DFS_S(s); DEBUG_DFS_D(d, l); DEBUG_DFS_S("\n")
21 #define DEBUG_DFS_FULL_C(s, d, l) \
22 DEBUG_DFS_FULL_S(s); DEBUG_DFS_FULL_D(d, l); DEBUG_DFS_FULL_S("\n")
25 #define DEBUG_DFS_S(s) puts(s)
26 #define DEBUG_DFS_D(d, l) printf("%x", d)
28 #define DEBUG_DFS_S(s)
29 #define DEBUG_DFS_D(d, l)
32 #ifdef MV_DEBUG_DFS_FULL
33 #define DEBUG_DFS_FULL_S(s) puts(s)
34 #define DEBUG_DFS_FULL_D(d, l) printf("%x", d)
36 #define DEBUG_DFS_FULL_S(s)
37 #define DEBUG_DFS_FULL_D(d, l)
40 #if defined(MV88F672X)
41 extern u8 div_ratio[CLK_VCO][CLK_DDR];
42 extern void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps);
44 extern u16 odt_dynamic[ODT_OPT][MAX_CS];
45 extern u8 div_ratio1to1[CLK_VCO][CLK_DDR];
46 extern u8 div_ratio2to1[CLK_VCO][CLK_DDR];
48 extern u16 odt_static[ODT_OPT][MAX_CS];
50 extern u32 cpu_fab_clk_to_hclk[FAB_OPT][CLK_CPU];
52 extern u32 ddr3_get_vco_freq(void);
54 u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1);
57 static inline void dfs_reg_write(u32 addr, u32 val)
59 printf("\n write reg 0x%08x = 0x%08x", addr, val);
60 writel(val, INTER_REGS_BASE + addr);
63 static inline void dfs_reg_write(u32 addr, u32 val)
65 writel(val, INTER_REGS_BASE + addr);
69 static void wait_refresh_op_complete(void)
73 /* Poll - Wait for Refresh operation completion */
75 reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
76 REG_SDRAM_OPERATION_CMD_RFRS_DONE;
77 } while (reg); /* Wait for '0' */
81 * Name: ddr3_get_freq_parameter
82 * Desc: Finds CPU/DDR frequency ratio according to Sample@reset and table.
83 * Args: target_freq - target frequency
85 * Returns: freq_par - the ratio parameter
87 u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1)
89 u32 ui_vco_freq, freq_par;
91 ui_vco_freq = ddr3_get_vco_freq();
93 #if defined(MV88F672X)
94 freq_par = div_ratio[ui_vco_freq][target_freq];
96 /* Find the ratio between PLL frequency and ddr-clk */
98 freq_par = div_ratio2to1[ui_vco_freq][target_freq];
100 freq_par = div_ratio1to1[ui_vco_freq][target_freq];
107 * Name: ddr3_dfs_high_2_low
109 * Args: freq - target frequency
111 * Returns: MV_OK - success, MV_FAIL - fail
/*
 * ddr3_dfs_high_2_low - DDR3 Dynamic Frequency Scaling, high-to-low:
 * move the DRAM into self refresh, reprogram the PLL/clock dividers to
 * the low (100 MHz) training frequency, restore CL/CWL for DLL-off
 * operation, and bring the DRAM back out of self refresh.
 *
 * NOTE(review): this extraction fused the file's original line numbers
 * into every line and dropped many intermediate lines (opening braces,
 * `do {` poll-loop headers, `#else`/`#endif` directives, udelay calls,
 * return statements).  The code below is preserved byte-for-byte as
 * found; restore the missing lines from the upstream source before
 * attempting to compile.
 *
 * freq      - target frequency index (presumably DDR_100) - TODO confirm
 * dram_info - DRAM configuration/state descriptor (cs_ena, reg_dimm, ...)
 * Returns MV_OK on success (return statements elided from this view).
 */
113 int ddr3_dfs_high_2_low(u32 freq, MV_DRAM_INFO *dram_info)
115 #if defined(MV88F78X60) || defined(MV88F672X)
116 /* This Flow is relevant for ArmadaXP A0 */
117 u32 reg, freq_par, tmp;
120 DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
123 /* target frequency - 100MHz */
124 freq_par = ddr3_get_freq_parameter(freq, 0);
126 #if defined(MV88F672X)
128 u32 cpu_freq = ddr3_get_cpu_freq();
129 get_target_freq(cpu_freq, &tmp, &hclk);
132 /* Configure - DRAM DLL final state after DFS is complete - Enable */
133 reg = reg_read(REG_DFS_ADDR);
134 /* [0] - DfsDllNextState - Disable */
135 reg |= (1 << REG_DFS_DLLNEXTSTATE_OFFS);
136 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
139 * Configure - XBAR Retry response during Block to enable internal
142 reg = reg_read(REG_METAL_MASK_ADDR);
143 /* [0] - RetryMask - Disable */
144 reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
145 /* 0x14B0 - Dunit MMask Register */
146 dfs_reg_write(REG_METAL_MASK_ADDR, reg);
148 /* Configure - Block new external transactions - Enable */
149 reg = reg_read(REG_DFS_ADDR);
150 reg |= (1 << REG_DFS_BLOCK_OFFS); /* [1] - DfsBlock - Enable */
151 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
153 /* Registered DIMM support */
154 if (dram_info->reg_dimm) {
156 * Configure - Disable Register DIMM CKE Power
159 reg = (0x9 & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
160 REG_SDRAM_OPERATION_CWA_RC_OFFS;
162 * Configure - Disable Register DIMM CKE Power
163 * Down mode - CWA_DATA
165 reg |= ((0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
166 REG_SDRAM_OPERATION_CWA_DATA_OFFS);
169 * Configure - Disable Register DIMM CKE Power
170 * Down mode - Set Delay - tMRD
172 reg |= (0 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
174 /* Configure - Issue CWA command with the above parameters */
175 reg |= (REG_SDRAM_OPERATION_CMD_CWA &
176 ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
178 /* 0x1418 - SDRAM Operation Register */
179 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
181 /* Poll - Wait for CWA operation completion */
183 reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
184 (REG_SDRAM_OPERATION_CMD_MASK);
187 /* Configure - Disable outputs floating during Self Refresh */
188 reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
189 /* [15] - SRFloatEn - Disable */
190 reg &= ~(1 << REG_REGISTERED_DRAM_CTRL_SR_FLOAT_OFFS);
191 /* 0x16D0 - DDR3 Registered DRAM Control */
192 dfs_reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
195 /* Optional - Configure - DDR3_Rtt_nom_CS# */
196 for (cs = 0; cs < MAX_CS; cs++) {
197 if (dram_info->cs_ena & (1 << cs)) {
198 reg = reg_read(REG_DDR3_MR1_CS_ADDR +
199 (cs << MR_CS_ADDR_OFFS));
200 reg &= REG_DDR3_MR1_RTT_MASK;
201 dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
202 (cs << MR_CS_ADDR_OFFS), reg);
206 /* Configure - Move DRAM into Self Refresh */
207 reg = reg_read(REG_DFS_ADDR);
208 reg |= (1 << REG_DFS_SR_OFFS); /* [2] - DfsSR - Enable */
209 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
211 /* Poll - Wait for Self Refresh indication */
213 reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
214 } while (reg == 0x0); /* 0x1528 [3] - DfsAtSR - Wait for '1' */
216 /* Start of clock change procedure (PLL) */
217 #if defined(MV88F672X)
219 /* Configure cpupll_clkdiv_reset_mask */
220 reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
221 reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
222 /* 0xE8264[7:0] 0xff CPU Clock Dividers Reset mask */
223 dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));
225 /* Configure cpu_clkdiv_reload_smooth */
226 reg = reg_read(CPU_PLL_CNTRL0);
227 reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
228 /* 0xE8260 [15:8] 0x2 CPU Clock Dividers Reload Smooth enable */
229 dfs_reg_write(CPU_PLL_CNTRL0,
230 (reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS)));
232 /* Configure cpupll_clkdiv_relax_en */
233 reg = reg_read(CPU_PLL_CNTRL0);
234 reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
235 /* 0xE8260 [31:24] 0x2 Relax Enable */
236 dfs_reg_write(CPU_PLL_CNTRL0,
237 (reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS)));
239 /* Configure cpupll_clkdiv_ddr_clk_ratio */
240 reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
242 * 0xE8268 [13:8] N Set Training clock:
243 * APLL Out Clock (VCO freq) / N = 100 MHz
245 reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
246 reg |= (freq_par << 8); /* full Integer ratio from PLL-out to ddr-clk */
247 dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);
249 /* Configure cpupll_clkdiv_reload_ratio */
250 reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
251 reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
252 /* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
253 dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
254 (reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS)));
258 /* Configure cpupll_clkdiv_reload_ratio */
259 reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
260 reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
261 /* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
262 dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);
/*
 * NOTE(review): the #else branch (non-MV88F672X clock-divider
 * reconfiguration) appears to begin around here; its #else/#endif
 * directives were lost in extraction - confirm against upstream.
 */
268 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
269 * and in the same chance, block reassertions of reset [15:8] and
270 * force reserved bits[7:0].
273 /* 0x18700 - CPU Div CLK control 0 */
274 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
277 * RelaX whenever reset is asserted to that channel
278 * (good for any case)
281 /* 0x18704 - CPU Div CLK control 0 */
282 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
284 reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
285 REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
287 /* full Integer ratio from PLL-out to ddr-clk */
288 reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
289 /* 0x1870C - CPU Div CLK control 3 register */
290 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);
293 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
294 * All the rest are kept as is (forced, but could be read-modify-write).
295 * This is done now by RMW above.
298 /* Clock is not shut off gracefully - keep it running */
300 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
302 /* Wait before replacing the clock on the DDR Phy Channel. */
306 * This for triggering the frequency update. Bit[24] is the
308 * bits [23:16] == which channels to change ==2 ==>
309 * only DDR Phy (smooth transition)
310 * bits [15:8] == mask reset reassertion due to clock modification
312 * bits [7:0] == not in use
315 /* 0x18700 - CPU Div CLK control 0 register */
316 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
318 udelay(1); /* Wait 1usec */
321 * Poll Div CLK status 0 register - indication that the clocks
322 * are active - 0x18718 [8]
325 reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
326 (1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
330 * Clean the CTRL0, to be ready for next resets and next requests
331 * of ratio modifications.
334 /* 0x18700 - CPU Div CLK control 0 register */
335 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
339 /* End of clock change procedure (PLL) */
341 /* Configure - Select normal clock for the DDR PHY - Enable */
342 reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
343 /* [16] - ddr_phy_trn_clk_sel - Enable */
344 reg |= (1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
345 /* 0x18488 - DRAM Init control status register */
346 dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
348 /* Configure - Set Correct Ratio - 1:1 */
349 /* [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between Dunit and Phy */
351 reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
352 dfs_reg_write(REG_DDR_IO_ADDR, reg); /* 0x1524 - DDR IO Register */
354 /* Configure - 2T Mode - Restore original configuration */
355 reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
356 /* [3:4] 2T - 1T Mode - low freq */
357 reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
358 /* 0x1404 - DDR Controller Control Low Register */
359 dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
361 /* Configure - Restore CL and CWL - MRS Commands */
362 reg = reg_read(REG_DFS_ADDR);
363 reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
364 reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
365 /* [8] - DfsCLNextState - MRS CL=6 after DFS (due to DLL-off mode) */
366 reg |= (0x4 << REG_DFS_CL_NEXT_STATE_OFFS);
367 /* [12] - DfsCWLNextState - MRS CWL=6 after DFS (due to DLL-off mode) */
368 reg |= (0x1 << REG_DFS_CWL_NEXT_STATE_OFFS);
369 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
371 /* Poll - Wait for APLL + ADLLs lock on new frequency */
373 reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
374 REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
375 /* 0x1674 [10:0] - Phy lock status Register */
376 } while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);
378 /* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
379 reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
380 /* [30:29] = 0 - Data Pup R/W path reset */
381 /* 0x1400 - SDRAM Configuration register */
382 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
385 * Configure - DRAM Data PHY Read [30], Write [29] path
386 * reset - Release Reset
388 reg = (reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK);
389 /* [30:29] = '11' - Data Pup R/W path reset */
390 /* 0x1400 - SDRAM Configuration register */
391 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
393 /* Registered DIMM support */
394 if (dram_info->reg_dimm) {
396 * Configure - Change register DRAM operating speed
397 * (below 400MHz) - CWA_RC
399 reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
400 REG_SDRAM_OPERATION_CWA_RC_OFFS;
403 * Configure - Change register DRAM operating speed
404 * (below 400MHz) - CWA_DATA
406 reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
407 REG_SDRAM_OPERATION_CWA_DATA_OFFS);
409 /* Configure - Set Delay - tSTAB */
410 reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
412 /* Configure - Issue CWA command with the above parameters */
413 reg |= (REG_SDRAM_OPERATION_CMD_CWA &
414 ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
416 /* 0x1418 - SDRAM Operation Register */
417 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
419 /* Poll - Wait for CWA operation completion */
421 reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
422 (REG_SDRAM_OPERATION_CMD_MASK);
426 /* Configure - Exit Self Refresh */
428 reg = (reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS));
429 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
432 * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices
433 * on all ranks are NOT in self refresh mode
436 reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
437 } while (reg); /* Wait for '0' */
439 /* Configure - Issue Refresh command */
440 /* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
441 reg = REG_SDRAM_OPERATION_CMD_RFRS;
442 for (cs = 0; cs < MAX_CS; cs++) {
443 if (dram_info->cs_ena & (1 << cs))
444 reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
447 /* 0x1418 - SDRAM Operation Register */
448 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
450 /* Poll - Wait for Refresh operation completion */
451 wait_refresh_op_complete();
453 /* Configure - Block new external transactions - Disable */
454 reg = reg_read(REG_DFS_ADDR);
455 reg &= ~(1 << REG_DFS_BLOCK_OFFS); /* [1] - DfsBlock - Disable */
456 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
459 * Configure - XBAR Retry response during Block to enable
460 * internal access - Disable
462 reg = reg_read(REG_METAL_MASK_ADDR);
463 /* [0] - RetryMask - Enable */
464 reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
465 /* 0x14B0 - Dunit MMask Register */
466 dfs_reg_write(REG_METAL_MASK_ADDR, reg);
468 for (cs = 0; cs < MAX_CS; cs++) {
469 if (dram_info->cs_ena & (1 << cs)) {
470 /* Configure - Set CL */
471 reg = reg_read(REG_DDR3_MR0_CS_ADDR +
472 (cs << MR_CS_ADDR_OFFS)) &
473 ~REG_DDR3_MR0_CL_MASK;
474 tmp = 0x4; /* CL=6 - 0x4 */
475 reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
476 reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
477 dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
478 (cs << MR_CS_ADDR_OFFS), reg);
480 /* Configure - Set CWL */
481 reg = reg_read(REG_DDR3_MR2_CS_ADDR +
482 (cs << MR_CS_ADDR_OFFS))
483 & ~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
485 reg |= ((0x1) << REG_DDR3_MR2_CWL_OFFS);
486 dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
487 (cs << MR_CS_ADDR_OFFS), reg);
491 DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfuly - new Frequency - ",
/*
 * NOTE(review): the #else separating the two SoC flows (and the first
 * flow's return statement) is missing from this view - confirm against
 * upstream before compiling.
 */
496 /* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
501 DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
504 /* target frequency - 100MHz */
505 freq_par = ddr3_get_freq_parameter(freq, 0);
508 /* 0x18700 - CPU Div CLK control 0 */
509 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
511 /* 0x1600 - ODPG_CNTRL_Control */
512 reg = reg_read(REG_ODPG_CNTRL_ADDR);
513 /* [21] = 1 - auto refresh disable */
514 reg |= (1 << REG_ODPG_CNTRL_OFFS);
515 dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
517 /* 0x1670 - PHY lock mask register */
518 reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
519 reg &= REG_PHY_LOCK_MASK_MASK; /* [11:0] = 0 */
520 dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
522 reg = reg_read(REG_DFS_ADDR); /* 0x1528 - DFS register */
524 /* Disable reconfig */
525 reg &= ~0x10; /* [4] - Enable reconfig MR registers after DFS_ERG */
526 reg |= 0x1; /* [0] - DRAM DLL disabled after DFS */
528 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
530 reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0); /* [0] - disable */
531 /* 0x14B0 - Dunit MMask Register */
532 dfs_reg_write(REG_METAL_MASK_ADDR, reg);
534 /* [1] - DFS Block enable */
535 reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
536 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
538 /* [2] - DFS Self refresh enable */
539 reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
540 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
543 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
544 * All DRAM devices on all ranks are in self refresh mode -
545 * DFS can be executed afterwards
548 reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
549 } while (reg == 0x0); /* Wait for '1' */
551 /* Disable ODT on DLL-off mode */
552 dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR,
553 REG_SDRAM_ODT_CTRL_HIGH_OVRD_MASK);
556 reg = (reg_read(REG_PHY_LOCK_MASK_ADDR) & REG_PHY_LOCK_MASK_MASK);
557 /* 0x1670 - PHY lock mask register */
558 dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
560 /* Add delay between entering SR and start ratio modification */
564 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
565 * and in the same chance, block reassertions of reset [15:8] and
566 * force reserved bits[7:0].
569 /* 0x18700 - CPU Div CLK control 0 */
570 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
573 * RelaX whenever reset is asserted to that channel (good for any case)
576 /* 0x18700 - CPU Div CLK control 0 */
577 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
579 reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
580 REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
581 /* Full Integer ratio from PLL-out to ddr-clk */
582 reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
583 /* 0x1870C - CPU Div CLK control 3 register */
584 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);
587 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
588 * All the rest are kept as is (forced, but could be read-modify-write).
589 * This is done now by RMW above.
592 /* Clock is not shut off gracefully - keep it running */
594 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
596 /* Wait before replacing the clock on the DDR Phy Channel. */
600 * This for triggering the frequency update. Bit[24] is the
602 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
603 * (smooth transition)
604 * bits [15:8] == mask reset reassertion due to clock modification
606 * bits [7:0] == not in use
609 /* 0x18700 - CPU Div CLK control 0 register */
610 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
612 udelay(1); /* Wait 1usec */
615 * Poll Div CLK status 0 register - indication that the clocks
616 * are active - 0x18718 [8]
619 reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
620 (1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
624 * Clean the CTRL0, to be ready for next resets and next requests of
625 * ratio modifications.
628 /* 0x18700 - CPU Div CLK control 0 register */
629 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
633 /* Switch HCLK Mux to training clk (100Mhz), keep DFS request bit */
635 /* 0x18488 - DRAM Init control status register */
636 dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
638 reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
639 /* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
640 dfs_reg_write(REG_DDR_IO_ADDR, reg); /* 0x1524 - DDR IO Regist */
642 reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
643 /* [31:30]] - reset pup data ctrl ADLL */
644 /* 0x15EC - DRAM PHY Config register */
645 dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
647 reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK);
648 /* [31:30] - normal pup data ctrl ADLL */
649 /* 0x15EC - DRAM PHY Config register */
650 dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
652 udelay(1); /* Wait 1usec */
655 reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7);
656 dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
658 /* Poll Phy lock status register - APLL lock indication - 0x1674 */
660 reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
661 REG_PHY_LOCK_STATUS_LOCK_MASK;
662 } while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK); /* Wait for '0xFFF' */
664 reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
665 /* [30:29] = 0 - Data Pup R/W path reset */
666 /* 0x1400 - SDRAM Configuration register */
667 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
669 reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
670 /* [30:29] = '11' - Data Pup R/W path reset */
671 /* 0x1400 - SDRAM Configuration register */
672 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
674 udelay(1000); /* Wait 1msec */
676 for (cs = 0; cs < MAX_CS; cs++) {
677 if (dram_info->cs_ena & (1 << cs)) {
678 /* Poll - Wait for Refresh operation completion */
679 wait_refresh_op_complete();
681 /* Config CL and CWL with MR0 and MR2 registers */
682 reg = reg_read(REG_DDR3_MR0_ADDR);
683 reg &= ~0x74; /* CL [3:0]; [6:4],[2] */
684 reg |= (1 << 5); /* CL = 4, CAS is 6 */
685 dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
686 reg = REG_SDRAM_OPERATION_CMD_MR0 &
687 ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
688 /* 0x1418 - SDRAM Operation Register */
689 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
691 /* Poll - Wait for Refresh operation completion */
692 wait_refresh_op_complete();
694 reg = reg_read(REG_DDR3_MR2_ADDR);
695 reg &= ~0x38; /* CWL [5:3] */
696 reg |= (1 << 3); /* CWL = 1, CWL is 6 */
697 dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
699 reg = REG_SDRAM_OPERATION_CMD_MR2 &
700 ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
701 /* 0x1418 - SDRAM Operation Register */
702 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
704 /* Poll - Wait for Refresh operation completion */
705 wait_refresh_op_complete();
707 /* Set current rd_sample_delay */
708 reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
709 reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
710 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
711 reg |= (5 << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
712 dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
714 /* Set current rd_ready_delay */
715 reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
716 reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
717 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
718 reg |= ((6) << (REG_READ_DATA_READY_DELAYS_OFFS * cs));
719 dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
723 /* [2] - DFS Self refresh disable */
724 reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
725 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
727 /* [1] - DFS Block enable */
728 reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
729 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
732 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
733 * All DRAM devices on all ranks are in self refresh mode - DFS can
734 * be executed afterwards
737 reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
738 } while (reg); /* Wait for '1' */
740 reg = (reg_read(REG_METAL_MASK_ADDR) | (1 << 0));
741 /* [0] - Enable Dunit to crossbar retry */
742 /* 0x14B0 - Dunit MMask Register */
743 dfs_reg_write(REG_METAL_MASK_ADDR, reg);
745 /* 0x1600 - PHY lock mask register */
746 reg = reg_read(REG_ODPG_CNTRL_ADDR);
747 reg &= ~(1 << REG_ODPG_CNTRL_OFFS); /* [21] = 0 */
748 dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
750 /* 0x1670 - PHY lock mask register */
751 reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
752 reg |= ~REG_PHY_LOCK_MASK_MASK; /* [11:0] = FFF */
753 dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
755 DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfuly - new Frequency - ",
763 * Name: ddr3_dfs_low_2_high
765 * Args: freq - target frequency
767 * Returns: MV_OK - success, MV_FAIL - fail
769 int ddr3_dfs_low_2_high(u32 freq, int ratio_2to1, MV_DRAM_INFO *dram_info)
771 #if defined(MV88F78X60) || defined(MV88F672X)
772 /* This Flow is relevant for ArmadaXP A0 */
773 u32 reg, freq_par, tmp;
776 DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
779 /* target frequency - freq */
780 freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
782 #if defined(MV88F672X)
784 u32 cpu_freq = ddr3_get_cpu_freq();
785 get_target_freq(cpu_freq, &tmp, &hclk);
788 /* Configure - DRAM DLL final state after DFS is complete - Enable */
789 reg = reg_read(REG_DFS_ADDR);
790 /* [0] - DfsDllNextState - Enable */
791 reg &= ~(1 << REG_DFS_DLLNEXTSTATE_OFFS);
792 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
795 * Configure - XBAR Retry response during Block to enable
796 * internal access - Disable
798 reg = reg_read(REG_METAL_MASK_ADDR);
799 /* [0] - RetryMask - Disable */
800 reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
801 /* 0x14B0 - Dunit MMask Register */
802 dfs_reg_write(REG_METAL_MASK_ADDR, reg);
804 /* Configure - Block new external transactions - Enable */
805 reg = reg_read(REG_DFS_ADDR);
806 reg |= (1 << REG_DFS_BLOCK_OFFS); /* [1] - DfsBlock - Enable */
807 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
809 /* Configure - Move DRAM into Self Refresh */
810 reg = reg_read(REG_DFS_ADDR);
811 reg |= (1 << REG_DFS_SR_OFFS); /* [2] - DfsSR - Enable */
812 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
814 /* Poll - Wait for Self Refresh indication */
816 reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
817 } while (reg == 0x0); /* 0x1528 [3] - DfsAtSR - Wait for '1' */
819 /* Start of clock change procedure (PLL) */
820 #if defined(MV88F672X)
822 /* Configure cpupll_clkdiv_reset_mask */
823 reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
824 reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
825 /* 0xE8264[7:0] 0xff CPU Clock Dividers Reset mask */
826 dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));
828 /* Configure cpu_clkdiv_reload_smooth */
829 reg = reg_read(CPU_PLL_CNTRL0);
830 reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
831 /* 0xE8260 [15:8] 0x2 CPU Clock Dividers Reload Smooth enable */
832 dfs_reg_write(CPU_PLL_CNTRL0,
833 reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS));
835 /* Configure cpupll_clkdiv_relax_en */
836 reg = reg_read(CPU_PLL_CNTRL0);
837 reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
838 /* 0xE8260 [31:24] 0x2 Relax Enable */
839 dfs_reg_write(CPU_PLL_CNTRL0,
840 reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS));
842 /* Configure cpupll_clkdiv_ddr_clk_ratio */
843 reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
845 * 0xE8268 [13:8] N Set Training clock:
846 * APLL Out Clock (VCO freq) / N = 100 MHz
848 reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
849 reg |= (freq_par << 8); /* full Integer ratio from PLL-out to ddr-clk */
850 dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);
851 /* Configure cpupll_clkdiv_reload_ratio */
852 reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
853 reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
854 /* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
855 dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
856 reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS));
860 /* Configure cpupll_clkdiv_reload_ratio */
861 reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
862 reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
863 /* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
864 dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);
870 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
871 * and in the same chance, block reassertions of reset [15:8]
872 * and force reserved bits[7:0].
876 /* 0x18700 - CPU Div CLK control 0 */
877 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
880 * RelaX whenever reset is asserted to that channel (good for any case)
883 /* 0x18704 - CPU Div CLK control 0 */
884 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
886 reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
887 REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
888 reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
889 /* full Integer ratio from PLL-out to ddr-clk */
890 /* 0x1870C - CPU Div CLK control 3 register */
891 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);
894 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
895 * All the rest are kept as is (forced, but could be read-modify-write).
896 * This is done now by RMW above.
899 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
901 /* Wait before replacing the clock on the DDR Phy Channel. */
906 * This for triggering the frequency update. Bit[24] is the
908 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
909 * (smooth transition)
910 * bits [15:8] == mask reset reassertion due to clock modification
912 * bits [7:0] == not in use
914 /* 0x18700 - CPU Div CLK control 0 register */
915 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
920 * Poll Div CLK status 0 register - indication that the clocks
921 * are active - 0x18718 [8]
924 reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
925 (1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
930 * Clean the CTRL0, to be ready for next resets and next requests
931 * of ratio modifications.
933 /* 0x18700 - CPU Div CLK control 0 register */
934 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
936 /* End of clock change procedure (PLL) */
939 /* Configure - Select normal clock for the DDR PHY - Disable */
940 reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
941 /* [16] - ddr_phy_trn_clk_sel - Disable */
942 reg &= ~(1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
943 /* 0x18488 - DRAM Init control status register */
944 dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
948 * Configure - Set Correct Ratio - according to target ratio
949 * parameter - 2:1/1:1
953 * [15] - Phy2UnitClkRatio = 1 - Set 2:1 Ratio between
956 reg = reg_read(REG_DDR_IO_ADDR) |
957 (1 << REG_DDR_IO_CLK_RATIO_OFFS);
960 * [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between
963 reg = reg_read(REG_DDR_IO_ADDR) &
964 ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
966 dfs_reg_write(REG_DDR_IO_ADDR, reg); /* 0x1524 - DDR IO Register */
968 /* Configure - 2T Mode - Restore original configuration */
969 reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
970 /* [3:4] 2T - Restore value */
971 reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
972 reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
973 REG_DUNIT_CTRL_LOW_2T_OFFS);
974 /* 0x1404 - DDR Controller Control Low Register */
975 dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
977 /* Configure - Restore CL and CWL - MRS Commands */
978 reg = reg_read(REG_DFS_ADDR);
979 reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
980 reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
982 if (freq == DDR_400) {
983 if (dram_info->target_frequency == 0x8)
984 tmp = ddr3_cl_to_valid_cl(5);
986 tmp = ddr3_cl_to_valid_cl(6);
988 tmp = ddr3_cl_to_valid_cl(dram_info->cl);
991 /* [8] - DfsCLNextState */
992 reg |= ((tmp & REG_DFS_CL_NEXT_STATE_MASK) << REG_DFS_CL_NEXT_STATE_OFFS);
993 if (freq == DDR_400) {
994 /* [12] - DfsCWLNextState */
995 reg |= (((0) & REG_DFS_CWL_NEXT_STATE_MASK) <<
996 REG_DFS_CWL_NEXT_STATE_OFFS);
998 /* [12] - DfsCWLNextState */
999 reg |= (((dram_info->cwl) & REG_DFS_CWL_NEXT_STATE_MASK) <<
1000 REG_DFS_CWL_NEXT_STATE_OFFS);
1002 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1004 /* Optional - Configure - DDR3_Rtt_nom_CS# */
1005 for (cs = 0; cs < MAX_CS; cs++) {
1006 if (dram_info->cs_ena & (1 << cs)) {
1007 reg = reg_read(REG_DDR3_MR1_CS_ADDR +
1008 (cs << MR_CS_ADDR_OFFS));
1009 reg &= REG_DDR3_MR1_RTT_MASK;
1010 reg |= odt_static[dram_info->cs_ena][cs];
1011 dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
1012 (cs << MR_CS_ADDR_OFFS), reg);
1016 /* Configure - Reset ADLLs - Set Reset */
1017 reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
1018 /* [31:30]] - reset pup data ctrl ADLL */
1019 /* 0x15EC - DRAM PHY Config Register */
1020 dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1022 /* Configure - Reset ADLLs - Release Reset */
1023 reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
1024 /* [31:30] - normal pup data ctrl ADLL */
1025 /* 0x15EC - DRAM PHY Config register */
1026 dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1028 /* Poll - Wait for APLL + ADLLs lock on new frequency */
1030 reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1031 REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
1032 /* 0x1674 [10:0] - Phy lock status Register */
1033 } while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);
1035 /* Configure - Reset the PHY SDR clock divider */
1037 /* Pup Reset Divider B - Set Reset */
1038 /* [28] - DataPupRdRST = 0 */
1039 reg = reg_read(REG_SDRAM_CONFIG_ADDR) &
1040 ~(1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
1041 /* [28] - DataPupRdRST = 1 */
1042 tmp = reg_read(REG_SDRAM_CONFIG_ADDR) |
1043 (1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
1044 /* 0x1400 - SDRAM Configuration register */
1045 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1047 /* Pup Reset Divider B - Release Reset */
1048 /* 0x1400 - SDRAM Configuration register */
1049 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
1052 /* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
1053 reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
1054 /* [30:29] = 0 - Data Pup R/W path reset */
1055 /* 0x1400 - SDRAM Configuration register */
1056 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1059 * Configure - DRAM Data PHY Read [30], Write [29] path reset -
1062 reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
1063 /* [30:29] = '11' - Data Pup R/W path reset */
1064 /* 0x1400 - SDRAM Configuration register */
1065 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1067 /* Registered DIMM support */
1068 if (dram_info->reg_dimm) {
1070 * Configure - Change register DRAM operating speed
1071 * (DDR3-1333 / DDR3-1600) - CWA_RC
1073 reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
1074 REG_SDRAM_OPERATION_CWA_RC_OFFS;
1075 if (freq <= DDR_400) {
1077 * Configure - Change register DRAM operating speed
1078 * (DDR3-800) - CWA_DATA
1080 reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1081 REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1082 } else if ((freq > DDR_400) && (freq <= DDR_533)) {
1084 * Configure - Change register DRAM operating speed
1085 * (DDR3-1066) - CWA_DATA
1087 reg |= ((0x1 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1088 REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1089 } else if ((freq > DDR_533) && (freq <= DDR_666)) {
1091 * Configure - Change register DRAM operating speed
1092 * (DDR3-1333) - CWA_DATA
1094 reg |= ((0x2 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1095 REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1098 * Configure - Change register DRAM operating speed
1099 * (DDR3-1600) - CWA_DATA
1101 reg |= ((0x3 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1102 REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1105 /* Configure - Set Delay - tSTAB */
1106 reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
1107 /* Configure - Issue CWA command with the above parameters */
1108 reg |= (REG_SDRAM_OPERATION_CMD_CWA &
1109 ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
1111 /* 0x1418 - SDRAM Operation Register */
1112 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1114 /* Poll - Wait for CWA operation completion */
1116 reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
1117 REG_SDRAM_OPERATION_CMD_MASK;
1121 /* Configure - Exit Self Refresh */
1123 reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
1124 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1127 * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM
1128 * devices on all ranks are NOT in self refresh mode
1131 reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1132 } while (reg); /* Wait for '0' */
1134 /* Configure - Issue Refresh command */
1135 /* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
1136 reg = REG_SDRAM_OPERATION_CMD_RFRS;
1137 for (cs = 0; cs < MAX_CS; cs++) {
1138 if (dram_info->cs_ena & (1 << cs))
1139 reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1142 /* 0x1418 - SDRAM Operation Register */
1143 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1145 /* Poll - Wait for Refresh operation completion */
1146 wait_refresh_op_complete();
1148 /* Configure - Block new external transactions - Disable */
1149 reg = reg_read(REG_DFS_ADDR);
1150 reg &= ~(1 << REG_DFS_BLOCK_OFFS); /* [1] - DfsBlock - Disable */
1151 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1154 * Configure - XBAR Retry response during Block to enable
1155 * internal access - Disable
1157 reg = reg_read(REG_METAL_MASK_ADDR);
1158 /* [0] - RetryMask - Enable */
1159 reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
1160 /* 0x14B0 - Dunit MMask Register */
1161 dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1163 for (cs = 0; cs < MAX_CS; cs++) {
1164 if (dram_info->cs_ena & (1 << cs)) {
1165 /* Configure - Set CL */
1166 reg = reg_read(REG_DDR3_MR0_CS_ADDR +
1167 (cs << MR_CS_ADDR_OFFS)) &
1168 ~REG_DDR3_MR0_CL_MASK;
1169 if (freq == DDR_400)
1170 tmp = ddr3_cl_to_valid_cl(6);
1172 tmp = ddr3_cl_to_valid_cl(dram_info->cl);
1173 reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
1174 reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
1175 dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
1176 (cs << MR_CS_ADDR_OFFS), reg);
1178 /* Configure - Set CWL */
1179 reg = reg_read(REG_DDR3_MR2_CS_ADDR +
1180 (cs << MR_CS_ADDR_OFFS)) &
1181 ~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
1182 if (freq == DDR_400)
1183 reg |= ((0) << REG_DDR3_MR2_CWL_OFFS);
1185 reg |= ((dram_info->cwl) << REG_DDR3_MR2_CWL_OFFS);
1186 dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
1187 (cs << MR_CS_ADDR_OFFS), reg);
1191 DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfuly - new Frequency - ",
1198 /* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
1200 u32 reg, freq_par, tmp;
1203 DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
1206 /* target frequency - freq */
1207 freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
1210 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
1212 /* 0x1600 - PHY lock mask register */
1213 reg = reg_read(REG_ODPG_CNTRL_ADDR);
1214 reg |= (1 << REG_ODPG_CNTRL_OFFS); /* [21] = 1 */
1215 dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
1217 /* 0x1670 - PHY lock mask register */
1218 reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
1219 reg &= REG_PHY_LOCK_MASK_MASK; /* [11:0] = 0 */
1220 dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
1222 /* Enable reconfig MR Registers after DFS */
1223 reg = reg_read(REG_DFS_ADDR); /* 0x1528 - DFS register */
1224 /* [4] - Disable - reconfig MR registers after DFS_ERG */
1226 /* [0] - Enable - DRAM DLL after DFS */
1227 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1229 /* Disable DRAM Controller to crossbar retry */
1231 reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0);
1232 /* 0x14B0 - Dunit MMask Register */
1233 dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1235 /* Enable DRAM Blocking */
1236 /* [1] - DFS Block enable */
1237 reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
1238 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1240 /* Enable Self refresh */
1241 /* [2] - DFS Self refresh enable */
1242 reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
1243 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1246 * Poll DFS Register - All DRAM devices on all ranks are in
1247 * self refresh mode - DFS can be executed afterwards
1249 /* 0x1528 [3] - DfsAtSR */
1251 reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1252 } while (reg == 0x0); /* Wait for '1' */
1255 * Set Correct Ratio - if freq>MARGIN_FREQ use 2:1 ratio
1256 * else use 1:1 ratio
1259 /* [15] = 1 - Set 2:1 Ratio between Dunit and Phy */
1260 reg = reg_read(REG_DDR_IO_ADDR) |
1261 (1 << REG_DDR_IO_CLK_RATIO_OFFS);
1263 /* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
1264 reg = reg_read(REG_DDR_IO_ADDR) &
1265 ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
1267 dfs_reg_write(REG_DDR_IO_ADDR, reg); /* 0x1524 - DDR IO Register */
1269 /* Switch HCLK Mux from (100Mhz) [16]=0, keep DFS request bit */
1272 * [29] - training logic request DFS, [28:27] -
1273 * preload patterns frequency [18]
1276 /* 0x18488 - DRAM Init control status register */
1277 dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
1279 /* Add delay between entering SR and start ratio modification */
1283 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
1284 * and in the same chance, block reassertions of reset [15:8] and
1285 * force reserved bits[7:0].
1288 /* 0x18700 - CPU Div CLK control 0 */
1289 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1292 * RelaX whenever reset is asserted to that channel (good for any case)
1295 /* 0x18704 - CPU Div CLK control 0 */
1296 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
1298 reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
1299 REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
1300 reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
1301 /* Full Integer ratio from PLL-out to ddr-clk */
1302 /* 0x1870C - CPU Div CLK control 3 register */
1303 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);
1306 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
1307 * All the rest are kept as is (forced, but could be read-modify-write).
1308 * This is done now by RMW above.
1313 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
1315 /* Wait before replacing the clock on the DDR Phy Channel. */
1320 * This for triggering the frequency update. Bit[24] is the
1322 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
1323 * (smooth transition)
1324 * bits [15:8] == mask reset reassertion due to clock modification
1325 * to these channels.
1326 * bits [7:0] == not in use
1328 /* 0x18700 - CPU Div CLK control 0 register */
1329 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1334 * Poll Div CLK status 0 register - indication that the clocks are
1335 * active - 0x18718 [8]
1338 reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
1339 (1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
1344 * Clean the CTRL0, to be ready for next resets and next requests of
1345 * ratio modifications.
1347 /* 0x18700 - CPU Div CLK control 0 register */
1348 dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1353 /* Pup Reset Divider B - Set Reset */
1354 /* [28] = 0 - Pup Reset Divider B */
1355 reg = reg_read(REG_SDRAM_CONFIG_ADDR) & ~(1 << 28);
1356 /* [28] = 1 - Pup Reset Divider B */
1357 tmp = reg_read(REG_SDRAM_CONFIG_ADDR) | (1 << 28);
1358 /* 0x1400 - SDRAM Configuration register */
1359 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1361 /* Pup Reset Divider B - Release Reset */
1362 /* 0x1400 - SDRAM Configuration register */
1363 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
1366 /* DRAM Data PHYs ADLL Reset - Set Reset */
1367 reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK);
1368 /* [31:30] - reset pup data ctrl ADLL */
1369 /* 0x15EC - DRAM PHY Config Register */
1370 dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1374 /* APLL lock indication - Poll Phy lock status Register - 0x1674 [9] */
1376 reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1377 (1 << REG_PHY_LOCK_STATUS_LOCK_OFFS);
1380 /* DRAM Data PHYs ADLL Reset - Release Reset */
1381 reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
1382 /* [31:30] - normal pup data ctrl ADLL */
1383 /* 0x15EC - DRAM PHY Config register */
1384 dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1386 udelay(10000); /* Wait 10msec */
1389 * APLL lock indication - Poll Phy lock status Register - 0x1674 [11:0]
1392 reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1393 REG_PHY_LOCK_STATUS_LOCK_MASK;
1394 } while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);
1396 /* DRAM Data PHY Read [30], Write [29] path reset - Set Reset */
1397 reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
1398 /* [30:29] = 0 - Data Pup R/W path reset */
1399 /* 0x1400 - SDRAM Configuration register */
1400 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1402 /* DRAM Data PHY Read [30], Write [29] path reset - Release Reset */
1403 reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
1404 /* [30:29] = '11' - Data Pup R/W path reset */
1405 /* 0x1400 - SDRAM Configuration register */
1406 dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1408 /* Disable DFS Reconfig */
1409 reg = reg_read(REG_DFS_ADDR) & ~(1 << 4);
1410 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1412 /* [2] - DFS Self refresh disable */
1413 reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
1414 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1417 * Poll DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices on
1418 * all ranks are NOT in self refresh mode
1421 reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1422 } while (reg); /* Wait for '0' */
1425 reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7) | 0x2;
1427 /* Configure - 2T Mode - Restore original configuration */
1428 /* [3:4] 2T - Restore value */
1429 reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
1430 reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
1431 REG_DUNIT_CTRL_LOW_2T_OFFS);
1432 dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
1434 udelay(1); /* Wait 1us */
1436 for (cs = 0; cs < MAX_CS; cs++) {
1437 if (dram_info->cs_ena & (1 << cs)) {
1438 reg = (reg_read(REG_DDR3_MR1_ADDR));
1440 reg &= ~(1 << REG_DDR3_MR1_DLL_ENA_OFFS);
1441 dfs_reg_write(REG_DDR3_MR1_ADDR, reg);
1443 /* Issue MRS Command to current cs */
1444 reg = REG_SDRAM_OPERATION_CMD_MR1 &
1445 ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1447 * [3-0] = 0x4 - MR1 Command, [11-8] -
1450 /* 0x1418 - SDRAM Operation Register */
1451 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1453 /* Poll - Wait for Refresh operation completion */
1454 wait_refresh_op_complete();
1456 /* DLL Reset - MR0 */
1457 reg = reg_read(REG_DDR3_MR0_ADDR);
1458 dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
1460 /* Issue MRS Command to current cs */
1461 reg = REG_SDRAM_OPERATION_CMD_MR0 &
1462 ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1464 * [3-0] = 0x4 - MR1 Command, [11-8] -
1467 /* 0x1418 - SDRAM Operation Register */
1468 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1470 /* Poll - Wait for Refresh operation completion */
1471 wait_refresh_op_complete();
1473 reg = reg_read(REG_DDR3_MR0_ADDR);
1474 reg &= ~0x74; /* CL [3:0]; [6:4],[2] */
1476 if (freq == DDR_400)
1477 tmp = ddr3_cl_to_valid_cl(6) & 0xF;
1479 tmp = ddr3_cl_to_valid_cl(dram_info->cl) & 0xF;
1481 reg |= ((tmp & 0x1) << 2);
1482 reg |= ((tmp >> 1) << 4); /* to bit 4 */
1483 dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
1485 reg = REG_SDRAM_OPERATION_CMD_MR0 &
1486 ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1487 /* 0x1418 - SDRAM Operation Register */
1488 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1490 /* Poll - Wait for Refresh operation completion */
1491 wait_refresh_op_complete();
1493 reg = reg_read(REG_DDR3_MR2_ADDR);
1494 reg &= ~0x38; /* CWL [5:3] */
1495 /* CWL = 0; for 400 MHz it is 5 */
1496 if (freq != DDR_400)
1497 reg |= dram_info->cwl << REG_DDR3_MR2_CWL_OFFS;
1498 dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
1499 reg = REG_SDRAM_OPERATION_CMD_MR2 &
1500 ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1501 /* 0x1418 - SDRAM Operation Register */
1502 dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1504 /* Poll - Wait for Refresh operation completion */
1505 wait_refresh_op_complete();
1507 /* Set current rd_sample_delay */
1508 reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
1509 reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
1510 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1511 reg |= (dram_info->cl <<
1512 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1513 dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
1515 /* Set current rd_ready_delay */
1516 reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
1517 reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
1518 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
1519 reg |= ((dram_info->cl + 1) <<
1520 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1521 dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
1525 /* Enable ODT on DLL-on mode */
1526 dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, 0);
1528 /* [1] - DFS Block disable */
1529 reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
1530 dfs_reg_write(REG_DFS_ADDR, reg); /* 0x1528 - DFS register */
1532 /* Change DDR frequency to 100MHz procedure: */
1533 /* 0x1600 - PHY lock mask register */
1534 reg = reg_read(REG_ODPG_CNTRL_ADDR);
1535 reg &= ~(1 << REG_ODPG_CNTRL_OFFS); /* [21] = 0 */
1536 dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
1538 /* Change DDR frequency to 100MHz procedure: */
1539 /* 0x1670 - PHY lock mask register */
1540 reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
1541 reg |= ~REG_PHY_LOCK_MASK_MASK; /* [11:0] = FFF */
1542 dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
1544 reg = reg_read(REG_METAL_MASK_ADDR) | (1 << 0); /* [0] - disable */
1545 /* 0x14B0 - Dunit MMask Register */
1546 dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1548 DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfuly - new Frequency - ",