// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/ddr.h>
#include <linux/iopoll.h>
#include "stm32mp1_ddr.h"
#include "stm32mp1_ddr_regs.h"
#define RCC_DDRITFCR		0xD8

#define RCC_DDRITFCR_DDRCAPBRST		(BIT(14))
#define RCC_DDRITFCR_DDRCAXIRST		(BIT(15))
#define RCC_DDRITFCR_DDRCORERST		(BIT(16))
#define RCC_DDRITFCR_DPHYAPBRST		(BIT(17))
#define RCC_DDRITFCR_DPHYRST		(BIT(18))
#define RCC_DDRITFCR_DPHYCTLRST		(BIT(19))
struct reg_desc {
	const char *name;
	u16 offset;	/* offset for base address */
	u8 par_offset;	/* offset for parameter array */
};

#define INVALID_OFFSET	0xFF
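
/*
 * INVALID_OFFSET marks a register that has no counterpart in the device
 * tree parameter arrays (the dynamic registers below): set_reg() reports
 * an error instead of programming such an entry.
 */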
#define DDRCTL_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrctl, x),\
	 offsetof(struct y, x)}

#define DDRPHY_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrphy, x),\
	 offsetof(struct y, x)}

#define DDR_REG_DYN(x) \
	{#x,\
	 offsetof(struct stm32mp1_ddrctl, x),\
	 INVALID_OFFSET}

#define DDRPHY_REG_DYN(x) \
	{#x,\
	 offsetof(struct stm32mp1_ddrphy, x),\
	 INVALID_OFFSET}
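
/*
 * For illustration, DDRCTL_REG(pwrctl, stm32mp1_ddrctrl_reg) expands to
 * {"pwrctl",
 *  offsetof(struct stm32mp1_ddrctl, pwrctl),
 *  offsetof(struct stm32mp1_ddrctrl_reg, pwrctl)}
 * so the same field name locates the register in the memory-mapped
 * controller and its value in the parameter structure built from the
 * device tree.
 */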
/***********************************************************
 * PARAMETERS: values retrieved from the device tree.
 * Size and order must stay aligned with the binding:
 * modification NOT ALLOWED!!!
 ***********************************************************/
#define DDRCTL_REG_REG_SIZE	25	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	12	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	9	/* st,ctl-map */
#define DDRCTL_REG_PERF_SIZE	17	/* st,ctl-perf */

#define DDRPHY_REG_REG_SIZE	11	/* st,phy-reg */
#define DDRPHY_REG_TIMING_SIZE	10	/* st,phy-timing */
#define DDRPHY_REG_CAL_SIZE	12	/* st,phy-cal */
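
/*
 * Each of the arrays below mirrors one device tree property (named in the
 * comments above, e.g. st,ctl-reg): the property is expected to provide
 * exactly the number of u32 cells given by the matching *_SIZE define, in
 * the same order as the array entries, so that par_offset keeps pointing
 * at the right cell of the parsed parameter structure.
 */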
#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};
#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};
#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};
#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};
#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};
#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};
#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[DDRPHY_REG_CAL_SIZE] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};
/**************************************************************
 * DYNAMIC REGISTERS: only used for debug purposes (read/modify)
 **************************************************************/
#ifdef CONFIG_STM32MP1_DDR_INTERACTIVE
static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};

#define DDR_REG_DYN_SIZE	ARRAY_SIZE(ddr_dyn)
static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
	DDRPHY_REG_DYN(zq0sr0),
	DDRPHY_REG_DYN(zq0sr1),
	DDRPHY_REG_DYN(dx0gsr0),
	DDRPHY_REG_DYN(dx0gsr1),
	DDRPHY_REG_DYN(dx1gsr0),
	DDRPHY_REG_DYN(dx1gsr1),
	DDRPHY_REG_DYN(dx2gsr0),
	DDRPHY_REG_DYN(dx2gsr1),
	DDRPHY_REG_DYN(dx3gsr0),
	DDRPHY_REG_DYN(dx3gsr1),
};

#define DDRPHY_REG_DYN_SIZE	ARRAY_SIZE(ddrphy_dyn)

#endif
/*****************************************************************
 * REGISTERS ARRAY: used to parse device tree and interactive mode
 *****************************************************************/
enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
#ifdef CONFIG_STM32MP1_DDR_INTERACTIVE
/* dynamic registers => managed in driver or not changed,
 * can be dumped in interactive mode
 */
	REG_DYN,
	REGPHY_DYN,
#endif
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	u8 size;
	enum base_type base;
};
const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
[REG_REG] = {
	"static", ddr_reg, DDRCTL_REG_REG_SIZE, DDR_BASE},
[REG_TIMING] = {
	"timing", ddr_timing, DDRCTL_REG_TIMING_SIZE, DDR_BASE},
[REG_PERF] = {
	"perf", ddr_perf, DDRCTL_REG_PERF_SIZE, DDR_BASE},
[REG_MAP] = {
	"map", ddr_map, DDRCTL_REG_MAP_SIZE, DDR_BASE},
[REGPHY_REG] = {
	"static", ddrphy_reg, DDRPHY_REG_REG_SIZE, DDRPHY_BASE},
[REGPHY_TIMING] = {
	"timing", ddrphy_timing, DDRPHY_REG_TIMING_SIZE, DDRPHY_BASE},
[REGPHY_CAL] = {
	"cal", ddrphy_cal, DDRPHY_REG_CAL_SIZE, DDRPHY_BASE},
#ifdef CONFIG_STM32MP1_DDR_INTERACTIVE
[REG_DYN] = {
	"dyn", ddr_dyn, DDR_REG_DYN_SIZE, DDR_BASE},
[REGPHY_DYN] = {
	"dyn", ddrphy_dyn, DDRPHY_REG_DYN_SIZE, DDRPHY_BASE},
#endif
};

const char *base_name[] = {
	[DDR_BASE] = "ctl",
	[DDRPHY_BASE] = "phy",
};
static u32 get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE)
		return (u32)priv->phy;
	else
		return (u32)priv->ctl;
}
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int *ptr, value;
	enum base_type base = ddr_registers[type].base;
	u32 base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	debug("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		ptr = (unsigned int *)(base_addr + desc[i].offset);
		if (desc[i].par_offset == INVALID_OFFSET) {
			pr_err("invalid parameter offset for %s", desc[i].name);
		} else {
			value = *((u32 *)((u32)param +
					  desc[i].par_offset));
			writel(value, ptr);
			debug("[0x%x] %s= 0x%08x\n",
			      (u32)ptr, desc[i].name, value);
		}
	}
}
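
/*
 * Illustrative walk-through: set_reg(priv, REG_REG, &config->c_reg) loops
 * over ddr_reg[]; for the "pwrctl" entry it reads the value stored at
 * &config->c_reg + par_offset(pwrctl) and writes it to the controller
 * register at ctl base + offset(pwrctl). Entries tagged INVALID_OFFSET
 * are only reported, never written.
 */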
#ifdef CONFIG_STM32MP1_DDR_INTERACTIVE
static void stm32mp1_dump_reg_desc(u32 base_addr, const struct reg_desc *desc)
{
	unsigned int *ptr;

	ptr = (unsigned int *)(base_addr + desc->offset);
	printf("%s= 0x%08x\n", desc->name, readl(ptr));
}
static void stm32mp1_dump_param_desc(u32 par_addr, const struct reg_desc *desc)
{
	unsigned int *ptr;

	ptr = (unsigned int *)(par_addr + desc->par_offset);
	printf("%s= 0x%08x\n", desc->name, readl(ptr));
}
static const struct reg_desc *found_reg(const char *name, enum reg_type *type)
{
	unsigned int i, j;
	const struct reg_desc *desc;

	for (i = 0; i < ARRAY_SIZE(ddr_registers); i++) {
		desc = ddr_registers[i].desc;
		for (j = 0; j < ddr_registers[i].size; j++) {
			if (strcmp(name, desc[j].name) == 0) {
				*type = i;
				return &desc[j];
			}
		}
	}
	return NULL;
}
int stm32mp1_dump_reg(const struct ddr_info *priv,
		      const char *name)
{
	unsigned int i, j;
	const struct reg_desc *desc;
	u32 base_addr;
	enum base_type p_base;
	enum reg_type type;
	const char *p_name;
	enum base_type filter = NONE_BASE;
	int result = -EINVAL;

	if (name) {
		if (strcmp(name, base_name[DDR_BASE]) == 0)
			filter = DDR_BASE;
		else if (strcmp(name, base_name[DDRPHY_BASE]) == 0)
			filter = DDRPHY_BASE;
	}

	for (i = 0; i < ARRAY_SIZE(ddr_registers); i++) {
		p_base = ddr_registers[i].base;
		p_name = ddr_registers[i].name;
		if (!name || (filter == p_base || !strcmp(name, p_name))) {
			result = 0;
			desc = ddr_registers[i].desc;
			base_addr = get_base_addr(priv, p_base);
			printf("==%s.%s==\n", base_name[p_base], p_name);
			for (j = 0; j < ddr_registers[i].size; j++)
				stm32mp1_dump_reg_desc(base_addr, &desc[j]);
		}
	}
	if (result && name) {
		desc = found_reg(name, &type);
		if (desc) {
			p_base = ddr_registers[type].base;
			base_addr = get_base_addr(priv, p_base);
			stm32mp1_dump_reg_desc(base_addr, desc);
			result = 0;
		}
	}
	return result;
}
void stm32mp1_edit_reg(const struct ddr_info *priv,
		       char *name, char *string)
{
	unsigned long *ptr, value;
	enum reg_type type;
	enum base_type base;
	u32 base_addr;
	const struct reg_desc *desc;

	desc = found_reg(name, &type);
	if (!desc) {
		printf("%s not found\n", name);
		return;
	}
	if (strict_strtoul(string, 16, &value) < 0) {
		printf("invalid value %s\n", string);
		return;
	}
	base = ddr_registers[type].base;
	base_addr = get_base_addr(priv, base);
	ptr = (unsigned long *)(base_addr + desc->offset);
	writel(value, ptr);
	printf("%s= 0x%08x\n", desc->name, readl(ptr));
}
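
/*
 * Note: stm32mp1_edit_reg() patches the live controller/PHY register,
 * whereas stm32mp1_edit_param() below only updates the parameter copy
 * consumed by set_reg(), so its effect is seen at the next initialization.
 */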
static u32 get_par_addr(const struct stm32mp1_ddr_config *config,
			enum reg_type type)
{
	u32 par_addr = 0x0;

	switch (type) {
	case REG_REG:
		par_addr = (u32)&config->c_reg;
		break;
	case REG_TIMING:
		par_addr = (u32)&config->c_timing;
		break;
	case REG_PERF:
		par_addr = (u32)&config->c_perf;
		break;
	case REG_MAP:
		par_addr = (u32)&config->c_map;
		break;
	case REGPHY_REG:
		par_addr = (u32)&config->p_reg;
		break;
	case REGPHY_TIMING:
		par_addr = (u32)&config->p_timing;
		break;
	case REGPHY_CAL:
		par_addr = (u32)&config->p_cal;
		break;
	case REG_DYN:
	case REGPHY_DYN:
	case REG_TYPE_NB:
		par_addr = (u32)NULL;
		break;
	}

	return par_addr;
}
int stm32mp1_dump_param(const struct stm32mp1_ddr_config *config,
			const char *name)
{
	unsigned int i, j;
	const struct reg_desc *desc;
	u32 par_addr;
	enum base_type p_base;
	enum reg_type type;
	const char *p_name;
	enum base_type filter = NONE_BASE;
	int result = -EINVAL;

	if (name) {
		if (strcmp(name, base_name[DDR_BASE]) == 0)
			filter = DDR_BASE;
		else if (strcmp(name, base_name[DDRPHY_BASE]) == 0)
			filter = DDRPHY_BASE;
	}

	for (i = 0; i < ARRAY_SIZE(ddr_registers); i++) {
		par_addr = get_par_addr(config, i);
		if (!par_addr)
			continue;
		p_base = ddr_registers[i].base;
		p_name = ddr_registers[i].name;
		if (!name || (filter == p_base || !strcmp(name, p_name))) {
			result = 0;
			desc = ddr_registers[i].desc;
			printf("==%s.%s==\n", base_name[p_base], p_name);
			for (j = 0; j < ddr_registers[i].size; j++)
				stm32mp1_dump_param_desc(par_addr, &desc[j]);
		}
	}
	if (result && name) {
		desc = found_reg(name, &type);
		if (desc) {
			par_addr = get_par_addr(config, type);
			if (par_addr) {
				stm32mp1_dump_param_desc(par_addr, desc);
				result = 0;
			}
		}
	}
	return result;
}
void stm32mp1_edit_param(const struct stm32mp1_ddr_config *config,
			 char *name, char *string)
{
	unsigned long *ptr, value;
	enum reg_type type;
	const struct reg_desc *desc;
	u32 par_addr;

	desc = found_reg(name, &type);
	if (!desc) {
		printf("%s not found\n", name);
		return;
	}
	if (strict_strtoul(string, 16, &value) < 0) {
		printf("invalid value %s\n", string);
		return;
	}
	par_addr = get_par_addr(config, type);
	if (!par_addr) {
		printf("no parameter %s\n", name);
		return;
	}
	ptr = (unsigned long *)(par_addr + desc->par_offset);
	writel(value, ptr);
	printf("%s= 0x%08x\n", desc->name, readl(ptr));
}

#endif
__weak bool stm32mp1_ddr_interactive(void *priv,
				     enum stm32mp1_ddr_interact_step step,
				     const struct stm32mp1_ddr_config *config)
{
	return false;
}

#define INTERACTIVE(step)\
	stm32mp1_ddr_interactive(priv, step, config)
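
/*
 * stm32mp1_ddr_interactive() is a weak hook: this default lets the init
 * sequence run straight through, while an interactive build may override
 * it and return true at one of the checkpoints (STEP_DDR_RESET,
 * STEP_CTL_INIT, STEP_PHY_INIT, STEP_DDR_READY) to have
 * stm32mp1_ddr_init() restart the sequence with edited settings.
 */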
static void ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	u32 pgsr;
	int ret;

	ret = readl_poll_timeout(&phy->pgsr, pgsr,
				 pgsr & (DDRPHYC_PGSR_IDONE |
					 DDRPHYC_PGSR_DTERR |
					 DDRPHYC_PGSR_DTIERR |
					 DDRPHYC_PGSR_DFTERR |
					 DDRPHYC_PGSR_RVERR |
					 DDRPHYC_PGSR_RVEIRR),
				 1000000);
	debug("\n[0x%08x] pgsr = 0x%08x ret=%d\n",
	      (u32)&phy->pgsr, pgsr, ret);
}
void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, u32 pir)
{
	pir |= DDRPHYC_PIR_INIT;
	writel(pir, &phy->pir);
	debug("[0x%08x] pir = 0x%08x -> 0x%08x\n",
	      (u32)&phy->pir, pir, readl(&phy->pir));

	/* need to wait 10 configuration clock cycles before polling */
	udelay(10);

	/* Wait DRAM initialization and Gate Training Evaluation complete */
	ddrphy_idone_wait(phy);
}
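
/*
 * The pir mask selects which PUBL initialization steps to trigger:
 * stm32mp1_ddr_init() below passes the full DLL/ZQ/ITM/DRAM-init mask for
 * the first bring-up and DDRPHYC_PIR_QSTRN alone for DQS gate training;
 * DDRPHYC_PIR_INIT is ORed in here to start the selected sequence.
 */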
/* start quasi dynamic register update */
static void start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	clrbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
}
/* wait quasi dynamic register update */
static void wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	int ret;
	u32 swstat;

	setbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);

	ret = readl_poll_timeout(&ctl->swstat, swstat,
				 swstat & DDRCTRL_SWSTAT_SW_DONE_ACK,
				 1000000);
	if (ret)
		panic("Timeout initialising DRAM : DDR->swstat = %x\n",
		      swstat);

	debug("[0x%08x] swstat = 0x%08x\n", (u32)&ctl->swstat, swstat);
}
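
/*
 * Typical quasi-dynamic update sequence, as used in the functions below:
 *
 *	start_sw_done(ctl);      (SWCTL.sw_done = 0)
 *	... program quasi-dynamic registers (rfshctl3, pwrctl, dfimisc) ...
 *	wait_sw_done_ack(ctl);   (SWCTL.sw_done = 1, poll SWSTAT.sw_done_ack)
 */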
/* wait for the controller to reach the expected operating mode */
static void wait_operating_mode(struct ddr_info *priv, int mode)
{
	u32 stat, val, mask, val2 = 0, mask2 = 0;
	int ret;

	mask = DDRCTRL_STAT_OPERATING_MODE_MASK;
	val = mode;
	/* self-refresh due to software => check also STAT.selfref_type */
	if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
		mask |= DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val |= DDRCTRL_STAT_SELFREF_TYPE_SR;
	} else if (mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) {
		/* normal mode: handle also automatic self refresh */
		mask2 = DDRCTRL_STAT_OPERATING_MODE_MASK |
			DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val2 = DDRCTRL_STAT_OPERATING_MODE_SR |
		       DDRCTRL_STAT_SELFREF_TYPE_ASR;
	}

	ret = readl_poll_timeout(&priv->ctl->stat, stat,
				 ((stat & mask) == val) ||
				 (mask2 && ((stat & mask2) == val2)),
				 1000000);
	if (ret)
		panic("Timeout DRAM : DDR->stat = %x\n", stat);

	debug("[0x%08x] stat = 0x%08x\n", (u32)&priv->ctl->stat, stat);
}
void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	start_sw_done(ctl);
	/* quasi-dynamic register update */
	setbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	clrbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	clrbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}
void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
			      u32 rfshctl3, u32 pwrctl)
{
	start_sw_done(ctl);
	if (!(rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH))
		clrbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	if (pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN)
		setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	setbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}
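
/*
 * stm32mp1_refresh_disable()/stm32mp1_refresh_restore() are intended to be
 * used as a pair around DQS training (steps 8 and 12 of the init
 * sequence): the caller passes its configured rfshctl3 and pwrctl values
 * so that only the bits it originally enabled are re-armed.
 */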
/* board-specific DDR power initializations. */
__weak int board_ddr_power_init(enum ddr_type ddr_type)
{
	return 0;
}
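
/*
 * Board code may override this weak stub to power up the DDR supply for
 * the detected type (for example by programming a PMIC regulator); the
 * default assumes the rail is already up and simply reports success.
 */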
void stm32mp1_ddr_init(struct ddr_info *priv,
		       const struct stm32mp1_ddr_config *config)
{
	u32 pir;
	int ret = -EINVAL;

	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		ret = board_ddr_power_init(STM32MP_DDR3);
	else if (config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2)
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	else if (config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3)
		ret = board_ddr_power_init(STM32MP_LPDDR3);

	if (ret)
		panic("ddr power init failed\n");
start:
	debug("name = %s\n", config->info.name);
	debug("speed = %d kHz\n", config->info.speed);
	debug("size = 0x%x\n", config->info.size);
	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 * 1.1 RESETS: presetn, core_ddrc_rstn, aresetn
	 */
	/* Assert all DDR part resets */
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed))
		panic("invalid DRAM clock : %d kHz\n",
		      config->info.speed);

	/* 1.3. deassert reset */
	/* de-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/* De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit
	 */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* for PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */
	if (INTERACTIVE(STEP_DDR_RESET))
		goto start;
	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	clrbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	debug("[0x%08x] dfimisc = 0x%08x\n",
	      (u32)&priv->ctl->dfimisc, readl(&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);
	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* skip CTRL init, SDRAM init is done by PHY PUBL */
	clrsetbits_le32(&priv->ctl->init0,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);

	set_reg(priv, REG_PERF, &config->c_perf);
	if (INTERACTIVE(STEP_CTL_INIT))
		goto start;
	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/* 3. start PHY init by accessing relevant PUBL registers
	 * (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);
	if (INTERACTIVE(STEP_PHY_INIT))
		goto start;
	/* 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 * Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	ddrphy_idone_wait(priv->phy);

	/* 5. Indicate to PUBL that the controller performs SDRAM initialization
	 * by setting PIR.INIT and PIR.CTLDINIT, and poll PGSR.IDONE
	 * DRAM init is done by PHY, init0.skip_dram_init = 1
	 */
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		pir |= DDRPHYC_PIR_DRAMRST; /* only for DDR3 */

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 6. SET DFIMISC.dfi_init_complete_en to 1 */
	/* Enable quasi-dynamic register programming */
	start_sw_done(priv->ctl);
	setbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(priv->ctl);
	/* 7. Wait for DWC_ddr_umctl2 to move to normal operating mode
	 * by monitoring the STAT.operating_mode signal
	 */
	/* wait uMCTL2 ready */
	wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	debug("DDR DQS training : ");
	/* 8. Disable auto refresh and power down by setting
	 * - RFSHCTL3.dis_auto_refresh = 1
	 * - PWRCTL.powerdown_en = 0
	 * - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/* 9. Program PUBL PGCR to enable refresh during training and rank to train
	 * not done => keep the programmed value in PGCR
	 */

	/* 10. configure PUBL PIR register to specify which training step to run */
	/* warning: RVTRN is not supported by this PUBL */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. monitor PUBL PGSR.IDONE to poll completion of the training sequence */
	ddrphy_idone_wait(priv->phy);

	/* 12. set back the registers of step 8 to their original values if desired */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);
	/* enable uMCTL2 AXI ports 0 and 1 */
	setbits_le32(&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	setbits_le32(&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);

	if (INTERACTIVE(STEP_DDR_READY))
		goto start;
}