// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 */

#include <config.h>
#include <asm/armv7.h>
#include <asm/cache.h>
#include <asm/gic.h>
#include <asm/io.h>
#include <asm/psci.h>
#include <asm/secure.h>
#include <hang.h>
#include <linux/bitops.h>
#include <linux/errno.h>

/* PWR */
#define PWR_CR3                                 0x0c
#define PWR_MPUCR                               0x10

#define PWR_CR3_DDRSREN                         BIT(10)
#define PWR_CR3_DDRRETEN                        BIT(12)

#define PWR_MPUCR_PDDS                          BIT(0)
#define PWR_MPUCR_CSTDBYDIS                     BIT(3)
#define PWR_MPUCR_CSSF                          BIT(9)

/* RCC */
#define RCC_MSSCKSELR                           0x48
#define RCC_DDRITFCR                            0xd8

#define RCC_DDRITFCR_DDRC1EN                    BIT(0)
#define RCC_DDRITFCR_DDRC1LPEN                  BIT(1)
#define RCC_DDRITFCR_DDRC2EN                    BIT(2)
#define RCC_DDRITFCR_DDRC2LPEN                  BIT(3)
#define RCC_DDRITFCR_DDRPHYCEN                  BIT(4)
#define RCC_DDRITFCR_DDRPHYCLPEN                BIT(5)
#define RCC_DDRITFCR_DDRCAPBEN                  BIT(6)
#define RCC_DDRITFCR_DDRCAPBLPEN                BIT(7)
#define RCC_DDRITFCR_AXIDCGEN                   BIT(8)
#define RCC_DDRITFCR_DDRPHYCAPBEN               BIT(9)
#define RCC_DDRITFCR_DDRPHYCAPBLPEN             BIT(10)
#define RCC_DDRITFCR_DDRCKMOD_MASK              GENMASK(22, 20)
#define RCC_DDRITFCR_GSKPCTRL                   BIT(24)

#define RCC_MP_SREQSETR                         0x104
#define RCC_MP_SREQCLRR                         0x108

#define RCC_MP_CIER                             0x414
#define RCC_MP_CIFR                             0x418
#define RCC_MP_CIFR_WKUPF                       BIT(20)

#define RCC_MCUDIVR                             0x830
#define RCC_PLL3CR                              0x880
#define RCC_PLL4CR                              0x894

/* SYSCFG */
#define SYSCFG_CMPCR                            0x20
#define SYSCFG_CMPCR_SW_CTRL                    BIT(2)
#define SYSCFG_CMPENSETR                        0x24
#define SYSCFG_CMPENCLRR                        0x28
#define SYSCFG_CMPENR_MPUEN                     BIT(0)

/* DDR Controller registers offsets */
#define DDRCTRL_STAT                            0x004
#define DDRCTRL_PWRCTL                          0x030
#define DDRCTRL_PWRTMG                          0x034
#define DDRCTRL_HWLPCTL                         0x038
#define DDRCTRL_DFIMISC                         0x1b0
#define DDRCTRL_SWCTL                           0x320
#define DDRCTRL_SWSTAT                          0x324
#define DDRCTRL_PSTAT                           0x3fc
#define DDRCTRL_PCTRL_0                         0x490
#define DDRCTRL_PCTRL_1                         0x540

/* DDR Controller Register fields */
#define DDRCTRL_STAT_OPERATING_MODE_MASK        GENMASK(2, 0)
#define DDRCTRL_STAT_OPERATING_MODE_NORMAL      0x1
#define DDRCTRL_STAT_OPERATING_MODE_SR          0x3
#define DDRCTRL_STAT_SELFREF_TYPE_MASK          GENMASK(5, 4)
#define DDRCTRL_STAT_SELFREF_TYPE_ASR           (0x3 << 4)
#define DDRCTRL_STAT_SELFREF_TYPE_SR            (0x2 << 4)

#define DDRCTRL_PWRCTL_SELFREF_EN               BIT(0)
#define DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE  BIT(3)
#define DDRCTRL_PWRCTL_SELFREF_SW               BIT(5)

#define DDRCTRL_PWRTMG_SELFREF_TO_X32_MASK      GENMASK(23, 16)
#define DDRCTRL_PWRTMG_SELFREF_TO_X32_0         BIT(16)

#define DDRCTRL_HWLPCTL_HW_LP_EN                BIT(0)

#define DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN    BIT(0)

#define DDRCTRL_SWCTL_SW_DONE                   BIT(0)

#define DDRCTRL_SWSTAT_SW_DONE_ACK              BIT(0)

#define DDRCTRL_PSTAT_RD_PORT_BUSY_0            BIT(0)
#define DDRCTRL_PSTAT_RD_PORT_BUSY_1            BIT(1)
#define DDRCTRL_PSTAT_WR_PORT_BUSY_0            BIT(16)
#define DDRCTRL_PSTAT_WR_PORT_BUSY_1            BIT(17)

#define DDRCTRL_PCTRL_N_PORT_EN                 BIT(0)

/* DDR PHY registers offsets */
#define DDRPHYC_PIR                             0x004
#define DDRPHYC_PGSR                            0x00c
#define DDRPHYC_ACDLLCR                         0x014
#define DDRPHYC_ACIOCR                          0x024
#define DDRPHYC_DXCCR                           0x028
#define DDRPHYC_DSGCR                           0x02c
#define DDRPHYC_ZQ0CR0                          0x180
#define DDRPHYC_DX0DLLCR                        0x1cc
#define DDRPHYC_DX1DLLCR                        0x20c
#define DDRPHYC_DX2DLLCR                        0x24c
#define DDRPHYC_DX3DLLCR                        0x28c

/* DDR PHY Register fields */
#define DDRPHYC_PIR_INIT                        BIT(0)
#define DDRPHYC_PIR_DLLSRST                     BIT(1)
#define DDRPHYC_PIR_DLLLOCK                     BIT(2)
#define DDRPHYC_PIR_ITMSRST                     BIT(4)

#define DDRPHYC_PGSR_IDONE                      BIT(0)

#define DDRPHYC_ACDLLCR_DLLSRST                 BIT(30)
#define DDRPHYC_ACDLLCR_DLLDIS                  BIT(31)

#define DDRPHYC_ACIOCR_ACOE                     BIT(1)
#define DDRPHYC_ACIOCR_ACPDD                    BIT(3)
#define DDRPHYC_ACIOCR_ACPDR                    BIT(4)
#define DDRPHYC_ACIOCR_CKPDD_MASK               GENMASK(10, 8)
#define DDRPHYC_ACIOCR_CKPDD_0                  BIT(8)
#define DDRPHYC_ACIOCR_CKPDR_MASK               GENMASK(13, 11)
#define DDRPHYC_ACIOCR_CKPDR_0                  BIT(11)
#define DDRPHYC_ACIOCR_CSPDD_MASK               GENMASK(20, 18)
#define DDRPHYC_ACIOCR_CSPDD_0                  BIT(18)

#define DDRPHYC_DXCCR_DXPDD                     BIT(2)
#define DDRPHYC_DXCCR_DXPDR                     BIT(3)

#define DDRPHYC_DSGCR_CKEPDD_MASK               GENMASK(19, 16)
#define DDRPHYC_DSGCR_CKEPDD_0                  BIT(16)
#define DDRPHYC_DSGCR_ODTPDD_MASK               GENMASK(23, 20)
#define DDRPHYC_DSGCR_ODTPDD_0                  BIT(20)
#define DDRPHYC_DSGCR_NL2PD                     BIT(24)
#define DDRPHYC_DSGCR_CKOE                      BIT(28)

#define DDRPHYC_ZQ0CRN_ZQPD                     BIT(31)

#define DDRPHYC_DXNDLLCR_DLLDIS                 BIT(31)

#define BOOT_API_A7_CORE0_MAGIC_NUMBER          0xca7face0
#define BOOT_API_A7_CORE1_MAGIC_NUMBER          0xca7face1

#define MPIDR_AFF0                              GENMASK(7, 0)

#define RCC_MP_GRSTCSETR                        (STM32_RCC_BASE + 0x0404)
#define RCC_MP_GRSTCSETR_MPSYSRST               BIT(0)
#define RCC_MP_GRSTCSETR_MPUP0RST               BIT(4)
#define RCC_MP_GRSTCSETR_MPUP1RST               BIT(5)

/* IWDG */
#define IWDG_KR                                 0x00
#define IWDG_KR_RELOAD_KEY                      0xaaaa
#define IWDG_EWCR                               0x14
#define IWDG_EWCR_EWIC                          BIT(14)

#define STM32MP1_PSCI_NR_CPUS                   2
#if STM32MP1_PSCI_NR_CPUS > CONFIG_ARMV7_PSCI_NR_CPUS
#error "invalid value for CONFIG_ARMV7_PSCI_NR_CPUS"
#endif

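/*
 * Per-CPU PSCI affinity state: core 0 is the boot core and starts ON,
 * core 1 starts OFF and is only brought up through psci_cpu_on().
 */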
u8 psci_state[STM32MP1_PSCI_NR_CPUS] __secure_data = {
        PSCI_AFFINITY_LEVEL_ON,
        PSCI_AFFINITY_LEVEL_OFF};

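/*
 * CNTFRQ can only be written from secure state: psci_cpu_on() saves the
 * boot core's value here so that psci_arch_cpu_entry() can program it on
 * the freshly started core.
 */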
static u32 __secure_data cntfrq;

static u32 __secure cp15_read_cntfrq(void)
{
        u32 frq;

        asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));

        return frq;
}

static void __secure cp15_write_cntfrq(u32 frq)
{
        asm volatile ("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}

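/* Record a core's new affinity state and make it visible before going on */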
static inline void psci_set_state(int cpu, u8 state)
{
        psci_state[cpu] = state;
        dsb();
        isb();
}

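/*
 * On the Cortex-A7, CBAR (CP15 c15) holds the base address of the private
 * peripheral region; the GIC distributor lives at GIC_DIST_OFFSET from it.
 */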
static u32 __secure stm32mp_get_gicd_base_address(void)
{
        u32 periphbase;

        /* get the GIC base address from the CBAR register */
        asm("mrc p15, 4, %0, c15, c0, 0\n" : "=r" (periphbase));

        return (periphbase & CBAR_MASK) + GIC_DIST_OFFSET;
}

static void __secure stm32mp_raise_sgi0(int cpu)
{
        u32 gic_dist_addr;

        gic_dist_addr = stm32mp_get_gicd_base_address();

        /* kick the target CPU with SGI0 */
        writel((BIT(cpu) << 16), gic_dist_addr + GICD_SGIR);
}

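/*
 * Called on the freshly started core from the common psci_cpu_entry path:
 * mark the core as ON, program the CNTFRQ value saved by the boot core and
 * clear the TAMP magic so the BootROM handshake can be reused later.
 */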
void __secure psci_arch_cpu_entry(void)
{
        u32 cpu = psci_get_cpu_id();

        psci_set_state(cpu, PSCI_AFFINITY_LEVEL_ON);

        /* write the saved cntfrq */
        cp15_write_cntfrq(cntfrq);

        /* reset magic in TAMP register */
        writel(0xFFFFFFFF, TAMP_BACKUP_MAGIC_NUMBER);
}

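/* Advertise only the PSCI 0.2/1.0 calls implemented in this file */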
s32 __secure psci_features(u32 function_id, u32 psci_fid)
{
        switch (psci_fid) {
        case ARM_PSCI_0_2_FN_PSCI_VERSION:
        case ARM_PSCI_0_2_FN_CPU_OFF:
        case ARM_PSCI_0_2_FN_CPU_ON:
        case ARM_PSCI_0_2_FN_AFFINITY_INFO:
        case ARM_PSCI_0_2_FN_MIGRATE_INFO_TYPE:
        case ARM_PSCI_0_2_FN_SYSTEM_OFF:
        case ARM_PSCI_0_2_FN_SYSTEM_RESET:
        case ARM_PSCI_1_0_FN_SYSTEM_SUSPEND:
                return 0x0;
        }
        return ARM_PSCI_RET_NI;
}

u32 __secure psci_version(void)
{
        return ARM_PSCI_VER_1_0;
}

s32 __secure psci_affinity_info(u32 function_id, u32 target_affinity,
                                u32 lowest_affinity_level)
{
        u32 cpu = target_affinity & MPIDR_AFF0;

        if (lowest_affinity_level > 0)
                return ARM_PSCI_RET_INVAL;

        if (target_affinity & ~MPIDR_AFF0)
                return ARM_PSCI_RET_INVAL;

        if (cpu >= STM32MP1_PSCI_NR_CPUS)
                return ARM_PSCI_RET_INVAL;

        return psci_state[cpu];
}

u32 __secure psci_migrate_info_type(void)
{
        /*
         * Per Power_State_Coordination_Interface_PDD_v1_1_DEN0022D.pdf,
         * return 2: the Trusted OS is either not present or does not
         * require migration, so a system of this type does not require
         * the caller to use the MIGRATE function.
         * MIGRATE function calls return NOT_SUPPORTED.
         */
        return 2;
}

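/*
 * CPU_ON: the secondary core is released through a handshake with the
 * BootROM. The ROM parks the core until the per-core magic value is written
 * to the TAMP backup register and an SGI0 is received, then jumps to the
 * address stored in the backup branch-address register, which is pointed at
 * psci_cpu_entry here. The magic is cleared first (with SGI0 kicks) so a
 * stale value from a previous wake-up cannot be consumed.
 */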
s32 __secure psci_cpu_on(u32 function_id, u32 target_cpu, u32 pc,
                         u32 context_id)
{
        u32 cpu = target_cpu & MPIDR_AFF0;

        if (target_cpu & ~MPIDR_AFF0)
                return ARM_PSCI_RET_INVAL;

        if (cpu >= STM32MP1_PSCI_NR_CPUS)
                return ARM_PSCI_RET_INVAL;

        if (psci_state[cpu] == PSCI_AFFINITY_LEVEL_ON)
                return ARM_PSCI_RET_ALREADY_ON;

        /* save the current CPU's cntfrq to program it on the target CPU */
        cntfrq = cp15_read_cntfrq();

        /* reset magic in TAMP register */
        if (readl(TAMP_BACKUP_MAGIC_NUMBER))
                writel(0xFFFFFFFF, TAMP_BACKUP_MAGIC_NUMBER);
        /*
         * The ROM code needs a first SGI0 after core reset;
         * the core is ready once the ROM code has cleared the magic to 0.
         */
        while (readl(TAMP_BACKUP_MAGIC_NUMBER))
                stm32mp_raise_sgi0(cpu);

        /* store target PC and context id */
        psci_save(cpu, pc, context_id);

        /* write entrypoint in backup RAM register */
        writel((u32)&psci_cpu_entry, TAMP_BACKUP_BRANCH_ADDRESS);
        psci_set_state(cpu, PSCI_AFFINITY_LEVEL_ON_PENDING);

        /* write magic number in backup register */
        if (cpu == 0x01)
                writel(BOOT_API_A7_CORE1_MAGIC_NUMBER,
                       TAMP_BACKUP_MAGIC_NUMBER);
        else
                writel(BOOT_API_A7_CORE0_MAGIC_NUMBER,
                       TAMP_BACKUP_MAGIC_NUMBER);

        /* Generate an interrupt (SGI0) to start the core */
        stm32mp_raise_sgi0(cpu);

        return ARM_PSCI_RET_SUCCESS;
}

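/*
 * CPU_OFF: after the common ARMv7 PSCI cleanup the core marks itself OFF
 * and asserts its own MPU processor reset in the RCC; the BootROM handles
 * the rest, so the trailing wfi loop only bridges the gap until the reset
 * takes effect.
 */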
s32 __secure psci_cpu_off(void)
{
        u32 cpu;

        cpu = psci_get_cpu_id();

        psci_cpu_off_common();
        psci_set_state(cpu, PSCI_AFFINITY_LEVEL_OFF);

        /* reset the core: the wfi is managed by the BootROM */
        if (cpu == 0x01)
                writel(RCC_MP_GRSTCSETR_MPUP1RST, RCC_MP_GRSTCSETR);
        else
                writel(RCC_MP_GRSTCSETR_MPUP0RST, RCC_MP_GRSTCSETR);

        /* just wait for the reset */
        while (1)
                wfi();
}

void __secure psci_system_reset(void)
{
        /* System reset */
        writel(RCC_MP_GRSTCSETR_MPSYSRST, RCC_MP_GRSTCSETR);
        /* just wait for the reset */
        while (1)
                wfi();
}

void __secure psci_system_off(void)
{
        /*
         * System Off is not managed, wait for the user to power off.
         * TODO: handle I2C write in PMIC Main Control register bit 0 = SWOFF
         */
        while (1)
                wfi();
}

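/*
 * Delay helper usable from the secure monitor, where the regular U-Boot
 * timer code is unavailable: busy-wait on the ARM generic timer physical
 * counter (CNTPCT, 64-bit CP15 read), scaled by CNTFRQ to microseconds.
 */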
static void __secure secure_udelay(unsigned int delay)
{
        u32 freq = cp15_read_cntfrq() / 1000000;
        u64 start, end;

        delay *= freq;

        asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (start));
        for (;;) {
                asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (end));
                if ((end - start) > delay)
                        break;
        }
}

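/*
 * Poll @reg until the bits selected by @mask read back exactly as @val,
 * or give up after roughly 500 us measured on the generic timer.
 */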
static int __secure secure_waitbits(u32 reg, u32 mask, u32 val)
{
        u32 freq = cp15_read_cntfrq() / 1000000;
        u32 delay = 500 * freq; /* 500 us */
        u64 start, end;
        u32 tmp;

        asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (start));
        for (;;) {
                tmp = readl(reg);
                tmp &= mask;
                if (tmp == val)
                        return 0;
                asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (end));
                if ((end - start) > delay)
                        return -ETIMEDOUT;
        }
}

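/*
 * Switch the DDR controller from hardware/automatic low-power management
 * to plain software-controlled self-refresh: force the DDR interface
 * clocks on, disable the uMCTL2 hardware low-power interface and automatic
 * self-refresh, and save PWRCTL so ddr_sr_mode_restore() can undo it.
 */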
static void __secure ddr_sr_mode_ssr(u32 *saved_pwrctl)
{
        setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR,
                     RCC_DDRITFCR_DDRC1LPEN | RCC_DDRITFCR_DDRC1EN |
                     RCC_DDRITFCR_DDRC2LPEN | RCC_DDRITFCR_DDRC2EN |
                     RCC_DDRITFCR_DDRCAPBLPEN | RCC_DDRITFCR_DDRPHYCAPBLPEN |
                     RCC_DDRITFCR_DDRCAPBEN | RCC_DDRITFCR_DDRPHYCAPBEN |
                     RCC_DDRITFCR_DDRPHYCEN);

        clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR,
                     RCC_DDRITFCR_AXIDCGEN | RCC_DDRITFCR_DDRCKMOD_MASK);

        /* Disable HW LP interface of uMCTL2 */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_HWLPCTL,
                     DDRCTRL_HWLPCTL_HW_LP_EN);

        /* Configure Automatic LP modes of uMCTL2 */
        clrsetbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRTMG,
                        DDRCTRL_PWRTMG_SELFREF_TO_X32_MASK,
                        DDRCTRL_PWRTMG_SELFREF_TO_X32_0);

        /* Save PWRCTL register to restart ASR after suspend (if applicable) */
        *saved_pwrctl = readl(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL);

        /*
         * Disable "clock disable with LP modes"
         * (used in RUN mode for LPDDR2 with specific timing).
         */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL,
                     DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE);

        /* Disable automatic Self-Refresh mode */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL,
                     DDRCTRL_PWRCTL_SELFREF_EN);
}

static void __secure ddr_sr_mode_restore(u32 saved_pwrctl)
{
        saved_pwrctl &= DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE |
                        DDRCTRL_PWRCTL_SELFREF_EN;

        /* Restore ASR mode in case it was enabled before suspend. */
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL, saved_pwrctl);
}

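/*
 * Put the DDR into software self-refresh and retention before Stop mode:
 * block the AXI ports, request self-refresh, power down the PHY I/O cells
 * and DLLs, enable DDR retention in the PWR block and gate the DDR clocks.
 * Returns 0 on success, or -EINVAL after re-enabling the AXI ports if the
 * controller did not reach self-refresh.
 */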
static int __secure ddr_sw_self_refresh_in(void)
{
        int ret;

        clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_AXIDCGEN);

        /* Block the AXI ports from taking any more transactions */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_0,
                     DDRCTRL_PCTRL_N_PORT_EN);
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_1,
                     DDRCTRL_PCTRL_N_PORT_EN);

        /*
         * Wait until all AXI ports are idle:
         * Poll PSTAT.rd_port_busy_n = 0
         * Poll PSTAT.wr_port_busy_n = 0
         */
        ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_PSTAT,
                              DDRCTRL_PSTAT_RD_PORT_BUSY_0 |
                              DDRCTRL_PSTAT_RD_PORT_BUSY_1 |
                              DDRCTRL_PSTAT_WR_PORT_BUSY_0 |
                              DDRCTRL_PSTAT_WR_PORT_BUSY_1, 0);
        if (ret)
                goto pstat_failed;

        /* SW Self-Refresh entry */
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);

        /*
         * Wait for the operating mode to change to self-refresh,
         * i.e. STAT.operating_mode[1:0] == 11.
         * Ensure the transition to self-refresh was caused by software
         * by also checking that STAT.selfref_type[1:0] == 2.
         */
        ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_STAT,
                              DDRCTRL_STAT_OPERATING_MODE_MASK |
                              DDRCTRL_STAT_SELFREF_TYPE_MASK,
                              DDRCTRL_STAT_OPERATING_MODE_SR |
                              DDRCTRL_STAT_SELFREF_TYPE_SR);
        if (ret)
                goto selfref_sw_failed;

        /* IOs powering down (PUBL registers) */
        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDD);
        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDR);

        clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR,
                        DDRPHYC_ACIOCR_CKPDD_MASK,
                        DDRPHYC_ACIOCR_CKPDD_0);

        clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR,
                        DDRPHYC_ACIOCR_CKPDR_MASK,
                        DDRPHYC_ACIOCR_CKPDR_0);

        clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR,
                        DDRPHYC_ACIOCR_CSPDD_MASK,
                        DDRPHYC_ACIOCR_CSPDD_0);

        /* Disable command/address output driver */
        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACOE);

        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDD);

        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDR);

        clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR,
                        DDRPHYC_DSGCR_ODTPDD_MASK,
                        DDRPHYC_DSGCR_ODTPDD_0);

        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_NL2PD);

        clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR,
                        DDRPHYC_DSGCR_CKEPDD_MASK,
                        DDRPHYC_DSGCR_CKEPDD_0);

        /* Disable PZQ cell (PUBL register) */
        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ZQ0CR0, DDRPHYC_ZQ0CRN_ZQPD);

        /* Set latch */
        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_CKOE);

        /* Additional delay to avoid early latch */
        secure_udelay(10);

        /* Activate sw retention in PWRCTRL */
        setbits_le32(STM32_PWR_BASE + PWR_CR3, PWR_CR3_DDRRETEN);

        /* Switch controller clocks (uMCTL2/PUBL) to DLL ref clock */
        setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL);

        /* Disable all DLLs: GLITCH window */
        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACDLLCR, DDRPHYC_ACDLLCR_DLLDIS);

        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX0DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX1DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX2DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX3DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

        /* Switch controller clocks (uMCTL2/PUBL) to DLL output clock */
        clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL);

        /* Deactivate all DDR clocks */
        clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR,
                     RCC_DDRITFCR_DDRC1EN | RCC_DDRITFCR_DDRC2EN |
                     RCC_DDRITFCR_DDRCAPBEN | RCC_DDRITFCR_DDRPHYCAPBEN);

        return 0;

selfref_sw_failed:
        /* This bit should be cleared to restore the DDR to its previous state */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL,
                     DDRCTRL_PWRCTL_SELFREF_SW);

pstat_failed:
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_0,
                     DDRCTRL_PCTRL_N_PORT_EN);
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_1,
                     DDRCTRL_PCTRL_N_PORT_EN);

        return -EINVAL;
}

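/*
 * Undo ddr_sw_self_refresh_in() after wake-up: ungate the DDR clocks,
 * re-enable the DLLs, run a partial PHY init (DLL lock and ITM reset),
 * release retention and the I/O power-downs, leave self-refresh and
 * re-open the AXI ports. Any failure here is fatal, hence the hang() calls.
 */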
static void __secure ddr_sw_self_refresh_exit(void)
{
        int ret;

        /* Enable all clocks */
        setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR,
                     RCC_DDRITFCR_DDRC1EN | RCC_DDRITFCR_DDRC2EN |
                     RCC_DDRITFCR_DDRPHYCEN | RCC_DDRITFCR_DDRPHYCAPBEN |
                     RCC_DDRITFCR_DDRCAPBEN);

        /* Handshake */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE);

        /* Mask dfi_init_complete_en */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_DFIMISC,
                     DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);

        /* Ack */
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE);
        ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_SWSTAT,
                              DDRCTRL_SWSTAT_SW_DONE_ACK,
                              DDRCTRL_SWSTAT_SW_DONE_ACK);
        if (ret)
                hang();

        /* Switch controller clocks (uMCTL2/PUBL) to DLL ref clock */
        setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL);

        /* Enable all DLLs: GLITCH window */
        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACDLLCR,
                     DDRPHYC_ACDLLCR_DLLDIS);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX0DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX1DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX2DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX3DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

        /* Additional delay to avoid early DLL clock switch */
        secure_udelay(50);

        /* Switch controller clocks (uMCTL2/PUBL) back to DLL output clock */
        clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACDLLCR, DDRPHYC_ACDLLCR_DLLSRST);

        secure_udelay(10);

        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACDLLCR, DDRPHYC_ACDLLCR_DLLSRST);

        /* PHY partial init (DLL lock and ITM reset) */
        writel(DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK |
               DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_INIT,
               STM32_DDRPHYC_BASE + DDRPHYC_PIR);

        /* Need to wait at least 10 clock cycles before accessing PGSR */
        secure_udelay(1);

        /* Poll for end of init */
        ret = secure_waitbits(STM32_DDRPHYC_BASE + DDRPHYC_PGSR,
                              DDRPHYC_PGSR_IDONE, DDRPHYC_PGSR_IDONE);
        if (ret)
                hang();

        /* Handshake */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE);

        /* Unmask dfi_init_complete_en to uMCTL2 */
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_DFIMISC, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);

        /* Ack */
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE);
        ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_SWSTAT,
                              DDRCTRL_SWSTAT_SW_DONE_ACK,
                              DDRCTRL_SWSTAT_SW_DONE_ACK);
        if (ret)
                hang();

        /* Deactivate sw retention in PWR */
        clrbits_le32(STM32_PWR_BASE + PWR_CR3, PWR_CR3_DDRRETEN);

        /* Enable PZQ cell (PUBL register) */
        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ZQ0CR0, DDRPHYC_ZQ0CRN_ZQPD);

        /* Enable pad drivers */
        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDD);

        /* Enable command/address output driver */
        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACOE);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_CKPDD_MASK);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_CSPDD_MASK);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDD);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDR);

        /* Release latch */
        setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_CKOE);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_ODTPDD_MASK);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_NL2PD);

        clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_CKEPDD_MASK);

        /* Remove self-refresh */
        clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);

        /* Wait for operating_mode == normal */
        ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_STAT,
                              DDRCTRL_STAT_OPERATING_MODE_MASK,
                              DDRCTRL_STAT_OPERATING_MODE_NORMAL);
        if (ret)
                hang();

        /* The AXI ports are no longer blocked from taking transactions */
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_0, DDRCTRL_PCTRL_N_PORT_EN);
        setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_1, DDRCTRL_PCTRL_N_PORT_EN);

        setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_AXIDCGEN);
}

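/*
 * PSCI SYSTEM_SUSPEND (function ID 0x8400000e in the 32-bit convention):
 * with the DDR in self-refresh, the MPU loops on wfi in Stop mode. IWDG
 * pretimeout interrupts only re-arm the watchdogs and re-enter Stop; any
 * other enabled wake-up source ends the loop, the DDR, clock and I/O
 * compensation setup is restored, and the stacked LR is rewritten so the
 * caller resumes at the 'ep' entry point it passed in.
 */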
void __secure psci_system_suspend(u32 __always_unused function_id,
                                  u32 ep, u32 context_id)
{
        u32 saved_mcudivr, saved_pll3cr, saved_pll4cr, saved_mssckselr;
        u32 gicd_addr = stm32mp_get_gicd_base_address();
        u32 cpu = psci_get_cpu_id();
        u32 sp = (u32)__secure_stack_end - (cpu << ARM_PSCI_STACK_SHIFT);
        bool iwdg1_wake = false;
        bool iwdg2_wake = false;
        bool other_wake = false;
        u32 saved_pwrctl, reg;
        u32 gic_enabled[8];
        u32 irqs;
        int i;

        /* Cache the enable mask of all 256 SPIs */
        for (i = 0; i < ARRAY_SIZE(gic_enabled); i++)
                gic_enabled[i] = readl(gicd_addr + GICD_ISENABLERn + 0x4 + 4 * i);

        /* Disable IO compensation */

        /* Place current APSRC/ANSRC into RAPSRC/RANSRC */
        reg = readl(STM32_SYSCFG_BASE + SYSCFG_CMPCR);
        reg >>= 8;
        reg &= 0xff << 16;
        reg |= SYSCFG_CMPCR_SW_CTRL;
        writel(reg, STM32_SYSCFG_BASE + SYSCFG_CMPCR);
        writel(SYSCFG_CMPENR_MPUEN, STM32_SYSCFG_BASE + SYSCFG_CMPENCLRR);

        writel(RCC_MP_CIFR_WKUPF, STM32_RCC_BASE + RCC_MP_CIFR);
        setbits_le32(STM32_RCC_BASE + RCC_MP_CIER, RCC_MP_CIFR_WKUPF);

        setbits_le32(STM32_PWR_BASE + PWR_MPUCR,
                     PWR_MPUCR_CSSF | PWR_MPUCR_CSTDBYDIS);

        saved_mcudivr = readl(STM32_RCC_BASE + RCC_MCUDIVR);
        saved_pll3cr = readl(STM32_RCC_BASE + RCC_PLL3CR);
        saved_pll4cr = readl(STM32_RCC_BASE + RCC_PLL4CR);
        saved_mssckselr = readl(STM32_RCC_BASE + RCC_MSSCKSELR);

        psci_v7_flush_dcache_all();
        ddr_sr_mode_ssr(&saved_pwrctl);
        ddr_sw_self_refresh_in();
        setbits_le32(STM32_PWR_BASE + PWR_CR3, PWR_CR3_DDRSREN);
        writel(0x3, STM32_RCC_BASE + RCC_MP_SREQSETR);

        /* Ping the IWDGs before entering suspend if their pretimeout IRQ is enabled */
        iwdg1_wake = !!(gic_enabled[4] & BIT(22));      /* SPI 150 */
        iwdg2_wake = !!(gic_enabled[4] & BIT(23));      /* SPI 151 */

        for (;;) {
                /* Ping IWDG1 and ACK the pretimeout IRQ */
                if (iwdg1_wake) {
                        writel(IWDG_KR_RELOAD_KEY, STM32_IWDG1_BASE + IWDG_KR);
                        writel(IWDG_EWCR_EWIC, STM32_IWDG1_BASE + IWDG_EWCR);
                }

                /* Ping IWDG2 and ACK the pretimeout IRQ */
                if (iwdg2_wake) {
                        writel(IWDG_KR_RELOAD_KEY, STM32_IWDG2_BASE + IWDG_KR);
                        writel(IWDG_EWCR_EWIC, STM32_IWDG2_BASE + IWDG_EWCR);
                }

                iwdg1_wake = false;
                iwdg2_wake = false;

                /* Zzz, enter stop mode */
                asm volatile(
                        "isb\n"
                        "dsb\n"
                        "wfi\n");

                /* Determine the wake up source */
                for (i = 0; i < ARRAY_SIZE(gic_enabled); i++) {
                        irqs = readl(gicd_addr + GICR_IGROUPMODRn + 0x4 + 4 * i);
                        irqs &= gic_enabled[i];
                        if (!irqs)
                                continue;

                        /* Test whether an IWDG pretimeout triggered the wake up. */
                        if (i == 4) {   /* SPI Num 128..159 */
                                iwdg1_wake = !!(irqs & BIT(22));        /* SPI 150 */
                                iwdg2_wake = !!(irqs & BIT(23));        /* SPI 151 */
                                irqs &= ~(BIT(22) | BIT(23));
                        }

                        /* Test whether there is any other wake up trigger. */
                        if (irqs) {
                                other_wake = true;
                                break;
                        }
                }

                /* Other wake up triggers pending, let the OS deal with all of them. */
                if (other_wake)
                        break;
        }

        writel(0x3, STM32_RCC_BASE + RCC_MP_SREQCLRR);
        ddr_sw_self_refresh_exit();
        ddr_sr_mode_restore(saved_pwrctl);

        writel(saved_mcudivr, STM32_RCC_BASE + RCC_MCUDIVR);
        writel(saved_pll3cr, STM32_RCC_BASE + RCC_PLL3CR);
        writel(saved_pll4cr, STM32_RCC_BASE + RCC_PLL4CR);
        writel(saved_mssckselr, STM32_RCC_BASE + RCC_MSSCKSELR);

        writel(SYSCFG_CMPENR_MPUEN, STM32_SYSCFG_BASE + SYSCFG_CMPENSETR);
        clrbits_le32(STM32_SYSCFG_BASE + SYSCFG_CMPCR, SYSCFG_CMPCR_SW_CTRL);

        /*
         * The system has resumed successfully. Rewrite the LR register stored
         * on the stack with the 'ep' value, so that on return from this PSCI
         * call the code jumps to the 'ep' resume entry point instead of the
         * previous 'lr' register content, which (e.g. with Linux) points to
         * the resume failure code path.
         *
         * See arch/arm/cpu/armv7/psci.S _smc_psci: for the stack layout
         * used here, SP-4 is PC, SP-8 is LR, SP-12 is R7, and so on.
         */
        writel(ep, sp - 8);
}