// SPDX-License-Identifier: GPL-2.0+
/*
 * Keystone2: DDR3 initialization
 *
 * (C) Copyright 2012-2014
 *     Texas Instruments Incorporated, <www.ti.com>
 */

#include <cpu_func.h>
#include <env.h>
#include <asm/io.h>
#include <common.h>
#include <asm/arch/msmc.h>
#include <asm/arch/ddr3.h>
#include <asm/arch/psc_defs.h>

#include <asm/ti-common/ti-edma3.h>

#define DDR3_EDMA_BLK_SIZE_SHIFT        10
#define DDR3_EDMA_BLK_SIZE              (1 << DDR3_EDMA_BLK_SIZE_SHIFT)
#define DDR3_EDMA_BCNT                  0x8000
#define DDR3_EDMA_CCNT                  1
#define DDR3_EDMA_XF_SIZE               (DDR3_EDMA_BLK_SIZE * DDR3_EDMA_BCNT)
#define DDR3_EDMA_SLOT_NUM              1

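/*
 * ddr3_init_ddrphy() - program the DDR3 PHY at 'base' from phy_cfg: wait for
 * PHY init done (PGSR0 bit 0), load the PLL, timing, mode and ZQ impedance
 * registers, then trigger PHY initialization through the PIR register twice,
 * updating the DATX8 byte-lane configuration in between on K2G.
 */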
void ddr3_init_ddrphy(u32 base, struct ddr3_phy_config *phy_cfg)
{
        unsigned int tmp;

        while ((__raw_readl(base + KS2_DDRPHY_PGSR0_OFFSET)
                & 0x00000001) != 0x00000001)
                ;

        __raw_writel(phy_cfg->pllcr, base + KS2_DDRPHY_PLLCR_OFFSET);

        tmp = __raw_readl(base + KS2_DDRPHY_PGCR1_OFFSET);
        tmp &= ~(phy_cfg->pgcr1_mask);
        tmp |= phy_cfg->pgcr1_val;
        __raw_writel(tmp, base + KS2_DDRPHY_PGCR1_OFFSET);

        __raw_writel(phy_cfg->ptr0, base + KS2_DDRPHY_PTR0_OFFSET);
        __raw_writel(phy_cfg->ptr1, base + KS2_DDRPHY_PTR1_OFFSET);
        __raw_writel(phy_cfg->ptr3, base + KS2_DDRPHY_PTR3_OFFSET);
        __raw_writel(phy_cfg->ptr4, base + KS2_DDRPHY_PTR4_OFFSET);

        tmp = __raw_readl(base + KS2_DDRPHY_DCR_OFFSET);
        tmp &= ~(phy_cfg->dcr_mask);
        tmp |= phy_cfg->dcr_val;
        __raw_writel(tmp, base + KS2_DDRPHY_DCR_OFFSET);

        __raw_writel(phy_cfg->dtpr0, base + KS2_DDRPHY_DTPR0_OFFSET);
        __raw_writel(phy_cfg->dtpr1, base + KS2_DDRPHY_DTPR1_OFFSET);
        __raw_writel(phy_cfg->dtpr2, base + KS2_DDRPHY_DTPR2_OFFSET);
        __raw_writel(phy_cfg->mr0, base + KS2_DDRPHY_MR0_OFFSET);
        __raw_writel(phy_cfg->mr1, base + KS2_DDRPHY_MR1_OFFSET);
        __raw_writel(phy_cfg->mr2, base + KS2_DDRPHY_MR2_OFFSET);
        __raw_writel(phy_cfg->dtcr, base + KS2_DDRPHY_DTCR_OFFSET);
        __raw_writel(phy_cfg->pgcr2, base + KS2_DDRPHY_PGCR2_OFFSET);

        __raw_writel(phy_cfg->zq0cr1, base + KS2_DDRPHY_ZQ0CR1_OFFSET);
        __raw_writel(phy_cfg->zq1cr1, base + KS2_DDRPHY_ZQ1CR1_OFFSET);
        __raw_writel(phy_cfg->zq2cr1, base + KS2_DDRPHY_ZQ2CR1_OFFSET);

        __raw_writel(phy_cfg->pir_v1, base + KS2_DDRPHY_PIR_OFFSET);
        while ((__raw_readl(base + KS2_DDRPHY_PGSR0_OFFSET) & 0x1) != 0x1)
                ;

        if (cpu_is_k2g()) {
                clrsetbits_le32(base + KS2_DDRPHY_DATX8_2_OFFSET,
                                phy_cfg->datx8_2_mask,
                                phy_cfg->datx8_2_val);

                clrsetbits_le32(base + KS2_DDRPHY_DATX8_3_OFFSET,
                                phy_cfg->datx8_3_mask,
                                phy_cfg->datx8_3_val);

                clrsetbits_le32(base + KS2_DDRPHY_DATX8_4_OFFSET,
                                phy_cfg->datx8_4_mask,
                                phy_cfg->datx8_4_val);

                clrsetbits_le32(base + KS2_DDRPHY_DATX8_5_OFFSET,
                                phy_cfg->datx8_5_mask,
                                phy_cfg->datx8_5_val);

                clrsetbits_le32(base + KS2_DDRPHY_DATX8_6_OFFSET,
                                phy_cfg->datx8_6_mask,
                                phy_cfg->datx8_6_val);

                clrsetbits_le32(base + KS2_DDRPHY_DATX8_7_OFFSET,
                                phy_cfg->datx8_7_mask,
                                phy_cfg->datx8_7_val);

                clrsetbits_le32(base + KS2_DDRPHY_DATX8_8_OFFSET,
                                phy_cfg->datx8_8_mask,
                                phy_cfg->datx8_8_val);
        }

        __raw_writel(phy_cfg->pir_v2, base + KS2_DDRPHY_PIR_OFFSET);
        while ((__raw_readl(base + KS2_DDRPHY_PGSR0_OFFSET) & 0x1) != 0x1)
                ;
}

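/*
 * ddr3_init_ddremif() - program the DDR3 EMIF at 'base': SDRAM config, the
 * four SDRAM timing registers, ZQ calibration config and the refresh control
 * register, all taken from emif_cfg.
 */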
void ddr3_init_ddremif(u32 base, struct ddr3_emif_config *emif_cfg)
{
        __raw_writel(emif_cfg->sdcfg, base + KS2_DDR3_SDCFG_OFFSET);
        __raw_writel(emif_cfg->sdtim1, base + KS2_DDR3_SDTIM1_OFFSET);
        __raw_writel(emif_cfg->sdtim2, base + KS2_DDR3_SDTIM2_OFFSET);
        __raw_writel(emif_cfg->sdtim3, base + KS2_DDR3_SDTIM3_OFFSET);
        __raw_writel(emif_cfg->sdtim4, base + KS2_DDR3_SDTIM4_OFFSET);
        __raw_writel(emif_cfg->zqcfg, base + KS2_DDR3_ZQCFG_OFFSET);
        __raw_writel(emif_cfg->sdrfc, base + KS2_DDR3_SDRFC_OFFSET);
}

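/*
 * ddr3_ecc_support_rmw() - return 1 if the DDR3 controller, identified by its
 * ID (MIDR) register, supports ECC read-modify-write; 0 otherwise.
 */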
int ddr3_ecc_support_rmw(u32 base)
{
        u32 value = __raw_readl(base + KS2_DDR3_MIDR_OFFSET);

        /*
         * Check the DDR3 controller ID register to see whether the
         * controller supports ECC RMW or not
         */
        if (value == 0x40461C02)
                return 1;

        return 0;
}

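/*
 * ddr3_ecc_config() - write 'value' to the ECC control register; when ECC is
 * being enabled, also clear the 1-bit error counter, enable the ECC error
 * interrupts and clear any latched ECC interrupt status.
 */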
static void ddr3_ecc_config(u32 base, u32 value)
{
        u32 data;

        __raw_writel(value, base + KS2_DDR3_ECC_CTRL_OFFSET);
        udelay(100000); /* delay required to synchronize across clock domains */

        if (value & KS2_DDR3_ECC_EN) {
                /* Clear the 1-bit error count */
                data = __raw_readl(base + KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET);
                __raw_writel(data, base + KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET);

                /* Enable the ECC interrupt */
                __raw_writel(KS2_DDR3_1B_ECC_ERR_SYS | KS2_DDR3_2B_ECC_ERR_SYS |
                             KS2_DDR3_WR_ECC_ERR_SYS,
                             base + KS2_DDR3_ECC_INT_ENABLE_SET_SYS_OFFSET);

                /* Clear the ECC error interrupt status */
                __raw_writel(KS2_DDR3_1B_ECC_ERR_SYS | KS2_DDR3_2B_ECC_ERR_SYS |
                             KS2_DDR3_WR_ECC_ERR_SYS,
                             base + KS2_DDR3_ECC_INT_STATUS_OFFSET);
        }
}

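/*
 * ddr3_reset_data() - zero all of DDR3 (ddr3_size is in GB) so that every
 * location carries valid ECC. A 1KB zeroed buffer is copied over the memory
 * with EDMA, while 2GB windows of the 36-bit DDR space are mapped into the
 * 32-bit EDMA view through the MSMC SES MPAX registers; the original MPAX
 * settings are saved first and restored once the scrub is done.
 */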
static void ddr3_reset_data(u32 base, u32 ddr3_size)
{
        u32 mpax[2];
        u32 seg_num;
        u32 seg, blks, dst, edma_blks;
        struct edma3_slot_config slot;
        struct edma3_channel_config edma_channel;
        u32 edma_src[DDR3_EDMA_BLK_SIZE/4] __aligned(16) = {0, };

        /* Set up an EDMA to copy the 1k block to the entire DDR */
        puts("\nClear entire DDR3 memory to enable ECC\n");

        /* Save the SES MPAX regs */
        if (cpu_is_k2g())
                msmc_get_ses_mpax(K2G_MSMC_SEGMENT_ARM, 0, mpax);
        else
                msmc_get_ses_mpax(K2HKLE_MSMC_SEGMENT_ARM, 0, mpax);

        /* Set up EDMA slot 1 configuration */
        slot.opt = EDMA3_SLOPT_TRANS_COMP_INT_ENB |
                   EDMA3_SLOPT_COMP_CODE(0) |
                   EDMA3_SLOPT_STATIC | EDMA3_SLOPT_AB_SYNC;
        slot.bcnt = DDR3_EDMA_BCNT;
        slot.acnt = DDR3_EDMA_BLK_SIZE;
        slot.ccnt = DDR3_EDMA_CCNT;
        slot.src_bidx = 0;
        slot.dst_bidx = DDR3_EDMA_BLK_SIZE;
        slot.src_cidx = 0;
        slot.dst_cidx = 0;
        slot.link = EDMA3_PARSET_NULL_LINK;
        slot.bcntrld = 0;
        edma3_slot_configure(KS2_EDMA0_BASE, DDR3_EDMA_SLOT_NUM, &slot);

        /* Configure the quick EDMA channel */
        edma_channel.slot = DDR3_EDMA_SLOT_NUM;
        edma_channel.chnum = 0;
        edma_channel.complete_code = 0;
        /* Event trigger after dst update */
        edma_channel.trigger_slot_word = EDMA3_TWORD(dst);
        qedma3_start(KS2_EDMA0_BASE, &edma_channel);

        /* DDR3 size in segments (4KB seg size) */
        seg_num = ddr3_size << (30 - KS2_MSMC_SEG_SIZE_SHIFT);

        for (seg = 0; seg < seg_num; seg += KS2_MSMC_MAP_SEG_NUM) {
                /*
                 * Map the 2GB 36-bit DDR address to a 32-bit DDR address in
                 * the EMIF access slave interface so that the EDMA driver
                 * can access it
                 */
                if (cpu_is_k2g()) {
                        msmc_map_ses_segment(K2G_MSMC_SEGMENT_ARM, 0,
                                             base >> KS2_MSMC_SEG_SIZE_SHIFT,
                                             KS2_MSMC_DST_SEG_BASE + seg,
                                             MPAX_SEG_2G);
                } else {
                        msmc_map_ses_segment(K2HKLE_MSMC_SEGMENT_ARM, 0,
                                             base >> KS2_MSMC_SEG_SIZE_SHIFT,
                                             KS2_MSMC_DST_SEG_BASE + seg,
                                             MPAX_SEG_2G);
                }

                if ((seg_num - seg) > KS2_MSMC_MAP_SEG_NUM)
                        edma_blks = KS2_MSMC_MAP_SEG_NUM <<
                                        (KS2_MSMC_SEG_SIZE_SHIFT
                                        - DDR3_EDMA_BLK_SIZE_SHIFT);
                else
                        edma_blks = (seg_num - seg) << (KS2_MSMC_SEG_SIZE_SHIFT
                                        - DDR3_EDMA_BLK_SIZE_SHIFT);

                /* Use the EDMA driver to scrub 2GB of DDR memory */
                for (dst = base, blks = 0; blks < edma_blks;
                     blks += DDR3_EDMA_BCNT, dst += DDR3_EDMA_XF_SIZE) {
                        edma3_set_src_addr(KS2_EDMA0_BASE,
                                           edma_channel.slot, (u32)edma_src);
                        edma3_set_dest_addr(KS2_EDMA0_BASE,
                                            edma_channel.slot, (u32)dst);

                        while (edma3_check_for_transfer(KS2_EDMA0_BASE,
                                                        &edma_channel))
                                udelay(10);
                }
        }

        qedma3_stop(KS2_EDMA0_BASE, &edma_channel);

        /* Restore the SES MPAX regs */
        if (cpu_is_k2g())
                msmc_set_ses_mpax(K2G_MSMC_SEGMENT_ARM, 0, mpax);
        else
                msmc_set_ses_mpax(K2HKLE_MSMC_SEGMENT_ARM, 0, mpax);
}

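/*
 * ddr3_ecc_init_range() - clear ECC address range 1 and enable ECC, with RMW
 * when the controller supports it.
 */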
static void ddr3_ecc_init_range(u32 base)
{
        u32 ecc_val = KS2_DDR3_ECC_EN;
        u32 rmw = ddr3_ecc_support_rmw(base);

        if (rmw)
                ecc_val |= KS2_DDR3_ECC_RMW_EN;

        __raw_writel(0, base + KS2_DDR3_ECC_ADDR_RANGE1_OFFSET);

        ddr3_ecc_config(base, ecc_val);
}

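/*
 * ddr3_enable_ecc() - enable ECC; when 'test' is set, also enable ECC on
 * address range 1 for testing. Without RMW support, ECC stays disabled unless
 * a test run was requested; with RMW support, RMW is enabled as well.
 */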
void ddr3_enable_ecc(u32 base, int test)
{
        u32 ecc_val = KS2_DDR3_ECC_ENABLE;
        u32 rmw = ddr3_ecc_support_rmw(base);

        if (test)
                ecc_val |= KS2_DDR3_ECC_ADDR_RNG_1_EN;

        if (!rmw) {
                if (!test)
                        /*
                         * By default, disable ECC when rmw = 0 and no
                         * ECC test
                         */
                        ecc_val = 0;
        } else {
                ecc_val |= KS2_DDR3_ECC_RMW_EN;
        }

        ddr3_ecc_config(base, ecc_val);
}

void ddr3_disable_ecc(u32 base)
{
        ddr3_ecc_config(base, 0);
}

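/*
 * CIC helpers for K2HK/K2L: route the DDR3 ECC system interrupt from the
 * chip-level interrupt controller (CIC2) to the ARM GIC.
 */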
#if defined(CONFIG_SOC_K2HK) || defined(CONFIG_SOC_K2L)
static void cic_init(u32 base)
{
        /* Disable CIC global interrupts */
        __raw_writel(0, base + KS2_CIC_GLOBAL_ENABLE);

        /* Set to normal mode, no nesting, no priority hold */
        __raw_writel(0, base + KS2_CIC_CTRL);
        __raw_writel(0, base + KS2_CIC_HOST_CTRL);

        /* Enable CIC global interrupts */
        __raw_writel(1, base + KS2_CIC_GLOBAL_ENABLE);
}

static void cic_map_cic_to_gic(u32 base, u32 chan_num, u32 irq_num)
{
        /* Map the system interrupt to a CIC channel */
        __raw_writeb(chan_num, base + KS2_CIC_CHAN_MAP(0) + irq_num);

        /* Enable the CIC system interrupt */
        __raw_writel(irq_num, base + KS2_CIC_SYS_ENABLE_IDX_SET);

        /* Enable the CIC host interrupt */
        __raw_writel(chan_num, base + KS2_CIC_HOST_ENABLE_IDX_SET);
}

static void ddr3_map_ecc_cic2_irq(u32 base)
{
        cic_init(base);
        cic_map_cic_to_gic(base, KS2_CIC2_DDR3_ECC_CHAN_NUM,
                           KS2_CIC2_DDR3_ECC_IRQ_NUM);
}
#endif

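/*
 * ddr3_init_ecc() - top-level ECC bring-up: controllers without RMW support
 * get ECC disabled; otherwise configure the ECC range, scrub all of DDR3 so
 * that every location has valid ECC, route the ECC interrupt to the GIC on
 * K2HK/K2L, and finally enable ECC.
 */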
void ddr3_init_ecc(u32 base, u32 ddr3_size)
{
        if (!ddr3_ecc_support_rmw(base)) {
                ddr3_disable_ecc(base);
                return;
        }

        ddr3_ecc_init_range(base);
        ddr3_reset_data(CONFIG_SYS_SDRAM_BASE, ddr3_size);

        /* Map the DDR3 ECC system interrupt from CIC2 to the GIC */
#if defined(CONFIG_SOC_K2HK) || defined(CONFIG_SOC_K2L)
        ddr3_map_ecc_cic2_irq(KS2_CIC2_BASE);
#endif
        ddr3_enable_ecc(base, 0);
}

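/*
 * ddr3_check_ecc_int() - report pending DDR3 ECC error interrupts and the
 * 1-bit error count/address log. On an uncorrectable 2-bit error the device
 * is reset, unless the 'ecc_test' environment variable is set to a non-zero
 * value.
 */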
void ddr3_check_ecc_int(u32 base)
{
        char *env;
        int ecc_test = 0;
        u32 value = __raw_readl(base + KS2_DDR3_ECC_INT_STATUS_OFFSET);

        env = env_get("ecc_test");
        if (env)
                ecc_test = simple_strtol(env, NULL, 0);

        if (value & KS2_DDR3_WR_ECC_ERR_SYS)
                puts("DDR3 ECC write error interrupted\n");

        if (value & KS2_DDR3_2B_ECC_ERR_SYS) {
                puts("DDR3 ECC 2-bit error interrupted\n");

                if (!ecc_test) {
                        puts("Resetting the device ...\n");
                        reset_cpu(0);
                }
        }

        value = __raw_readl(base + KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET);
        if (value) {
                printf("1-bit ECC err count: 0x%x\n", value);
                value = __raw_readl(base +
                                    KS2_DDR3_ONE_BIT_ECC_ERR_ADDR_LOG_OFFSET);
                printf("1-bit ECC err address log: 0x%x\n", value);
        }
}

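/*
 * ddr3_reset_ddrphy() - pulse the DDR3A PHY reset: assert the reset bit in
 * the DDR3A PLL control register, wait 10 us, then release it.
 */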
void ddr3_reset_ddrphy(void)
{
        u32 tmp;

        /* Assert DDR3A PHY reset */
        tmp = readl(KS2_DDR3APLLCTL1);
        tmp |= KS2_DDR3_PLLCTRL_PHY_RESET;
        writel(tmp, KS2_DDR3APLLCTL1);

        /* Wait 10us so the PHY catches the reset */
        udelay(10);

        /* Release DDR3A PHY reset */
        tmp = readl(KS2_DDR3APLLCTL1);
        tmp &= ~KS2_DDR3_PLLCTRL_PHY_RESET;
        __raw_writel(tmp, KS2_DDR3APLLCTL1);
}

#ifdef CONFIG_SOC_K2HK
/**
 * ddr3_err_reset_workaround() - reset workaround in case a leveling error
 * is detected, for PG 1.0 and 1.1 K2HK SoCs
 */
void ddr3_err_reset_workaround(void)
{
        unsigned int tmp;
        unsigned int tmp_a;
        unsigned int tmp_b;

        /*
         * Check for PGSR0 error bits of DDR3 PHY.
         * Check for WLERR, QSGERR, WLAERR,
         * RDERR, WDERR, REERR, WEERR error to see if they are set or not
         */
        tmp_a = __raw_readl(KS2_DDR3A_DDRPHYC + KS2_DDRPHY_PGSR0_OFFSET);
        tmp_b = __raw_readl(KS2_DDR3B_DDRPHYC + KS2_DDRPHY_PGSR0_OFFSET);

        if (((tmp_a & 0x0FE00000) != 0) || ((tmp_b & 0x0FE00000) != 0)) {
                printf("DDR Leveling Error Detected!\n");
                printf("DDR3A PGSR0 = 0x%x\n", tmp_a);
                printf("DDR3B PGSR0 = 0x%x\n", tmp_b);

                /*
                 * Write keys to the KICK registers to enable writes to
                 * registers in the boot config space
                 */
                __raw_writel(KS2_KICK0_MAGIC, KS2_KICK0);
                __raw_writel(KS2_KICK1_MAGIC, KS2_KICK1);

                /*
                 * Move the DDR3A module out of reset isolation by setting
                 * MDCTL23[12] = 0
                 */
                tmp_a = __raw_readl(KS2_PSC_BASE +
                                    PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3A));

                tmp_a = PSC_REG_MDCTL_SET_RESET_ISO(tmp_a, 0);
                __raw_writel(tmp_a, KS2_PSC_BASE +
                             PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3A));

                /*
                 * Move the DDR3B module out of reset isolation by setting
                 * MDCTL24[12] = 0
                 */
                tmp_b = __raw_readl(KS2_PSC_BASE +
                                    PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3B));
                tmp_b = PSC_REG_MDCTL_SET_RESET_ISO(tmp_b, 0);
                __raw_writel(tmp_b, KS2_PSC_BASE +
                             PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3B));

                /*
                 * Write the 0x5A69 key to RSTCTRL[15:0] to unlock writes
                 * to RSTCTRL and RSTCFG
                 */
                tmp = __raw_readl(KS2_RSTCTRL);
                tmp &= KS2_RSTCTRL_MASK;
                tmp |= KS2_RSTCTRL_KEY;
                __raw_writel(tmp, KS2_RSTCTRL);

                /*
                 * Set the PLL controller to drive a hard reset on SW trigger
                 * by setting RSTCFG[13] = 0
                 */
                tmp = __raw_readl(KS2_RSTCTRL_RSCFG);
                tmp &= ~KS2_RSTYPE_PLL_SOFT;
                __raw_writel(tmp, KS2_RSTCTRL_RSCFG);

                reset_cpu(0);
        }
}
#endif
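
/*
 * Typical call order (illustrative sketch only, not part of this file): a
 * Keystone2 board's SPL DDR setup would pass its board-specific PHY/EMIF
 * configuration tables into the functions above, roughly along the lines of:
 *
 *      ddr3_init_ddrphy(KS2_DDR3A_DDRPHYC, &board_ddr3phy_cfg);
 *      ddr3_init_ddremif(ddr3a_emif_base, &board_ddr3_emif_cfg);
 *      ddr3_init_ecc(ddr3a_emif_base, ddr3_size_gb);
 *
 * The config struct names, the EMIF base variable and the size variable are
 * placeholders; only KS2_DDR3A_DDRPHYC is referenced elsewhere in this file.
 */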