/*
 * (C) Copyright 2013
 * David Feng <fenghua@phytium.com.cn>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <linux/linkage.h>
#include <asm/macro.h>
#include <asm/armv8/mmu.h>

/*************************************************************************
 *
 * Startup Code (reset vector)
 *
 *************************************************************************/

.globl	_start
_start:
	b	reset

	.align 3

.globl	_TEXT_BASE
_TEXT_BASE:
	.quad	CONFIG_SYS_TEXT_BASE
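
/*
 * _TEXT_BASE records the link-time base address (CONFIG_SYS_TEXT_BASE)
 * so that later code can compute the run-time relocation offset.
 */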

/*
 * These are defined in the linker script.
 */
.globl	_end_ofs
_end_ofs:
	.quad	_end - _start

.globl	_bss_start_ofs
_bss_start_ofs:
	.quad	__bss_start - _start

.globl	_bss_end_ofs
_bss_end_ofs:
	.quad	__bss_end - _start

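/*
 * The *_ofs values above are link-time constants: offsets from _start
 * are stored instead of absolute addresses because this code may be
 * executing at an address other than CONFIG_SYS_TEXT_BASE before
 * relocation.
 */
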
reset:
	/*
	 * Could be EL3/EL2/EL1, Initial State:
	 * Little Endian, MMU Disabled, i/dCache Disabled
	 */
	adr	x0, vectors
	switch_el x1, 3f, 2f, 1f
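	/*
	 * switch_el branches on CurrentEL: each path installs the exception
	 * vectors for that level and un-traps FP/SIMD (via CPTR_EL3,
	 * CPTR_EL2 or CPACR_EL1) before the paths rejoin at 0:.
	 */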
3:	msr	vbar_el3, x0
	mrs	x0, scr_el3
	orr	x0, x0, #0xf			/* SCR_EL3.NS|IRQ|FIQ|EA */
	msr	scr_el3, x0
	msr	cptr_el3, xzr			/* Enable FP/SIMD */
	ldr	x0, =COUNTER_FREQUENCY
	msr	cntfrq_el0, x0			/* Initialize CNTFRQ */
	b	0f
2:	msr	vbar_el2, x0
	mov	x0, #0x33ff
	msr	cptr_el2, x0			/* Enable FP/SIMD */
	b	0f
1:	msr	vbar_el1, x0
	mov	x0, #3 << 20
	msr	cpacr_el1, x0			/* Enable FP/SIMD */
0:

	/* Apply ARM core specific errata */
	bl	apply_core_errata

	/*
	 * Cache/BPB/TLB Invalidate
	 * i-cache is invalidated before being enabled in icache_enable()
	 * tlb is invalidated before the MMU is enabled in dcache_enable()
	 * d-cache is invalidated before being enabled in dcache_enable()
	 */

	/* Processor specific initialization */
	bl	lowlevel_init

#ifdef CONFIG_ARMV8_MULTIENTRY
	branch_if_master x0, x1, master_cpu

	/*
	 * Slave CPUs
	 */
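	/*
	 * Secondary cores park in the wfe loop below, polling the
	 * spin-table slot at CPU_RELEASE_ADDR; the master releases them by
	 * writing a 64-bit entry address there and issuing an event.
	 */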
slave_cpu:
	wfe
	ldr	x1, =CPU_RELEASE_ADDR
	ldr	x0, [x1]
	cbz	x0, slave_cpu
	br	x0			/* branch to the given address */
master_cpu:
	/* On the master CPU */
#endif /* CONFIG_ARMV8_MULTIENTRY */

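	/*
	 * The master (or the only CPU, for single-entry builds) continues
	 * into _main in arch/arm/lib/crt0_64.S, which sets up the initial
	 * stack and calls board_init_f().
	 */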
	bl	_main

/*-----------------------------------------------------------------------*/

WEAK(apply_core_errata)

	mov	x29, lr			/* Save LR */
	/* For now, we support Cortex-A57 specific errata only */

	/* Check if we are running on a Cortex-A57 core */
	branch_if_a57_core x0, apply_a57_core_errata
0:
	mov	lr, x29			/* Restore LR */
	ret

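/*
 * CPUACTLR_EL1 is an IMPLEMENTATION DEFINED register; it is accessed
 * below through its generic S3_1_c15_c2_0 encoding so the file also
 * builds with assemblers that do not know the symbolic name.
 */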
apply_a57_core_errata:

#ifdef CONFIG_ARM_ERRATA_828024
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable non-allocate hint of w-b-n-a memory type */
	orr	x0, x0, #0x1 << 49
	/* Disable write streaming no L1-allocate threshold */
	orr	x0, x0, #0x3 << 25
	/* Disable write streaming no-allocate threshold */
	orr	x0, x0, #0x3 << 27
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_826974
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable speculative load execution ahead of a DMB */
	orr	x0, x0, #0x1 << 59
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833069
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable the "Enable Invalidates of BTB" bit */
	and	x0, x0, #0xE
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif
	b	0b
ENDPROC(apply_core_errata)

/*-----------------------------------------------------------------------*/

WEAK(lowlevel_init)
	mov	x29, lr			/* Save LR */

#ifndef CONFIG_ARMV8_MULTIENTRY
	/*
	 * For single-entry systems the lowlevel init is very simple.
	 */
	ldr	x0, =GICD_BASE
	bl	gic_init_secure

#else /* CONFIG_ARMV8_MULTIENTRY is set */

#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
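	/*
	 * GIC setup: the master initializes the shared distributor once;
	 * every core then initializes its own per-CPU interface (the
	 * redistributor at GICR_BASE for GICv3, the CPU interface at
	 * GICC_BASE for GICv2).
	 */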
	branch_if_slave x0, 1f
	ldr	x0, =GICD_BASE
	bl	gic_init_secure
1:
#if defined(CONFIG_GICV3)
	ldr	x0, =GICR_BASE
	bl	gic_init_secure_percpu
#elif defined(CONFIG_GICV2)
	ldr	x0, =GICD_BASE
	ldr	x1, =GICC_BASE
	bl	gic_init_secure_percpu
#endif
#endif

	branch_if_master x0, x1, 2f

	/*
	 * Slaves should wait for the master to finish clearing the spin
	 * table. This sync prevents slaves from observing a stale
	 * spin-table value and jumping to the wrong place.
	 */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
#ifdef CONFIG_GICV2
	ldr	x0, =GICC_BASE
#endif
	bl	gic_wait_for_interrupt
#endif

	/*
	 * All slaves will enter EL2 and optionally EL1.
	 */
	bl	armv8_switch_to_el2
#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
	bl	armv8_switch_to_el1
#endif
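	/*
	 * Dropping the exception level here means slaves wait in the
	 * spin-table loop at the EL the OS expects secondary cores to
	 * start in.
	 */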

#endif /* CONFIG_ARMV8_MULTIENTRY */

2:
	mov	lr, x29			/* Restore LR */
	ret
ENDPROC(lowlevel_init)

WEAK(smp_kick_all_cpus)
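	/*
	 * Called by the master after the spin table is populated; the SGI
	 * wakes slaves blocked in gic_wait_for_interrupt above.
	 */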
	/* Kick secondary CPUs up with an SGI 0 interrupt */
	mov	x29, lr			/* Save LR */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
	ldr	x0, =GICD_BASE
	bl	gic_kick_secondary_cpus
#endif
	mov	lr, x29			/* Restore LR */
	ret
ENDPROC(smp_kick_all_cpus)

/*-----------------------------------------------------------------------*/

ENTRY(c_runtime_cpu_setup)
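	/*
	 * Invoked from crt0_64.S after U-Boot relocates itself to RAM:
	 * "vectors" now lives at its relocated address, so VBAR at the
	 * current EL must be reprogrammed.
	 */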
	/* Relocate vBAR */
	adr	x0, vectors
	switch_el x1, 3f, 2f, 1f
3:	msr	vbar_el3, x0
	b	0f
2:	msr	vbar_el2, x0
	b	0f
1:	msr	vbar_el1, x0
0:

	ret
ENDPROC(c_runtime_cpu_setup)