/*
 * armboot - Startup Code for ARM1176 CPU-core
 *
 * Copyright (c) 2007	Samsung Electronics
 *
 * Copyright (C) 2008
 * Guennadi Liakhovetski, DENX Software Engineering, <[email protected]>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 *
 * 2007-09-21 - Restructured code by jsgood ([email protected])
 * 2007-09-21 - Added MoviNAND and OneNAND boot code by
 *		jsgood ([email protected])
 *		Base code by scsuh (sc.suh)
 */

#include <asm-offsets.h>
#include <config.h>
#include <version.h>
#ifdef CONFIG_ENABLE_MMU
#include <asm/proc/domain.h>
#endif

#if !defined(CONFIG_ENABLE_MMU) && !defined(CONFIG_SYS_PHY_UBOOT_BASE)
#define CONFIG_SYS_PHY_UBOOT_BASE	CONFIG_SYS_UBOOT_BASE
#endif

/*
 *************************************************************************
 *
 * Jump vector table as in table 3.1 in [1]
 *
 *************************************************************************
 */

.globl _start
_start: b	reset
#ifndef CONFIG_NAND_SPL
	ldr	pc, _undefined_instruction
	ldr	pc, _software_interrupt
	ldr	pc, _prefetch_abort
	ldr	pc, _data_abort
	ldr	pc, _not_used
	ldr	pc, _irq
	ldr	pc, _fiq

_undefined_instruction:
	.word undefined_instruction
_software_interrupt:
	.word software_interrupt
_prefetch_abort:
	.word prefetch_abort
_data_abort:
	.word data_abort
_not_used:
	.word not_used
_irq:
	.word irq
_fiq:
	.word fiq
_pad:
	.word 0x12345678		/* now 16*4=64 */
#else
	. = _start + 64
#endif
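
/*
 * Note: the eight vector instructions plus the eight literal words above
 * occupy 16 words (64 bytes), which is why the NAND SPL variant simply
 * reserves the same 64 bytes with ". = _start + 64".
 */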

.global _end_vect
_end_vect:
	.balignl 16,0xdeadbeef
/*
 *************************************************************************
 *
 * Startup Code (reset vector)
 *
 * Do important init only if we don't start from memory!
 * Set up memory and board-specific bits prior to relocation.
 * Relocate armboot to RAM, then set up the stack.
 *
 *************************************************************************
 */
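
/*
 * In this tree the reset path only performs the CPU-critical setup and
 * then hands over to the generic C runtime startup (_main); stack setup,
 * the calls into board_init_f/board_init_r and the call back into
 * relocate_code below are presumably driven from arch/arm/lib/crt0.S.
 */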
98 | ||
a51dd67a | 99 | .globl _TEXT_BASE |
9b07773f | 100 | _TEXT_BASE: |
14d0a02a | 101 | .word CONFIG_SYS_TEXT_BASE |
9b07773f GL |
102 | |
103 | /* | |
104 | * Below variable is very important because we use MMU in U-Boot. | |
105 | * Without it, we cannot run code correctly before MMU is ON. | |
106 | * by scsuh. | |
107 | */ | |
108 | _TEXT_PHY_BASE: | |
6d0f6bcf | 109 | .word CONFIG_SYS_PHY_UBOOT_BASE |
9b07773f | 110 | |
/*
 * These are defined in the board-specific linker script.
 * Subtracting _start from them lets the linker put their
 * relative position in the executable instead of leaving
 * them null.
 */

.globl _bss_start_ofs
_bss_start_ofs:
	.word __bss_start - _start

.globl _bss_end_ofs
_bss_end_ofs:
	.word __bss_end__ - _start

.globl _end_ofs
_end_ofs:
	.word _end - _start

/* IRQ stack memory (calculated at run-time) + 8 bytes */
.globl IRQ_STACK_START_IN
IRQ_STACK_START_IN:
	.word	0x0badc0de

/*
 * the actual reset code
 */

reset:
	/*
	 * set the cpu to SVC32 mode
	 */
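	/*
	 * 0x3f clears the mode and Thumb bits; 0xd3 selects SVC32
	 * mode (0x13) with the I and F bits set, i.e. IRQs and FIQs
	 * masked.
	 */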
	mrs	r0, cpsr
	bic	r0, r0, #0x3f
	orr	r0, r0, #0xd3
	msr	cpsr, r0

/*
 *************************************************************************
 *
 * CPU_init_critical registers
 *
 * setup important registers
 * setup memory timing
 *
 *************************************************************************
 */
	/*
	 * we do sys-critical inits only at reboot,
	 * not when booting from ram!
	 */
cpu_init_crit:
	/*
	 * When booting from NAND it has definitely been a reset, so there
	 * is no need to flush the caches and disable the MMU.
	 */
#ifndef CONFIG_NAND_SPL
	/*
	 * flush v4 I/D caches
	 */
	mov	r0, #0
	mcr	p15, 0, r0, c7, c7, 0	/* flush v3/v4 cache */
	mcr	p15, 0, r0, c8, c7, 0	/* flush v4 TLB */

	/*
	 * disable MMU stuff and caches
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #0x00002300	@ clear bits 13, 9:8 (--V- --RS)
	bic	r0, r0, #0x00000087	@ clear bits 7, 2:0 (B--- -CAM)
	orr	r0, r0, #0x00000002	@ set bit 2 (A) Align
	orr	r0, r0, #0x00001000	@ set bit 12 (I) I-Cache

	/* Prepare to disable the MMU */
	adr	r2, mmu_disable_phys
	sub	r2, r2, #(CONFIG_SYS_PHY_UBOOT_BASE - CONFIG_SYS_TEXT_BASE)
	b	mmu_disable

	.align 5
	/* Run in a single cache-line */
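	/*
	 * .align 5 gives 32-byte alignment, which matches the ARM1176
	 * I-cache line size, so the mcr that turns the MMU off and the
	 * jump to the physical address below are fetched from a single
	 * cache line while the address mapping changes.
	 */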
mmu_disable:
	mcr	p15, 0, r0, c1, c0, 0
	nop
	nop
	mov	pc, r2
mmu_disable_phys:

#ifdef CONFIG_DISABLE_TCM
	/*
	 * Disable the TCMs
	 */
	mrc	p15, 0, r0, c0, c0, 2	/* Return TCM details */
	cmp	r0, #0
	beq	skip_tcmdisable
	mov	r1, #0
	mov	r2, #1
	tst	r0, r2
	mcrne	p15, 0, r1, c9, c1, 1	/* Disable Instruction TCM if present */
	tst	r0, r2, LSL #16
	mcrne	p15, 0, r1, c9, c1, 0	/* Disable Data TCM if present */
skip_tcmdisable:
#endif
#endif

#ifdef CONFIG_PERIPORT_REMAP
	/* Peri port setup */
	ldr	r0, =CONFIG_PERIPORT_BASE
	orr	r0, r0, #CONFIG_PERIPORT_SIZE
	mcr	p15, 0, r0, c15, c2, 4
#endif

	/*
	 * Go setup Memory and board specific bits prior to relocation.
	 */
	bl	lowlevel_init		/* go setup pll,mux,memory */

	bl	_main

/*------------------------------------------------------------------------------*/

/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 */
	.globl	relocate_code
relocate_code:
	mov	r4, r0	/* save addr_sp */
	mov	r5, r1	/* save addr of gd */
	mov	r6, r2	/* save addr of destination */

	adr	r0, _start
	cmp	r0, r6
	moveq	r9, #0		/* no relocation. relocation offset(r9) = 0 */
	beq	relocate_done	/* skip relocation */
	mov	r1, r6		/* r1 <- scratch for copy_loop */
	ldr	r3, _bss_start_ofs
	add	r2, r0, r3	/* r2 <- source end address */

copy_loop:
	ldmia	r0!, {r9-r10}	/* copy from source address [r0] */
	stmia	r1!, {r9-r10}	/* copy to   target address [r1] */
	cmp	r0, r2		/* until source end address [r2] */
	blo	copy_loop
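	/*
	 * The copy above moves everything from _start up to __bss_start
	 * (text, rodata and data, but not BSS) to the destination in RAM,
	 * two words (8 bytes) per ldmia/stmia pair.
	 */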

#ifndef CONFIG_SPL_BUILD
	/*
	 * fix .rel.dyn relocations
	 */
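	/*
	 * Each .rel.dyn entry is an 8-byte Elf32_Rel: a 4-byte r_offset
	 * (the location to patch) followed by a 4-byte r_info.  The low
	 * 8 bits of r_info hold the relocation type (23 = R_ARM_RELATIVE,
	 * 2 = R_ARM_ABS32) and the upper 24 bits hold the .dynsym index.
	 * Since each Elf32_Sym is 16 bytes with st_value at offset 4,
	 * "r_info LSR #4" below yields the byte offset of the symbol
	 * entry inside the symbol table directly.
	 */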
	ldr	r0, _TEXT_BASE		/* r0 <- Text base */
	sub	r9, r6, r0		/* r9 <- relocation offset */
	ldr	r10, _dynsym_start_ofs	/* r10 <- sym table ofs */
	add	r10, r10, r0		/* r10 <- sym table in FLASH */
	ldr	r2, _rel_dyn_start_ofs	/* r2 <- rel dyn start ofs */
	add	r2, r2, r0		/* r2 <- rel dyn start in FLASH */
	ldr	r3, _rel_dyn_end_ofs	/* r3 <- rel dyn end ofs */
	add	r3, r3, r0		/* r3 <- rel dyn end in FLASH */
fixloop:
	ldr	r0, [r2]	/* r0 <- location to fix up, IN FLASH! */
	add	r0, r0, r9	/* r0 <- location to fix up in RAM */
	ldr	r1, [r2, #4]
	and	r7, r1, #0xff
	cmp	r7, #23		/* relative fixup? */
	beq	fixrel
	cmp	r7, #2		/* absolute fixup? */
	beq	fixabs
	/* ignore unknown type of fixup */
	b	fixnext
fixabs:
	/* absolute fix: set location to (offset) symbol value */
	mov	r1, r1, LSR #4		/* r1 <- byte offset of symbol in .dynsym */
	add	r1, r10, r1		/* r1 <- address of symbol in table */
	ldr	r1, [r1, #4]		/* r1 <- symbol value */
	add	r1, r1, r9		/* r1 <- relocated sym addr */
	b	fixnext
fixrel:
	/* relative fix: increase location by offset */
	ldr	r1, [r0]
	add	r1, r1, r9
fixnext:
	str	r1, [r0]
	add	r2, r2, #8	/* each rel.dyn entry is 8 bytes */
	cmp	r2, r3
	blo	fixloop
#endif

#ifdef CONFIG_ENABLE_MMU
enable_mmu:
	/* enable domain access */
	ldr	r5, =0x0000ffff
	mcr	p15, 0, r5, c3, c0, 0	/* load domain access register */

	/* Set the TTB register */
	ldr	r0, _mmu_table_base
	ldr	r1, =CONFIG_SYS_PHY_UBOOT_BASE
	ldr	r2, =0xfff00000
	bic	r0, r0, r2
	orr	r1, r0, r1
	mcr	p15, 0, r1, c2, c0, 0
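	/*
	 * CP15 c2,c0,0 is the translation table base register (TTBR0);
	 * the top 12 bits of the linked mmu_table address are replaced
	 * with CONFIG_SYS_PHY_UBOOT_BASE so the TTB holds the table's
	 * physical address.
	 */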

	/* Enable the MMU */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #1		/* Set CR_M to enable MMU */

	/* Prepare to enable the MMU */
	adr	r1, skip_hw_init
	and	r1, r1, #0x3fc
	ldr	r2, _TEXT_BASE
	ldr	r3, =0xfff00000
	and	r2, r2, r3
	orr	r2, r2, r1
	b	mmu_enable

	.align 5
	/* Run in a single cache-line */
mmu_enable:

	mcr	p15, 0, r0, c1, c0, 0
	nop
	nop
	mov	pc, r2
skip_hw_init:
#endif

relocate_done:

	bx	lr

_rel_dyn_start_ofs:
	.word __rel_dyn_start - _start
_rel_dyn_end_ofs:
	.word __rel_dyn_end - _start
_dynsym_start_ofs:
	.word __dynsym_start - _start

#ifdef CONFIG_ENABLE_MMU
_mmu_table_base:
	.word mmu_table
#endif

	.globl	c_runtime_cpu_setup
c_runtime_cpu_setup:

	mov	pc, lr

#ifndef CONFIG_NAND_SPL
/*
 * We assume that the cache maintenance has already been done
 * (e.g. by cleanup_before_linux()).  Since U-Boot does not use the
 * d-cache here, nothing needs to be done about the caches in this
 * function; it only cleans up the MMU. (by scsuh)
 *
 * void	theLastJump(void *kernel, int arch_num, uint boot_params);
 */
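/*
 * theLastJump is entered with the MMU still on: it first jumps to the
 * physical alias of phy_last_jump (top 12 address bits replaced with
 * _TEXT_PHY_BASE), then disables the MMU, invalidates the TLBs and
 * finally branches to the kernel entry point passed in r0 (kept in r9),
 * with r0 cleared and r1/r2 still holding arch_num and boot_params.
 */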
#ifdef CONFIG_ENABLE_MMU
	.globl theLastJump
theLastJump:
	mov	r9, r0
	ldr	r3, =0xfff00000
	ldr	r4, _TEXT_PHY_BASE
	adr	r5, phy_last_jump
	bic	r5, r5, r3
	orr	r5, r5, r4
	mov	pc, r5
phy_last_jump:
	/*
	 * disable MMU stuff
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #0x00002300	/* clear bits 13, 9:8 (--V- --RS) */
	bic	r0, r0, #0x00000087	/* clear bits 7, 2:0 (B--- -CAM) */
	orr	r0, r0, #0x00000002	/* set bit 2 (A) Align */
	orr	r0, r0, #0x00001000	/* set bit 12 (I) I-Cache */
	mcr	p15, 0, r0, c1, c0, 0

	mcr	p15, 0, r0, c8, c7, 0	/* flush v4 TLB */

	mov	r0, #0
	mov	pc, r9
#endif

/*
 *************************************************************************
 *
 * Interrupt handling
 *
 *************************************************************************
 */
@
@ IRQ stack frame.
@
#define S_FRAME_SIZE	72

#define S_OLD_R0	68
#define S_PSR		64
#define S_PC		60
#define S_LR		56
#define S_SP		52

#define S_IP		48
#define S_FP		44
#define S_R10		40
#define S_R9		36
#define S_R8		32
#define S_R7		28
#define S_R6		24
#define S_R5		20
#define S_R4		16
#define S_R3		12
#define S_R2		8
#define S_R1		4
#define S_R0		0
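
@ The 18 words above (r0-r12, sp, lr, pc, cpsr, old_r0) give the 72-byte
@ frame size; the layout is assumed to mirror struct pt_regs in
@ arch/arm/include/asm/proc-armv/ptrace.h so the do_* C handlers can
@ interpret the saved state.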

#define MODE_SVC	0x13
#define I_BIT		0x80

/*
 * use bad_save_user_regs for abort/prefetch/undef/swi ...
 */

	.macro	bad_save_user_regs
	/* carve out a frame on current user stack */
	sub	sp, sp, #S_FRAME_SIZE
	/* Save user registers (now in svc mode) r0-r12 */
	stmia	sp, {r0 - r12}

	ldr	r2, IRQ_STACK_START_IN
	/* get values for "aborted" pc and cpsr (into parm regs) */
	ldmia	r2, {r2 - r3}
	/* grab pointer to old stack */
	add	r0, sp, #S_FRAME_SIZE

	add	r5, sp, #S_SP
	mov	r1, lr
	/* save sp_SVC, lr_SVC, pc, cpsr */
	stmia	r5, {r0 - r3}
	/* save current stack into r0 (param register) */
	mov	r0, sp
	.endm

	.macro	get_bad_stack
	ldr	r13, IRQ_STACK_START_IN		@ setup our mode stack

	/* save caller lr in position 0 of saved stack */
	str	lr, [r13]
	/* get the spsr */
	mrs	lr, spsr
	/* save spsr in position 1 of saved stack */
	str	lr, [r13, #4]

	/* prepare SVC-Mode */
	mov	r13, #MODE_SVC
	@ msr	spsr_c, r13
	/* switch modes, make sure moves will execute */
	msr	spsr, r13
	/* capture return pc */
	mov	lr, pc
	/* jump to next instruction & switch modes. */
	movs	pc, lr
	.endm
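
/*
 * The macro above works because "movs pc, lr" copies SPSR into CPSR as
 * it branches: SPSR was just loaded with MODE_SVC, so execution resumes
 * at the instruction after the macro in SVC mode, now using sp_SVC,
 * while the exception's own lr and spsr have been parked in the
 * IRQ_STACK_START_IN scratch area for bad_save_user_regs to pick up.
 */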

	.macro	get_bad_stack_swi
	/* space on current stack for scratch reg. */
	sub	r13, r13, #4
	/* save R0's value. */
	str	r0, [r13]
	/* point r0 at our mode stack scratch area; r13 still holds the caller's stack */
	ldr	r0, IRQ_STACK_START_IN
	/* save caller lr in position 0 of saved stack */
	str	lr, [r0]
	/* get the spsr */
	mrs	lr, spsr
	/* save spsr in position 1 of saved stack */
	str	lr, [r0, #4]
	/* reload caller lr */
	ldr	lr, [r0]
	/* restore r0 */
	ldr	r0, [r13]
	/* pop stack entry */
	add	r13, r13, #4
	.endm

/*
 * exception handlers
 */
	.align	5
undefined_instruction:
	get_bad_stack
	bad_save_user_regs
	bl	do_undefined_instruction

	.align	5
software_interrupt:
	get_bad_stack_swi
	bad_save_user_regs
	bl	do_software_interrupt

	.align	5
prefetch_abort:
	get_bad_stack
	bad_save_user_regs
	bl	do_prefetch_abort

	.align	5
data_abort:
	get_bad_stack
	bad_save_user_regs
	bl	do_data_abort

	.align	5
not_used:
	get_bad_stack
	bad_save_user_regs
	bl	do_not_used

	.align	5
irq:
	get_bad_stack
	bad_save_user_regs
	bl	do_irq

	.align	5
fiq:
	get_bad_stack
	bad_save_user_regs
	bl	do_fiq
#endif /* CONFIG_NAND_SPL */