/*
 *  FPU support code, moved here from head.S so that it can be used
 *  by chips which use other head-whatever.S files.
 *
 *    Copyright (C) 1995-1996 Gary Thomas ([email protected])
 *    Copyright (C) 1996 Cort Dougan <[email protected]>
 *    Copyright (C) 1996 Paul Mackerras.
 *    Copyright (C) 1997 Dan Malek ([email protected]).
 *
 *  This program is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU General Public License
 *  as published by the Free Software Foundation; either version
 *  2 of the License, or (at your option) any later version.
 *
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/pgtable.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/ptrace.h>

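/*
 * SAVE_32FPVSRS/REST_32FPVSRS save or restore all 32 floating-point
 * registers.  On CPUs with VSX the architected FPRs are the top halves
 * of the first 32 VSX registers, so the full VSRs must be saved and
 * restored instead.  The BEGIN_FTR_SECTION/END_FTR_SECTION_IFSET pair
 * lets the kernel patch the branch to the VSX variant at boot on CPUs
 * that advertise CPU_FTR_VSX; elsewhere execution falls through to the
 * plain FPR forms.
 */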
#ifdef CONFIG_VSX
#define REST_32FPVSRS(n,c,base)					\
BEGIN_FTR_SECTION						\
	b	2f;						\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);				\
	REST_32FPRS(n,base);					\
	b	3f;						\
2:	REST_32VSRS(n,c,base);					\
3:

#define SAVE_32FPVSRS(n,c,base)					\
BEGIN_FTR_SECTION						\
	b	2f;						\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);				\
	SAVE_32FPRS(n,base);					\
	b	3f;						\
2:	SAVE_32VSRS(n,c,base);					\
3:
#else
#define REST_32FPVSRS(n,b,base)	REST_32FPRS(n, base)
#define SAVE_32FPVSRS(n,b,base)	SAVE_32FPRS(n, base)
#endif

/*
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 */
_GLOBAL(load_up_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC
	MTMSRD(r5)			/* enable use of fpu now */
	isync
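	/*
	 * MSR_FP (and MSR_VSX where present) is now set in the kernel's
	 * MSR, so the FP/VSX loads and stores below will not fault.
	 */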
/*
 * For SMP, we don't do lazy FPU switching because it just gets too
 * horrendously complex, especially when a task switches from one CPU
 * to another.  Instead we call giveup_fpu in switch_to.
 */
#ifndef CONFIG_SMP
	LOAD_REG_ADDRBASE(r3, last_task_used_math)
	toreal(r3)
	PPC_LL	r4,ADDROFF(last_task_used_math)(r3)
	PPC_LCMPI	0,r4,0
	beq	1f
	toreal(r4)
	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
	SAVE_32FPVSRS(0, r5, r4)
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r4)
	PPC_LL	r5,PT_REGS(r4)
	toreal(r5)
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r10,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r10		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#endif /* CONFIG_SMP */
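	/*
	 * The MSR value restored on return lives in r9 on 32-bit and in
	 * r12 (and the exception frame) on 64-bit; this is assumed to
	 * match the FP-unavailable exception entry code that branches
	 * here.
	 */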
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG_THREAD	/* current task's THREAD (phys) */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r9,r9,MSR_FP		/* enable FP for current */
	or	r9,r9,r4
#else
	ld	r4,PACACURRENT(r13)
	addi	r5,r4,THREAD		/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r12,r12,MSR_FP
	or	r12,r12,r4
	std	r12,_MSR(r1)
#endif
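	/*
	 * Restore this task's FPSCR, then all 32 FP (or VSX) registers,
	 * from its thread_struct.
	 */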
	lfd	fr0,THREAD_FPSCR(r5)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, r4, r5)
#ifndef CONFIG_SMP
	subi	r4,r5,THREAD
	fromreal(r4)
	PPC_STL	r4,ADDROFF(last_task_used_math)(r3)
#endif /* CONFIG_SMP */
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	blr

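/*
 * giveup_fpu is reached from switch_to() on SMP, and from helpers such
 * as enable_kernel_fp() and flush_fp_to_thread() (callers assumed, in
 * process.c) when the kernel needs a task's FPU state out in memory.
 */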
/*
 * giveup_fpu(tsk)
 * Disable FP for the task given as the argument,
 * and save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 */
_GLOBAL(giveup_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC_601
	ISYNC_601
	MTMSRD(r5)			/* enable use of fpu now */
	SYNC_601
	isync
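	/*
	 * FP must be enabled in the kernel's MSR before the FP registers
	 * can be saved below.  The SYNC_601/ISYNC_601 macros expand to
	 * barriers only on CPUs that need them around MSR updates (the
	 * original 601, assuming the usual ppc_asm.h definitions).
	 */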
	PPC_LCMPI	0,r3,0
	beqlr-				/* if no previous owner, done */
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r5,PT_REGS(r3)
	PPC_LCMPI	0,r5,0
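	/*
	 * r5 == 0 means the task has no user register frame (a kernel
	 * thread); the compare result is consumed by the beq below,
	 * after the registers have been saved.
	 */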
	SAVE_32FPVSRS(0, r4, r3)
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r3)
	beq	1f
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r3,MSR_FP|MSR_FE0|MSR_FE1
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r3,r3,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	andc	r4,r4,r3		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#ifndef CONFIG_SMP
	li	r5,0
	LOAD_REG_ADDRBASE(r4,last_task_used_math)
	PPC_STL	r5,ADDROFF(last_task_used_math)(r4)
#endif /* CONFIG_SMP */
	blr

/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 */

_GLOBAL(cvt_fd)
	lfs	0,0(r3)
	stfd	0,0(r4)
	blr

_GLOBAL(cvt_df)
	lfd	0,0(r3)
	stfs	0,0(r4)
	blr
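
/*
 * Both helpers use fr0 as scratch and rely on the caller (the
 * alignment handler, assumed to wrap them in enable_kernel_fp())
 * running with the FPU enabled.  The C-level view is roughly
 *	void cvt_fd(float *from, double *to);
 *	void cvt_df(double *from, float *to);
 * with the source address in r3 and the destination in r4.
 */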