]>
Commit | Line | Data |
---|---|---|
2874c5fd | 1 | /* SPDX-License-Identifier: GPL-2.0-or-later */ |
14cf11af | 2 | /* |
14cf11af | 3 | * Copyright (C) 2002 Paul Mackerras, IBM Corp. |
14cf11af | 4 | */ |
39326182 | 5 | #include <linux/export.h> |
14cf11af PM |
6 | #include <asm/processor.h> |
7 | #include <asm/ppc_asm.h> | |
ec0c464c | 8 | #include <asm/asm-compat.h> |
2c86cd18 | 9 | #include <asm/feature-fixups.h> |
14cf11af | 10 | |
#ifndef SELFTEST_CASE
/* 0 == most CPUs, 1 == POWER6, 2 == Cell */
#define SELFTEST_CASE	0
#endif

/*
 * sLd/sHd abstract the endian difference in the unaligned-source path:
 * "shift towards the low-numbered address" is a left shift on big-endian
 * but a right shift on little-endian, and vice versa.
 */
#ifdef __BIG_ENDIAN__
#define sLd sld		/* Shift towards low-numbered address. */
#define sHd srd		/* Shift towards high-numbered address. */
#else
#define sLd srd		/* Shift towards low-numbered address. */
#define sHd sld		/* Shift towards high-numbered address. */
#endif
23 | ||
/*
 * These macros are used to generate exception table entries.
 * The exception handlers below use the original arguments
 * (stored on the stack) and the point where we're up to in
 * the destination buffer, i.e. the address of the first
 * unmodified byte.  Generally r3 points into the destination
 * buffer, but the first unmodified byte is at a variable
 * offset from r3.  In the code below, the symbol r3_offset
 * is set to indicate the current offset at each point in
 * the code.  This offset is then used as a negative offset
 * from the exception handler code, and those instructions
 * before the exception handlers are addi instructions that
 * adjust r3 to point to the correct place.
 */
	.macro	lex		/* exception handler for load */
100:	EX_TABLE(100b, .Lld_exc - r3_offset)
	.endm

	.macro	stex		/* exception handler for store */
100:	EX_TABLE(100b, .Lst_exc - r3_offset)
	.endm
45 | ||
/*
 * __copy_tofrom_user(to, from, n)
 *   r3 = destination, r4 = source, r5 = byte count
 *   (r3/r4/r5 are saved at r1-24/-16/-8 for the fault handlers).
 * Returns (in r3) 0 on success, or the number of bytes not copied
 * if a load or store faults — see .Lld_exc / .Lst_exc below.
 */
	.align	7
_GLOBAL_TOC(__copy_tofrom_user)
#ifdef CONFIG_PPC_BOOK3S_64
BEGIN_FTR_SECTION
	nop
FTR_SECTION_ELSE
	b	__copy_tofrom_user_power7	/* VMX-accelerated variant */
ALT_FTR_SECTION_END_IFCLR(CPU_FTR_VMX_COPY)
#endif
_GLOBAL(__copy_tofrom_user_base)
	/* first check for a 4kB copy on a 4kB boundary */
	cmpldi	cr1,r5,16
	cmpdi	cr6,r5,4096
	or	r0,r3,r4
	neg	r6,r3		/* LS 3 bits = # bytes to 8-byte dest bdry */
	andi.	r0,r0,4095
	std	r3,-24(r1)
	crand	cr0*4+2,cr0*4+2,cr6*4+2
	std	r4,-16(r1)
	std	r5,-8(r1)
	dcbt	0,r4
	beq	.Lcopy_page_4K	/* page-aligned 4kB copy: take the fast path */
	andi.	r6,r6,7
	PPC_MTOCRF(0x01,r5)	/* low 4 bits of count -> cr7 for the tails */
	blt	cr1,.Lshort_copy
/* Below we want to nop out the bne if we're on a CPU that has the
 * CPU_FTR_UNALIGNED_LD_STD bit set and the CPU_FTR_CP_USE_DCBTZ bit
 * cleared.
 * At the time of writing the only CPU that has this combination of bits
 * set is Power6.
 */
test_feature = (SELFTEST_CASE == 1)
BEGIN_FTR_SECTION
	nop
FTR_SECTION_ELSE
	bne	.Ldst_unaligned
ALT_FTR_SECTION_END(CPU_FTR_UNALIGNED_LD_STD | CPU_FTR_CP_USE_DCBTZ, \
		    CPU_FTR_UNALIGNED_LD_STD)
.Ldst_aligned:
	addi	r3,r3,-16
r3_offset = 16
test_feature = (SELFTEST_CASE == 0)
BEGIN_FTR_SECTION
	andi.	r0,r4,7
	bne	.Lsrc_unaligned
END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
/*
 * Both source and destination 8-byte aligned: main 32-bytes-per-
 * iteration copy loop, software-pipelined so loads run one iteration
 * ahead of the stores, followed by the 8/4/2/1-byte tail (cr7 holds
 * the low 4 bits of the count).  r3_offset tracks how far r3 lags the
 * first unmodified destination byte for the fault handlers.
 */
	blt	cr1,.Ldo_tail		/* if < 16 bytes to copy */
	srdi	r0,r5,5
	cmpdi	cr1,r0,0
lex;	ld	r7,0(r4)
lex;	ld	r6,8(r4)
	addi	r4,r4,16
	mtctr	r0
	andi.	r0,r5,0x10
	beq	22f
	addi	r3,r3,16
r3_offset = 0
	addi	r4,r4,-16
	mr	r9,r7
	mr	r8,r6
	beq	cr1,72f
21:
lex;	ld	r7,16(r4)
lex;	ld	r6,24(r4)
	addi	r4,r4,32
stex;	std	r9,0(r3)
r3_offset = 8
stex;	std	r8,8(r3)
r3_offset = 16
22:
lex;	ld	r9,0(r4)
lex;	ld	r8,8(r4)
stex;	std	r7,16(r3)
r3_offset = 24
stex;	std	r6,24(r3)
	addi	r3,r3,32
r3_offset = 0
	bdnz	21b
72:
stex;	std	r9,0(r3)
r3_offset = 8
stex;	std	r8,8(r3)
r3_offset = 16
	andi.	r5,r5,0xf
	beq+	3f
	addi	r4,r4,16
.Ldo_tail:
	addi	r3,r3,16
r3_offset = 0
	bf	cr7*4+0,246f
lex;	ld	r9,0(r4)
	addi	r4,r4,8
stex;	std	r9,0(r3)
	addi	r3,r3,8
246:	bf	cr7*4+1,1f
lex;	lwz	r9,0(r4)
	addi	r4,r4,4
stex;	stw	r9,0(r3)
	addi	r3,r3,4
1:	bf	cr7*4+2,2f
lex;	lhz	r9,0(r4)
	addi	r4,r4,2
stex;	sth	r9,0(r3)
	addi	r3,r3,2
2:	bf	cr7*4+3,3f
lex;	lbz	r9,0(r4)
stex;	stb	r9,0(r3)
3:	li	r3,0		/* everything copied: return 0 */
	blr
/*
 * Destination 8-byte aligned but source is not: read aligned
 * doublewords from the source and assemble each output doubleword
 * with a pair of shifts.  r10 = misalignment in bits (sldi r10,r0,3),
 * r11 = 64 - r10; sLd/sHd pick the endian-correct direction.
 */
.Lsrc_unaligned:
r3_offset = 16
	srdi	r6,r5,3
	addi	r5,r5,-16
	subf	r4,r0,r4	/* align the source pointer down */
	srdi	r7,r5,4
	sldi	r10,r0,3
	cmpldi	cr6,r6,3
	andi.	r5,r5,7
	mtctr	r7
	subfic	r11,r10,64
	add	r5,r5,r0
	bt	cr7*4+0,28f

lex;	ld	r9,0(r4)	/* 3+2n loads, 2+2n stores */
lex;	ld	r0,8(r4)
	sLd	r6,r9,r10
lex;	ldu	r9,16(r4)
	sHd	r7,r0,r11
	sLd	r8,r0,r10
	or	r7,r7,r6
	blt	cr6,79f
lex;	ld	r0,8(r4)
	b	2f

28:
lex;	ld	r0,0(r4)	/* 4+2n loads, 3+2n stores */
lex;	ldu	r9,8(r4)
	sLd	r8,r0,r10
	addi	r3,r3,-8
r3_offset = 24
	blt	cr6,5f
lex;	ld	r0,8(r4)
	sHd	r12,r9,r11
	sLd	r6,r9,r10
lex;	ldu	r9,16(r4)
	or	r12,r8,r12
	sHd	r7,r0,r11
	sLd	r8,r0,r10
	addi	r3,r3,16
r3_offset = 8
	beq	cr6,78f

1:	or	r7,r7,r6
lex;	ld	r0,8(r4)
stex;	std	r12,8(r3)
r3_offset = 16
2:	sHd	r12,r9,r11
	sLd	r6,r9,r10
lex;	ldu	r9,16(r4)
	or	r12,r8,r12
stex;	stdu	r7,16(r3)
r3_offset = 8
	sHd	r7,r0,r11
	sLd	r8,r0,r10
	bdnz	1b

78:
stex;	std	r12,8(r3)
r3_offset = 16
	or	r7,r7,r6
79:
stex;	std	r7,16(r3)
r3_offset = 24
5:	sHd	r12,r9,r11
	or	r12,r8,r12
stex;	std	r12,24(r3)
r3_offset = 32
	bne	6f
	li	r3,0
	blr
6:	cmpwi	cr1,r5,8
	addi	r3,r3,32
r3_offset = 0
	sLd	r9,r9,r10
	ble	cr1,7f
lex;	ld	r0,8(r4)
	sHd	r7,r0,r11
	or	r9,r7,r9
7:
	/*
	 * Store the 1-7 byte tail from the top of r9; the rotates keep
	 * the next bytes to store in the right position for each size
	 * on both endiannesses.
	 */
	bf	cr7*4+1,1f
#ifdef __BIG_ENDIAN__
	rotldi	r9,r9,32
#endif
stex;	stw	r9,0(r3)
#ifdef __LITTLE_ENDIAN__
	rotrdi	r9,r9,32
#endif
	addi	r3,r3,4
1:	bf	cr7*4+2,2f
#ifdef __BIG_ENDIAN__
	rotldi	r9,r9,16
#endif
stex;	sth	r9,0(r3)
#ifdef __LITTLE_ENDIAN__
	rotrdi	r9,r9,16
#endif
	addi	r3,r3,2
2:	bf	cr7*4+3,3f
#ifdef __BIG_ENDIAN__
	rotldi	r9,r9,8
#endif
stex;	stb	r9,0(r3)
#ifdef __LITTLE_ENDIAN__
	rotrdi	r9,r9,8
#endif
3:	li	r3,0
	blr
/*
 * Destination not 8-byte aligned: copy 1/2/4 bytes (per cr7, loaded
 * from r6 = bytes-to-boundary) until it is, then rejoin .Ldst_aligned.
 * r7 counts the bytes done here, so faults go to .Lld_exc_r7 /
 * .Lst_exc_r7, which add r7 to r3 before the common handler.
 */
.Ldst_unaligned:
r3_offset = 0
	PPC_MTOCRF(0x01,r6)		/* put #bytes to 8B bdry into cr7 */
	subf	r5,r6,r5
	li	r7,0
	cmpldi	cr1,r5,16
	bf	cr7*4+3,1f
100:	EX_TABLE(100b, .Lld_exc_r7)
	lbz	r0,0(r4)
100:	EX_TABLE(100b, .Lst_exc_r7)
	stb	r0,0(r3)
	addi	r7,r7,1
1:	bf	cr7*4+2,2f
100:	EX_TABLE(100b, .Lld_exc_r7)
	lhzx	r0,r7,r4
100:	EX_TABLE(100b, .Lst_exc_r7)
	sthx	r0,r7,r3
	addi	r7,r7,2
2:	bf	cr7*4+1,3f
100:	EX_TABLE(100b, .Lld_exc_r7)
	lwzx	r0,r7,r4
100:	EX_TABLE(100b, .Lst_exc_r7)
	stwx	r0,r7,r3
3:	PPC_MTOCRF(0x01,r5)	/* refresh cr7 with the remaining count */
	add	r4,r6,r4
	add	r3,r6,r3
	b	.Ldst_aligned

/* Copies of fewer than 16 bytes: straight-line 8/4/2/1 tail via cr7. */
.Lshort_copy:
r3_offset = 0
	bf	cr7*4+0,1f
lex;	lwz	r0,0(r4)	/* do 8 bytes as two word ops */
lex;	lwz	r9,4(r4)
	addi	r4,r4,8
stex;	stw	r0,0(r3)
stex;	stw	r9,4(r3)
	addi	r3,r3,8
1:	bf	cr7*4+1,2f
lex;	lwz	r0,0(r4)
	addi	r4,r4,4
stex;	stw	r0,0(r3)
	addi	r3,r3,4
2:	bf	cr7*4+2,3f
lex;	lhz	r0,0(r4)
	addi	r4,r4,2
stex;	sth	r0,0(r3)
	addi	r3,r3,2
3:	bf	cr7*4+3,4f
lex;	lbz	r0,0(r4)
stex;	stb	r0,0(r3)
4:	li	r3,0
	blr
/*
 * exception handlers follow
 * we have to return the number of bytes not copied
 *  for an exception on a load, we set the rest of the destination to 0
 * Note that the number of bytes of instructions for adjusting r3 needs
 * to equal the amount of the adjustment, due to the trick of using
 * .Lld_exc - r3_offset as the handler address.  (Each addi/nop below
 * is 4 bytes, so e.g. entering 24 bytes before .Lld_exc executes
 * three addi r3,r3,8 — do NOT reorder or resize these instructions.)
 */

.Lld_exc_r7:
	add	r3,r3,r7
	b	.Lld_exc

	/* adjust by 24 */
	addi	r3,r3,8
	nop
	/* adjust by 16 */
	addi	r3,r3,8
	nop
	/* adjust by 8 */
	addi	r3,r3,8
	nop

/*
 * Here we have had a fault on a load and r3 points to the first
 * unmodified byte of the destination. We use the original arguments
 * and r3 to work out how much wasn't copied.  Since we load some
 * distance ahead of the stores, we continue copying byte-by-byte until
 * we hit the load fault again in order to copy as much as possible.
 */
.Lld_exc:
	ld	r6,-24(r1)	/* original destination pointer */
	ld	r4,-16(r1)	/* original source pointer */
	ld	r5,-8(r1)	/* original number of bytes */
	subf	r6,r6,r3	/* #bytes already copied */
	add	r4,r4,r6
	subf	r5,r6,r5	/* #bytes left to go */

/*
 * first see if we can copy any more bytes before hitting another exception
 */
	mtctr	r5
r3_offset = 0
100:	EX_TABLE(100b, .Ldone)
43:	lbz	r0,0(r4)
	addi	r4,r4,1
stex;	stb	r0,0(r3)
	addi	r3,r3,1
	bdnz	43b
	li	r3,0		/* huh? all copied successfully this time? */
	blr

/*
 * here we have trapped again, amount remaining is in ctr.
 */
.Ldone:
	mfctr	r3
	blr
/*
 * exception handlers for stores: we need to work out how many bytes
 * weren't copied, and we may need to copy some more.
 * Note that the number of bytes of instructions for adjusting r3 needs
 * to equal the amount of the adjustment, due to the trick of using
 * .Lst_exc - r3_offset as the handler address — keep the addi/nop
 * sizes below exactly as they are.
 */
.Lst_exc_r7:
	add	r3,r3,r7
	b	.Lst_exc

	/* adjust by 24 */
	addi	r3,r3,8
	nop
	/* adjust by 16 */
	addi	r3,r3,8
	nop
	/* adjust by 8 */
	addi	r3,r3,4
	/* adjust by 4 */
	addi	r3,r3,4
.Lst_exc:
	ld	r6,-24(r1)	/* original destination pointer */
	ld	r4,-16(r1)	/* original source pointer */
	ld	r5,-8(r1)	/* original number of bytes */
	add	r7,r6,r5	/* r7 = one past the end of the buffer */
	/*
	 * If the destination pointer isn't 8-byte aligned,
	 * we may have got the exception as a result of a
	 * store that overlapped a page boundary, so we may be
	 * able to copy a few more bytes.
	 */
17:	andi.	r0,r3,7
	beq	19f
	subf	r8,r6,r3	/* #bytes copied */
100:	EX_TABLE(100b,19f)
	lbzx	r0,r8,r4
100:	EX_TABLE(100b,19f)
	stb	r0,0(r3)
	addi	r3,r3,1
	cmpld	r3,r7
	blt	17b
19:	subf	r3,r3,r7	/* #bytes not copied in r3 */
	blr
/*
 * Routine to copy a whole page of data, optimized for POWER4.
 * On POWER4 it is more than 50% faster than the simple loop
 * above (following the .Ldst_aligned label).
 *
 * Software-pipelined: each outer iteration streams six regions of the
 * page at once (offsets 0, 128, 256, 384, 512 and 640 bytes), with a
 * small residual loop at the end.  Non-volatile regs r20-r31 are saved
 * below the stack pointer and restored on both the success (9:) and
 * fault (.Labort) paths.
 */
	.macro	exc
100:	EX_TABLE(100b, .Labort)
	.endm
.Lcopy_page_4K:
	std	r31,-32(1)
	std	r30,-40(1)
	std	r29,-48(1)
	std	r28,-56(1)
	std	r27,-64(1)
	std	r26,-72(1)
	std	r25,-80(1)
	std	r24,-88(1)
	std	r23,-96(1)
	std	r22,-104(1)
	std	r21,-112(1)
	std	r20,-120(1)
	li	r5,4096/32 - 1
	addi	r3,r3,-8
	li	r0,5
0:	addi	r5,r5,-24
	mtctr	r0
exc;	ld	r22,640(4)
exc;	ld	r21,512(4)
exc;	ld	r20,384(4)
exc;	ld	r11,256(4)
exc;	ld	r9,128(4)
exc;	ld	r7,0(4)
exc;	ld	r25,648(4)
exc;	ld	r24,520(4)
exc;	ld	r23,392(4)
exc;	ld	r10,264(4)
exc;	ld	r8,136(4)
exc;	ldu	r6,8(4)
	cmpwi	r5,24
1:
exc;	std	r22,648(3)
exc;	std	r21,520(3)
exc;	std	r20,392(3)
exc;	std	r11,264(3)
exc;	std	r9,136(3)
exc;	std	r7,8(3)
exc;	ld	r28,648(4)
exc;	ld	r27,520(4)
exc;	ld	r26,392(4)
exc;	ld	r31,264(4)
exc;	ld	r30,136(4)
exc;	ld	r29,8(4)
exc;	std	r25,656(3)
exc;	std	r24,528(3)
exc;	std	r23,400(3)
exc;	std	r10,272(3)
exc;	std	r8,144(3)
exc;	std	r6,16(3)
exc;	ld	r22,656(4)
exc;	ld	r21,528(4)
exc;	ld	r20,400(4)
exc;	ld	r11,272(4)
exc;	ld	r9,144(4)
exc;	ld	r7,16(4)
exc;	std	r28,664(3)
exc;	std	r27,536(3)
exc;	std	r26,408(3)
exc;	std	r31,280(3)
exc;	std	r30,152(3)
exc;	stdu	r29,24(3)
exc;	ld	r25,664(4)
exc;	ld	r24,536(4)
exc;	ld	r23,408(4)
exc;	ld	r10,280(4)
exc;	ld	r8,152(4)
exc;	ldu	r6,24(4)
	bdnz	1b
exc;	std	r22,648(3)
exc;	std	r21,520(3)
exc;	std	r20,392(3)
exc;	std	r11,264(3)
exc;	std	r9,136(3)
exc;	std	r7,8(3)
	addi	r4,r4,640
	addi	r3,r3,648
	bge	0b
	mtctr	r5
exc;	ld	r7,0(4)
exc;	ld	r8,8(4)
exc;	ldu	r9,16(4)
3:
exc;	ld	r10,8(4)
exc;	std	r7,8(3)
exc;	ld	r7,16(4)
exc;	std	r8,16(3)
exc;	ld	r8,24(4)
exc;	std	r9,24(3)
exc;	ldu	r9,32(4)
exc;	stdu	r10,32(3)
	bdnz	3b
4:
exc;	ld	r10,8(4)
exc;	std	r7,8(3)
exc;	std	r8,16(3)
exc;	std	r9,24(3)
exc;	std	r10,32(3)
9:	ld	r20,-120(1)
	ld	r21,-112(1)
	ld	r22,-104(1)
	ld	r23,-96(1)
	ld	r24,-88(1)
	ld	r25,-80(1)
	ld	r26,-72(1)
	ld	r27,-64(1)
	ld	r28,-56(1)
	ld	r29,-48(1)
	ld	r30,-40(1)
	ld	r31,-32(1)
	li	r3,0
	blr

/*
 * on an exception, reset to the beginning and jump back into the
 * standard __copy_tofrom_user
 */
.Labort:
	ld	r20,-120(1)
	ld	r21,-112(1)
	ld	r22,-104(1)
	ld	r23,-96(1)
	ld	r24,-88(1)
	ld	r25,-80(1)
	ld	r26,-72(1)
	ld	r27,-64(1)
	ld	r28,-56(1)
	ld	r29,-48(1)
	ld	r30,-40(1)
	ld	r31,-32(1)
	ld	r3,-24(r1)	/* restore the original arguments ... */
	ld	r4,-16(r1)
	li	r5,4096
	b	.Ldst_aligned	/* ... and retry via the generic path */
EXPORT_SYMBOL(__copy_tofrom_user)