/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <[email protected]>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/mm.h>
#include <linux/highmem.h>
#include <linux/blkdev.h>
#include <linux/module.h>
#include <linux/backing-dev.h>
#include <linux/interrupt.h>
#include <asm/uaccess.h>
#include <asm/mmx.h>
#include <asm/asm.h>
#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
		return 0;
#endif
	return 1;
}
#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
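
/*
 * Illustrative sketch (added; not in the original source), assuming a
 * CPU whose setup code picked movsl_mask.mask == 7: a copy of 64 bytes
 * or more whose source and destination disagree in their low address
 * bits takes the unrolled Intel path, while mutually aligned buffers
 * keep the plain rep movsl path:
 *
 *	movsl_is_ok((void *)0x1000, (void *)0x2004, 128);  returns 0
 *	movsl_is_ok((void *)0x1000, (void *)0x2008, 128);  returns 1
 */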

/*
 * Zero Userspace
 */

#define __do_clear_user(addr, size)				\
do {								\
	int __d0;						\
	might_fault();						\
	__asm__ __volatile__(					\
		ASM_STAC "\n"					\
		"0:	rep; stosl\n"				\
		"	movl %2,%0\n"				\
		"1:	rep; stosb\n"				\
		"2: " ASM_CLAC "\n"				\
		".section .fixup,\"ax\"\n"			\
		"3:	lea 0(%2,%0,4),%0\n"			\
		"	jmp 2b\n"				\
		".previous\n"					\
		_ASM_EXTABLE(0b,3b)				\
		_ASM_EXTABLE(1b,2b)				\
		: "=&c"(size), "=&D" (__d0)			\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \
} while (0)

/**
 * clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
	might_fault();
	if (access_ok(VERIFY_WRITE, to, n))
		__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(clear_user);
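
/*
 * Usage sketch (added; illustrative only; "ubuf" and "len" are
 * hypothetical caller-supplied values).  A nonzero return means some
 * bytes could not be zeroed:
 *
 *	if (clear_user(ubuf, len))
 *		return -EFAULT;
 */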

/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);
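
/*
 * Usage sketch (added; illustrative only): a caller that has already
 * validated the range can use the cheaper __clear_user(); "ubuf" and
 * "len" are hypothetical:
 *
 *	if (!access_ok(VERIFY_WRITE, ubuf, len))
 *		return -EFAULT;
 *	if (__clear_user(ubuf, len))
 *		return -EFAULT;
 */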

#ifdef CONFIG_X86_INTEL_USERCOPY
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		"	.align 2,0x90\n"
		"1:	movl 32(%4), %%eax\n"
		"	cmpl $67, %0\n"
		"	jbe 3f\n"
		"2:	movl 64(%4), %%eax\n"
		"	.align 2,0x90\n"
		"3:	movl 0(%4), %%eax\n"
		"4:	movl 4(%4), %%edx\n"
		"5:	movl %%eax, 0(%3)\n"
		"6:	movl %%edx, 4(%3)\n"
		"7:	movl 8(%4), %%eax\n"
		"8:	movl 12(%4),%%edx\n"
		"9:	movl %%eax, 8(%3)\n"
		"10:	movl %%edx, 12(%3)\n"
		"11:	movl 16(%4), %%eax\n"
		"12:	movl 20(%4), %%edx\n"
		"13:	movl %%eax, 16(%3)\n"
		"14:	movl %%edx, 20(%3)\n"
		"15:	movl 24(%4), %%eax\n"
		"16:	movl 28(%4), %%edx\n"
		"17:	movl %%eax, 24(%3)\n"
		"18:	movl %%edx, 28(%3)\n"
		"19:	movl 32(%4), %%eax\n"
		"20:	movl 36(%4), %%edx\n"
		"21:	movl %%eax, 32(%3)\n"
		"22:	movl %%edx, 36(%3)\n"
		"23:	movl 40(%4), %%eax\n"
		"24:	movl 44(%4), %%edx\n"
		"25:	movl %%eax, 40(%3)\n"
		"26:	movl %%edx, 44(%3)\n"
		"27:	movl 48(%4), %%eax\n"
		"28:	movl 52(%4), %%edx\n"
		"29:	movl %%eax, 48(%3)\n"
		"30:	movl %%edx, 52(%3)\n"
		"31:	movl 56(%4), %%eax\n"
		"32:	movl 60(%4), %%edx\n"
		"33:	movl %%eax, 56(%3)\n"
		"34:	movl %%edx, 60(%3)\n"
		"	addl $-64, %0\n"
		"	addl $64, %4\n"
		"	addl $64, %3\n"
		"	cmpl $63, %0\n"
		"	ja 1b\n"
		"35:	movl %0, %%eax\n"
		"	shrl $2, %0\n"
		"	andl $3, %%eax\n"
		"	cld\n"
		"99:	rep; movsl\n"
		"36:	movl %%eax, %0\n"
		"37:	rep; movsb\n"
		"100:\n"
		".section .fixup,\"ax\"\n"
		"101:	lea 0(%%eax,%0,4),%0\n"
		"	jmp 100b\n"
		".previous\n"
		_ASM_EXTABLE(1b,100b)
		_ASM_EXTABLE(2b,100b)
		_ASM_EXTABLE(3b,100b)
		_ASM_EXTABLE(4b,100b)
		_ASM_EXTABLE(5b,100b)
		_ASM_EXTABLE(6b,100b)
		_ASM_EXTABLE(7b,100b)
		_ASM_EXTABLE(8b,100b)
		_ASM_EXTABLE(9b,100b)
		_ASM_EXTABLE(10b,100b)
		_ASM_EXTABLE(11b,100b)
		_ASM_EXTABLE(12b,100b)
		_ASM_EXTABLE(13b,100b)
		_ASM_EXTABLE(14b,100b)
		_ASM_EXTABLE(15b,100b)
		_ASM_EXTABLE(16b,100b)
		_ASM_EXTABLE(17b,100b)
		_ASM_EXTABLE(18b,100b)
		_ASM_EXTABLE(19b,100b)
		_ASM_EXTABLE(20b,100b)
		_ASM_EXTABLE(21b,100b)
		_ASM_EXTABLE(22b,100b)
		_ASM_EXTABLE(23b,100b)
		_ASM_EXTABLE(24b,100b)
		_ASM_EXTABLE(25b,100b)
		_ASM_EXTABLE(26b,100b)
		_ASM_EXTABLE(27b,100b)
		_ASM_EXTABLE(28b,100b)
		_ASM_EXTABLE(29b,100b)
		_ASM_EXTABLE(30b,100b)
		_ASM_EXTABLE(31b,100b)
		_ASM_EXTABLE(32b,100b)
		_ASM_EXTABLE(33b,100b)
		_ASM_EXTABLE(34b,100b)
		_ASM_EXTABLE(35b,100b)
		_ASM_EXTABLE(36b,100b)
		_ASM_EXTABLE(37b,100b)
		_ASM_EXTABLE(99b,101b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}

static unsigned long
__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		"	.align 2,0x90\n"
		"0:	movl 32(%4), %%eax\n"
		"	cmpl $67, %0\n"
		"	jbe 2f\n"
		"1:	movl 64(%4), %%eax\n"
		"	.align 2,0x90\n"
		"2:	movl 0(%4), %%eax\n"
		"21:	movl 4(%4), %%edx\n"
		"	movl %%eax, 0(%3)\n"
		"	movl %%edx, 4(%3)\n"
		"3:	movl 8(%4), %%eax\n"
		"31:	movl 12(%4),%%edx\n"
		"	movl %%eax, 8(%3)\n"
		"	movl %%edx, 12(%3)\n"
		"4:	movl 16(%4), %%eax\n"
		"41:	movl 20(%4), %%edx\n"
		"	movl %%eax, 16(%3)\n"
		"	movl %%edx, 20(%3)\n"
		"10:	movl 24(%4), %%eax\n"
		"51:	movl 28(%4), %%edx\n"
		"	movl %%eax, 24(%3)\n"
		"	movl %%edx, 28(%3)\n"
		"11:	movl 32(%4), %%eax\n"
		"61:	movl 36(%4), %%edx\n"
		"	movl %%eax, 32(%3)\n"
		"	movl %%edx, 36(%3)\n"
		"12:	movl 40(%4), %%eax\n"
		"71:	movl 44(%4), %%edx\n"
		"	movl %%eax, 40(%3)\n"
		"	movl %%edx, 44(%3)\n"
		"13:	movl 48(%4), %%eax\n"
		"81:	movl 52(%4), %%edx\n"
		"	movl %%eax, 48(%3)\n"
		"	movl %%edx, 52(%3)\n"
		"14:	movl 56(%4), %%eax\n"
		"91:	movl 60(%4), %%edx\n"
		"	movl %%eax, 56(%3)\n"
		"	movl %%edx, 60(%3)\n"
		"	addl $-64, %0\n"
		"	addl $64, %4\n"
		"	addl $64, %3\n"
		"	cmpl $63, %0\n"
		"	ja 0b\n"
		"5:	movl %0, %%eax\n"
		"	shrl $2, %0\n"
		"	andl $3, %%eax\n"
		"	cld\n"
		"6:	rep; movsl\n"
		"	movl %%eax,%0\n"
		"7:	rep; movsb\n"
		"8:\n"
		".section .fixup,\"ax\"\n"
		"9:	lea 0(%%eax,%0,4),%0\n"
		"16:	pushl %0\n"
		"	pushl %%eax\n"
		"	xorl %%eax,%%eax\n"
		"	rep; stosb\n"
		"	popl %%eax\n"
		"	popl %0\n"
		"	jmp 8b\n"
		".previous\n"
		_ASM_EXTABLE(0b,16b)
		_ASM_EXTABLE(1b,16b)
		_ASM_EXTABLE(2b,16b)
		_ASM_EXTABLE(21b,16b)
		_ASM_EXTABLE(3b,16b)
		_ASM_EXTABLE(31b,16b)
		_ASM_EXTABLE(4b,16b)
		_ASM_EXTABLE(41b,16b)
		_ASM_EXTABLE(10b,16b)
		_ASM_EXTABLE(51b,16b)
		_ASM_EXTABLE(11b,16b)
		_ASM_EXTABLE(61b,16b)
		_ASM_EXTABLE(12b,16b)
		_ASM_EXTABLE(71b,16b)
		_ASM_EXTABLE(13b,16b)
		_ASM_EXTABLE(81b,16b)
		_ASM_EXTABLE(14b,16b)
		_ASM_EXTABLE(91b,16b)
		_ASM_EXTABLE(6b,9b)
		_ASM_EXTABLE(7b,16b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}

/*
 * Non-temporal-hint version of __copy_user_zeroing_intel.  It is cache aware.
 * [email protected]
 */

static unsigned long __copy_user_zeroing_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
		"	.align 2,0x90\n"
		"0:	movl 32(%4), %%eax\n"
		"	cmpl $67, %0\n"
		"	jbe 2f\n"
		"1:	movl 64(%4), %%eax\n"
		"	.align 2,0x90\n"
		"2:	movl 0(%4), %%eax\n"
		"21:	movl 4(%4), %%edx\n"
		"	movnti %%eax, 0(%3)\n"
		"	movnti %%edx, 4(%3)\n"
		"3:	movl 8(%4), %%eax\n"
		"31:	movl 12(%4),%%edx\n"
		"	movnti %%eax, 8(%3)\n"
		"	movnti %%edx, 12(%3)\n"
		"4:	movl 16(%4), %%eax\n"
		"41:	movl 20(%4), %%edx\n"
		"	movnti %%eax, 16(%3)\n"
		"	movnti %%edx, 20(%3)\n"
		"10:	movl 24(%4), %%eax\n"
		"51:	movl 28(%4), %%edx\n"
		"	movnti %%eax, 24(%3)\n"
		"	movnti %%edx, 28(%3)\n"
		"11:	movl 32(%4), %%eax\n"
		"61:	movl 36(%4), %%edx\n"
		"	movnti %%eax, 32(%3)\n"
		"	movnti %%edx, 36(%3)\n"
		"12:	movl 40(%4), %%eax\n"
		"71:	movl 44(%4), %%edx\n"
		"	movnti %%eax, 40(%3)\n"
		"	movnti %%edx, 44(%3)\n"
		"13:	movl 48(%4), %%eax\n"
		"81:	movl 52(%4), %%edx\n"
		"	movnti %%eax, 48(%3)\n"
		"	movnti %%edx, 52(%3)\n"
		"14:	movl 56(%4), %%eax\n"
		"91:	movl 60(%4), %%edx\n"
		"	movnti %%eax, 56(%3)\n"
		"	movnti %%edx, 60(%3)\n"
		"	addl $-64, %0\n"
		"	addl $64, %4\n"
		"	addl $64, %3\n"
		"	cmpl $63, %0\n"
		"	ja 0b\n"
		"	sfence \n"
		"5:	movl %0, %%eax\n"
		"	shrl $2, %0\n"
		"	andl $3, %%eax\n"
		"	cld\n"
		"6:	rep; movsl\n"
		"	movl %%eax,%0\n"
		"7:	rep; movsb\n"
		"8:\n"
		".section .fixup,\"ax\"\n"
		"9:	lea 0(%%eax,%0,4),%0\n"
		"16:	pushl %0\n"
		"	pushl %%eax\n"
		"	xorl %%eax,%%eax\n"
		"	rep; stosb\n"
		"	popl %%eax\n"
		"	popl %0\n"
		"	jmp 8b\n"
		".previous\n"
		_ASM_EXTABLE(0b,16b)
		_ASM_EXTABLE(1b,16b)
		_ASM_EXTABLE(2b,16b)
		_ASM_EXTABLE(21b,16b)
		_ASM_EXTABLE(3b,16b)
		_ASM_EXTABLE(31b,16b)
		_ASM_EXTABLE(4b,16b)
		_ASM_EXTABLE(41b,16b)
		_ASM_EXTABLE(10b,16b)
		_ASM_EXTABLE(51b,16b)
		_ASM_EXTABLE(11b,16b)
		_ASM_EXTABLE(61b,16b)
		_ASM_EXTABLE(12b,16b)
		_ASM_EXTABLE(71b,16b)
		_ASM_EXTABLE(13b,16b)
		_ASM_EXTABLE(81b,16b)
		_ASM_EXTABLE(14b,16b)
		_ASM_EXTABLE(91b,16b)
		_ASM_EXTABLE(6b,9b)
		_ASM_EXTABLE(7b,16b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}

static unsigned long __copy_user_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
		"	.align 2,0x90\n"
		"0:	movl 32(%4), %%eax\n"
		"	cmpl $67, %0\n"
		"	jbe 2f\n"
		"1:	movl 64(%4), %%eax\n"
		"	.align 2,0x90\n"
		"2:	movl 0(%4), %%eax\n"
		"21:	movl 4(%4), %%edx\n"
		"	movnti %%eax, 0(%3)\n"
		"	movnti %%edx, 4(%3)\n"
		"3:	movl 8(%4), %%eax\n"
		"31:	movl 12(%4),%%edx\n"
		"	movnti %%eax, 8(%3)\n"
		"	movnti %%edx, 12(%3)\n"
		"4:	movl 16(%4), %%eax\n"
		"41:	movl 20(%4), %%edx\n"
		"	movnti %%eax, 16(%3)\n"
		"	movnti %%edx, 20(%3)\n"
		"10:	movl 24(%4), %%eax\n"
		"51:	movl 28(%4), %%edx\n"
		"	movnti %%eax, 24(%3)\n"
		"	movnti %%edx, 28(%3)\n"
		"11:	movl 32(%4), %%eax\n"
		"61:	movl 36(%4), %%edx\n"
		"	movnti %%eax, 32(%3)\n"
		"	movnti %%edx, 36(%3)\n"
		"12:	movl 40(%4), %%eax\n"
		"71:	movl 44(%4), %%edx\n"
		"	movnti %%eax, 40(%3)\n"
		"	movnti %%edx, 44(%3)\n"
		"13:	movl 48(%4), %%eax\n"
		"81:	movl 52(%4), %%edx\n"
		"	movnti %%eax, 48(%3)\n"
		"	movnti %%edx, 52(%3)\n"
		"14:	movl 56(%4), %%eax\n"
		"91:	movl 60(%4), %%edx\n"
		"	movnti %%eax, 56(%3)\n"
		"	movnti %%edx, 60(%3)\n"
		"	addl $-64, %0\n"
		"	addl $64, %4\n"
		"	addl $64, %3\n"
		"	cmpl $63, %0\n"
		"	ja 0b\n"
		"	sfence \n"
		"5:	movl %0, %%eax\n"
		"	shrl $2, %0\n"
		"	andl $3, %%eax\n"
		"	cld\n"
		"6:	rep; movsl\n"
		"	movl %%eax,%0\n"
		"7:	rep; movsb\n"
		"8:\n"
		".section .fixup,\"ax\"\n"
		"9:	lea 0(%%eax,%0,4),%0\n"
		"16:	jmp 8b\n"
		".previous\n"
		_ASM_EXTABLE(0b,16b)
		_ASM_EXTABLE(1b,16b)
		_ASM_EXTABLE(2b,16b)
		_ASM_EXTABLE(21b,16b)
		_ASM_EXTABLE(3b,16b)
		_ASM_EXTABLE(31b,16b)
		_ASM_EXTABLE(4b,16b)
		_ASM_EXTABLE(41b,16b)
		_ASM_EXTABLE(10b,16b)
		_ASM_EXTABLE(51b,16b)
		_ASM_EXTABLE(11b,16b)
		_ASM_EXTABLE(61b,16b)
		_ASM_EXTABLE(12b,16b)
		_ASM_EXTABLE(71b,16b)
		_ASM_EXTABLE(13b,16b)
		_ASM_EXTABLE(81b,16b)
		_ASM_EXTABLE(14b,16b)
		_ASM_EXTABLE(91b,16b)
		_ASM_EXTABLE(6b,9b)
		_ASM_EXTABLE(7b,16b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}

#else

/*
 * Leave these declared but undefined.  There should not be any
 * references to them.
 */
unsigned long __copy_user_zeroing_intel(void *to, const void __user *from,
					unsigned long size);
unsigned long __copy_user_intel(void __user *to, const void *from,
					unsigned long size);
unsigned long __copy_user_zeroing_intel_nocache(void *to,
				const void __user *from, unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary sized copy.  */
#define __copy_user(to, from, size)			\
do {							\
	int __d0, __d1, __d2;				\
	__asm__ __volatile__(				\
		"	cmp  $7,%0\n"			\
		"	jbe  1f\n"			\
		"	movl %1,%0\n"			\
		"	negl %0\n"			\
		"	andl $7,%0\n"			\
		"	subl %0,%3\n"			\
		"4:	rep; movsb\n"			\
		"	movl %3,%0\n"			\
		"	shrl $2,%0\n"			\
		"	andl $3,%3\n"			\
		"	.align 2,0x90\n"		\
		"0:	rep; movsl\n"			\
		"	movl %3,%0\n"			\
		"1:	rep; movsb\n"			\
		"2:\n"					\
		".section .fixup,\"ax\"\n"		\
		"5:	addl %3,%0\n"			\
		"	jmp 2b\n"			\
		"3:	lea 0(%3,%0,4),%0\n"		\
		"	jmp 2b\n"			\
		".previous\n"				\
		_ASM_EXTABLE(4b,5b)			\
		_ASM_EXTABLE(0b,3b)			\
		_ASM_EXTABLE(1b,2b)			\
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)	\
		: "3"(size), "0"(size), "1"(to), "2"(from)		\
		: "memory");				\
} while (0)
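
/*
 * Annotation (added; not in the original file): __copy_user() works in
 * three phases once the copy is larger than 7 bytes.  It first moves
 * (-to & 7) single bytes so the destination becomes 8-byte aligned,
 * then copies the bulk a dword at a time with rep movsl, and finally
 * moves the remaining size & 3 bytes with rep movsb.  The .fixup code
 * turns a fault in any phase back into the number of uncopied bytes.
 */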

#define __copy_user_zeroing(to, from, size)		\
do {							\
	int __d0, __d1, __d2;				\
	__asm__ __volatile__(				\
		"	cmp  $7,%0\n"			\
		"	jbe  1f\n"			\
		"	movl %1,%0\n"			\
		"	negl %0\n"			\
		"	andl $7,%0\n"			\
		"	subl %0,%3\n"			\
		"4:	rep; movsb\n"			\
		"	movl %3,%0\n"			\
		"	shrl $2,%0\n"			\
		"	andl $3,%3\n"			\
		"	.align 2,0x90\n"		\
		"0:	rep; movsl\n"			\
		"	movl %3,%0\n"			\
		"1:	rep; movsb\n"			\
		"2:\n"					\
		".section .fixup,\"ax\"\n"		\
		"5:	addl %3,%0\n"			\
		"	jmp 6f\n"			\
		"3:	lea 0(%3,%0,4),%0\n"		\
		"6:	pushl %0\n"			\
		"	pushl %%eax\n"			\
		"	xorl %%eax,%%eax\n"		\
		"	rep; stosb\n"			\
		"	popl %%eax\n"			\
		"	popl %0\n"			\
		"	jmp 2b\n"			\
		".previous\n"				\
		_ASM_EXTABLE(4b,5b)			\
		_ASM_EXTABLE(0b,3b)			\
		_ASM_EXTABLE(1b,6b)			\
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)	\
		: "3"(size), "0"(size), "1"(to), "2"(from)		\
		: "memory");				\
} while (0)

unsigned long __copy_to_user_ll(void __user *to, const void *from,
				unsigned long n)
{
	stac();
	if (movsl_is_ok(to, from, n))
		__copy_user(to, from, n);
	else
		n = __copy_user_intel(to, from, n);
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_to_user_ll);
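
/*
 * Note (added annotation): stac() and clac() bracket the user access in
 * __copy_to_user_ll() above and in the helpers below.  On CPUs with
 * SMAP they set and clear EFLAGS.AC, so the kernel may touch user pages
 * only inside these windows; without SMAP they are no-ops.
 */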

unsigned long __copy_from_user_ll(void *to, const void __user *from,
				unsigned long n)
{
	stac();
	if (movsl_is_ok(to, from, n))
		__copy_user_zeroing(to, from, n);
	else
		n = __copy_user_zeroing_intel(to, from, n);
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll);

unsigned long __copy_from_user_ll_nozero(void *to, const void __user *from,
					 unsigned long n)
{
	stac();
	if (movsl_is_ok(to, from, n))
		__copy_user(to, from, n);
	else
		n = __copy_user_intel((void __user *)to,
				      (const void *)from, n);
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nozero);

unsigned long __copy_from_user_ll_nocache(void *to, const void __user *from,
					unsigned long n)
{
	stac();
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n > 64 && cpu_has_xmm2)
		n = __copy_user_zeroing_intel_nocache(to, from, n);
	else
		__copy_user_zeroing(to, from, n);
#else
	__copy_user_zeroing(to, from, n);
#endif
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache);
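
/*
 * Note (added annotation): the nocache variants store with movnti, a
 * non-temporal SSE2 instruction that bypasses the cache.  That only
 * pays off for copies larger than 64 bytes and requires cpu_has_xmm2;
 * smaller copies fall back to the ordinary __copy_user*() paths above.
 */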

unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
					unsigned long n)
{
	stac();
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n > 64 && cpu_has_xmm2)
		n = __copy_user_intel_nocache(to, from, n);
	else
		__copy_user(to, from, n);
#else
	__copy_user(to, from, n);
#endif
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);

/**
 * copy_to_user: - Copy a block of data into user space.
 * @to:   Destination address, in user space.
 * @from: Source address, in kernel space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep if pagefaults are
 *          enabled.
 *
 * Copy data from kernel space to user space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 */
unsigned long _copy_to_user(void __user *to, const void *from, unsigned n)
{
	if (access_ok(VERIFY_WRITE, to, n))
		n = __copy_to_user(to, from, n);
	return n;
}
EXPORT_SYMBOL(_copy_to_user);
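
/*
 * Usage sketch (added; illustrative only; "ubuf" is a hypothetical
 * __user pointer and "kbuf" a kernel buffer):
 *
 *	if (copy_to_user(ubuf, kbuf, sizeof(kbuf)))
 *		return -EFAULT;
 */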

/**
 * copy_from_user: - Copy a block of data from user space.
 * @to:   Destination address, in kernel space.
 * @from: Source address, in user space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep if pagefaults are
 *          enabled.
 *
 * Copy data from user space to kernel space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 *
 * If some data could not be copied, this function will pad the copied
 * data to the requested size using zero bytes.
 */
unsigned long _copy_from_user(void *to, const void __user *from, unsigned n)
{
	if (access_ok(VERIFY_READ, from, n))
		n = __copy_from_user(to, from, n);
	else
		memset(to, 0, n);
	return n;
}
EXPORT_SYMBOL(_copy_from_user);
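
/*
 * Usage sketch (added; illustrative only; "ubuf" is a hypothetical
 * __user pointer).  On partial failure the destination is zero-padded,
 * so kbuf never exposes stale kernel memory even if the caller ignores
 * the error:
 *
 *	char kbuf[64];
 *
 *	if (copy_from_user(kbuf, ubuf, sizeof(kbuf)))
 *		return -EFAULT;
 */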