/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle ([email protected])
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/bug.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#if (_MIPS_SZLONG == 32)
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL	"ll	"
#define __SC	"sc	"
#define __INS	"ins	"
#define __EXT	"ext	"
#elif (_MIPS_SZLONG == 64)
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL	"lld	"
#define __SC	"scd	"
#define __INS	"dins	"
#define __EXT	"dext	"
#endif

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	smp_llsc_mb()
#define smp_mb__after_clear_bit()	smp_llsc_mb()

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
	} else if (__builtin_constant_p(bit)) {
		__asm__ __volatile__(
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	" __INS "%0, %4, %2, 1				\n"
		"	" __SC "%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (bit), "m" (*m), "r" (~0));
#endif /* CONFIG_CPU_MIPSR2 */
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a |= mask;
		raw_local_irq_restore(flags);
	}
}

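/*
 * Usage sketch (illustrative only, not part of the original header; the
 * bitmap name below is hypothetical).  Because @nr may exceed one word,
 * the word is picked with nr >> SZLONG_LOG and the bit within it with
 * nr & SZLONG_MASK:
 *
 *	DECLARE_BITMAP(pending, 128);	// spans several longs on 32-bit
 *
 *	set_bit(70, pending);		// word 70 >> SZLONG_LOG,
 *					// bit  70 & SZLONG_MASK
 */
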
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << bit)), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
	} else if (__builtin_constant_p(bit)) {
		__asm__ __volatile__(
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	" __INS "%0, $0, %2, 1				\n"
		"	" __SC "%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (bit), "m" (*m));
#endif /* CONFIG_CPU_MIPSR2 */
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << bit)), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a &= ~mask;
		raw_local_irq_restore(flags);
	}
}

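/*
 * Barrier sketch (illustrative only; MY_LOCK_BIT and the flag word are
 * hypothetical).  When clear_bit() releases a lock-like flag, pair it
 * with the barriers named in the comment above:
 *
 *	shared_data = ...;		// stores to be published
 *	smp_mb__before_clear_bit();	// order the stores before the clear
 *	clear_bit(MY_LOCK_BIT, &flags);	// release the flag
 */
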
/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a ^= mask;
		raw_local_irq_restore(flags);
	}
}

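/*
 * Usage sketch (illustrative only; names are hypothetical).  change_bit()
 * atomically XORs the selected bit, so concurrent togglers never lose an
 * update the way a plain "*addr ^= mask" read-modify-write could:
 *
 *	change_bit(LED_STATE_BIT, &led_flags);	// flip on <-> off
 */
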
/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqz	%2, 2f					\n"
		"	 and	%2, %0, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a |= mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}

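/*
 * Usage sketch (illustrative only; BUSY_BIT and the retry policy are
 * hypothetical).  The returned old value makes a simple acquire loop
 * possible, and the implied barrier (smp_llsc_mb() above) orders the
 * critical section:
 *
 *	while (test_and_set_bit(BUSY_BIT, &state))
 *		cpu_relax();		// 1 => someone else holds it
 *	...critical section...
 *	smp_mb__before_clear_bit();
 *	clear_bit(BUSY_BIT, &state);
 */
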
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
#ifdef CONFIG_CPU_MIPSR2
	} else if (__builtin_constant_p(nr)) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	" __EXT "%2, %0, %3, 1				\n"
		"	" __INS "%0, $0, %3, 1				\n"
		"	" __SC "%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "ri" (bit), "m" (*m)
		: "memory");
#endif
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqz	%2, 2f					\n"
		"	 and	%2, %0, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a &= ~mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}

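/*
 * Usage sketch (illustrative only; WORK_PENDING_BIT and process_work()
 * are hypothetical).  Exactly one caller observes the 1 -> 0 transition,
 * so the flag can hand off work without a lock:
 *
 *	if (test_and_clear_bit(WORK_PENDING_BIT, &flags))
 *		process_work();		// we consumed the flag
 */
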
/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqz	%2, 2f					\n"
		"	 and	%2, %0, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a ^= mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}

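/*
 * Usage sketch (illustrative only; PARITY_BIT and the state word are
 * hypothetical).  The returned old value tells the caller which phase
 * the toggle moved away from:
 *
 *	if (test_and_change_bit(PARITY_BIT, &state))
 *		...;			// bit was 1, now 0
 *	else
 *		...;			// bit was 0, now 1
 */
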
#include <asm-generic/bitops/non-atomic.h>

/*
 * Return the bit position (0..63) of the most significant 1 bit in a word
 * Returns -1 if no 1 bit exists
 */
static inline int __ilog2(unsigned long x)
{
	int lz;

	if (sizeof(x) == 4) {
		__asm__ (
		"	.set	push					\n"
		"	.set	mips32					\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (lz)
		: "r" (x));

		return 31 - lz;
	}

	BUG_ON(sizeof(x) != 8);

	__asm__ (
	"	.set	push					\n"
	"	.set	mips64					\n"
	"	dclz	%0, %1					\n"
	"	.set	pop					\n"
	: "=r" (lz)
	: "r" (x));

	return 63 - lz;
}

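/*
 * Worked example (illustrative): on a 32-bit kernel, x = 0x90 has its
 * most significant 1 in bit 7, so clz gives 24 leading zeroes and
 * __ilog2(0x90) = 31 - 24 = 7.  Since clz of zero yields 32,
 * __ilog2(0) = 31 - 32 = -1, matching the comment above.
 */
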
#if defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64)

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __ilog2(word & -word);
}

/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	__asm__ ("clz %0, %1" : "=r" (word) : "r" (word));

	return 32 - word;
}

#if defined(CONFIG_64BIT) && defined(CONFIG_CPU_MIPS64)
static inline int fls64(__u64 word)
{
	__asm__ ("dclz %0, %1" : "=r" (word) : "r" (word));

	return 64 - word;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}

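/*
 * Worked comparison (illustrative) of the three lookup flavours for
 * word = 0x60 (bits 5 and 6 set):
 *
 *	__ffs(0x60) == 5	// 0-based index of the lowest set bit
 *	ffs(0x60)   == 6	// 1-based, libc convention; ffs(0) == 0
 *	fls(0x60)   == 7	// 1-based index of the highest set bit
 */
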
#else

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64) */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */