/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000 Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/bug.h>
#include <asm/byteorder.h>	/* sigh ... */
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#if _MIPS_SZLONG == 32
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL	"ll	"
#define __SC	"sc	"
#define __INS	"ins	"
#define __EXT	"ext	"
#elif _MIPS_SZLONG == 64
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL	"lld	"
#define __SC	"scd	"
#define __INS	"dins	"
#define __EXT	"dext	"
#endif

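/*
 * A bit number decomposes into a word index plus a bit offset via the
 * constants above; a sketch of the arithmetic, assuming a 64-bit kernel
 * (SZLONG_LOG == 6, SZLONG_MASK == 63UL):
 *
 *	nr >> SZLONG_LOG	-> 70 >> 6 == 1  (second unsigned long)
 *	nr & SZLONG_MASK	-> 70 & 63 == 6  (bit 6 within that word)
 *
 * which is why @nr may exceed BITS_PER_LONG in the operations below.
 */
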
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
#define smp_mb__after_clear_bit()	smp_llsc_mb()

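/*
 * An illustrative unlock-style use of the barrier macros above; LOCK_BIT
 * and word are hypothetical names, not part of this header:
 *
 *	smp_mb__before_clear_bit();	(order earlier stores before the release)
 *	clear_bit(LOCK_BIT, &word);	(the clear itself implies no barrier)
 */
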
/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# set_bit	\n"
		"	or	%0, %2				\n"
		"	" __SC "%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1	# set_bit	\n"
			"	" __INS "%0, %3, %2, 1		\n"
			"	" __SC "%0, %1			\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (bit), "r" (~0));
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	mips3			\n"
			"	" __LL "%0, %1	# set_bit	\n"
			"	or	%0, %2			\n"
			"	" __SC "%0, %1			\n"
			"	.set	mips0			\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a |= mask;
		raw_local_irq_restore(flags);
	}
}

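/*
 * Sketch of a typical caller; FLAG_PENDING and flag_words are hypothetical
 * names, not part of this header:
 *
 *	static unsigned long flag_words[2];	(128 bits on a 64-bit kernel)
 *	set_bit(FLAG_PENDING, flag_words);	(atomic, but no memory barrier)
 */
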
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# clear_bit	\n"
		"	and	%0, %2				\n"
		"	" __SC "%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+m" (*m)
		: "ir" (~(1UL << bit)));
#ifdef CONFIG_CPU_MIPSR2
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1	# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1		\n"
			"	" __SC "%0, %1			\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (bit));
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	mips3			\n"
			"	" __LL "%0, %1	# clear_bit	\n"
			"	and	%0, %2			\n"
			"	" __SC "%0, %1			\n"
			"	.set	mips0			\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (~(1UL << bit)));
		} while (unlikely(!temp));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a &= ~mask;
		raw_local_irq_restore(flags);
	}
}

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_clear_bit();
	clear_bit(nr, addr);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC "%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+m" (*m)
		: "ir" (1UL << bit));
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3			\n"
			"	" __LL "%0, %1	# change_bit	\n"
			"	xor	%0, %2			\n"
			"	" __SC "%0, %1			\n"
			"	.set	mips0			\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a ^= mask;
		raw_local_irq_restore(flags);
	}
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC "%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC "%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a |= mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}

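/*
 * The returned old value makes a run-once pattern straightforward; a
 * sketch with hypothetical INIT_DONE and state names:
 *
 *	if (!test_and_set_bit(INIT_DONE, &state))
 *		do_one_time_init();	(only one caller observes 0 -> 1)
 */
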
/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC "%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC "%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a |= mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}
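
/*
 * test_and_set_bit_lock() pairs with clear_bit_unlock() above to form a
 * simple bit lock; a sketch with a hypothetical LOCK_BIT and word:
 *
 *	while (test_and_set_bit_lock(LOCK_BIT, &word))
 *		cpu_relax();			(spin until we take 0 -> 1)
 *	... critical section ...
 *	clear_bit_unlock(LOCK_BIT, &word);	(release semantics)
 */
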
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC "%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
#ifdef CONFIG_CPU_MIPSR2
	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1	# test_and_clear_bit	\n"
			"	" __EXT "%2, %0, %3, 1			\n"
			"	" __INS "%0, $0, %3, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "ir" (bit)
			: "memory");
		} while (unlikely(!temp));
#endif
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1	# test_and_clear_bit	\n"
			"	or	%2, %0, %3			\n"
			"	xor	%2, %3				\n"
			"	" __SC "%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a &= ~mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}

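/*
 * A common consumer pattern: atomically claim a pending event so that
 * exactly one thread handles it (EV_PENDING and pending are hypothetical):
 *
 *	if (test_and_clear_bit(EV_PENDING, &pending))
 *		handle_event();		(old value was 1 for one claimant only)
 */
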
/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC "%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1	# test_and_change_bit	\n"
			"	xor	%2, %0, %3			\n"
			"	" __SC "%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a ^= mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb();
	__clear_bit(nr, addr);
}

/*
 * Return the bit position (0..63) of the most significant 1 bit in a word.
 * Undefined if no 1 bit exists, so callers should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	int num;

	if (BITS_PER_LONG == 32 &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push				\n"
		"	.set	mips32				\n"
		"	clz	%0, %1				\n"
		"	.set	pop				\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	if (BITS_PER_LONG == 64 &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push				\n"
		"	.set	mips64				\n"
		"	dclz	%0, %1				\n"
		"	.set	pop				\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}

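/*
 * The pure-C fallback above halves the search space at each step; a worked
 * trace on a 32-bit kernel for word = 0x00f0 (most significant 1 is bit 7):
 *
 *	num = 31
 *	top 16 bits empty: num = 15, word <<= 16 (0x00f00000)
 *	top  8 bits empty: num =  7, word <<=  8 (0xf0000000)
 *	top 4, 2 and 1 bits occupied: num stays 7
 *
 * so __fls(0x00f0) returns 7.
 */
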
/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __fls(word & -word);
}

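/*
 * __ffs() relies on the two's-complement identity that word & -word
 * isolates the lowest set bit; e.g. for word = 0b0110:
 *
 *	-word == ...11111010, so word & -word == 0b0010
 *
 * and __fls() of that single-bit value is exactly the first set bit (1).
 */
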
/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int x)
{
	int r;

	if (__builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__("clz %0, %1" : "=r" (x) : "r" (x));

		return 32 - x;
	}

	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, and therefore differs in spirit from ffz (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */