/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/bug.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#if _MIPS_SZLONG == 32
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL		"ll	"
#define __SC		"sc	"
#define __INS		"ins	"
#define __EXT		"ext	"
#elif _MIPS_SZLONG == 64
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL		"lld	"
#define __SC		"scd	"
#define __INS		"dins	"
#define __EXT		"dext	"
#endif

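/*
 * Illustrative note (not part of the original header): SZLONG_LOG and
 * SZLONG_MASK split a bit number into a word index plus an offset within
 * that word, which is the addressing scheme every routine below uses.
 * For a hypothetical nr = 131 on a 64-bit kernel (_MIPS_SZLONG == 64):
 *
 *	word index = nr >> SZLONG_LOG	(131 >> 6 = 2, the third long)
 *	bit offset = nr & SZLONG_MASK	(131 & 63 = 3)
 *	mask	   = 1UL << bit offset	(1UL << 3 = 0x8)
 */
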
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	smp_llsc_mb()
#define smp_mb__after_clear_bit()	smp_llsc_mb()

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
	} else if (__builtin_constant_p(bit)) {
		__asm__ __volatile__(
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	" __INS "%0, %4, %2, 1				\n"
		"	" __SC "%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (bit), "m" (*m), "r" (~0));
#endif /* CONFIG_CPU_MIPSR2 */
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a |= mask;
		raw_local_irq_restore(flags);
	}
}
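
/*
 * Usage sketch (illustrative only, not part of the original header); the
 * flag word and bit number below are hypothetical.  Bit operations act on
 * arrays of unsigned long, so the flag word must have that type:
 *
 *	static unsigned long my_flags;		(hypothetical flag word)
 *	#define MY_DEV_BUSY	0		(hypothetical bit number)
 *
 *	set_bit(MY_DEV_BUSY, &my_flags);	(atomic; no barrier implied)
 */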

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << bit)), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
	} else if (__builtin_constant_p(bit)) {
		__asm__ __volatile__(
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	" __INS "%0, $0, %2, 1				\n"
		"	" __SC "%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (bit), "m" (*m));
#endif /* CONFIG_CPU_MIPSR2 */
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << bit)), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a &= ~mask;
		raw_local_irq_restore(flags);
	}
}

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation.  It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_clear_bit();
	clear_bit(nr, addr);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# change_bit	\n"
		"	xor	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# change_bit	\n"
		"	xor	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		*a ^= mask;
		raw_local_irq_restore(flags);
	}
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqz	%2, 2f					\n"
		"	 and	%2, %0, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a |= mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}
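
/*
 * Usage sketch (illustrative only): because the old value is returned
 * atomically with the update, test_and_set_bit() can arbitrate between
 * CPUs racing for the same resource.  Names are hypothetical:
 *
 *	if (test_and_set_bit(MY_DEV_BUSY, &my_flags))
 *		return -EBUSY;		(someone else got there first)
 *	...				(we own the device now)
 */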

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit_lock	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit_lock	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqz	%2, 2f					\n"
		"	 and	%2, %0, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a |= mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}
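
/*
 * Usage sketch (illustrative only): test_and_set_bit_lock() pairs with
 * clear_bit_unlock() above to form a simple bit spinlock; the acquire
 * barrier after the set and the release barrier before the clear bracket
 * the critical section.  Names are hypothetical:
 *
 *	while (test_and_set_bit_lock(MY_LOCK_BIT, &my_flags))
 *		cpu_relax();		(spin until the bit was clear)
 *	...				(critical section)
 *	clear_bit_unlock(MY_LOCK_BIT, &my_flags);
 */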

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
#ifdef CONFIG_CPU_MIPSR2
	} else if (__builtin_constant_p(nr)) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	" __EXT "%2, %0, %3, 1				\n"
		"	" __INS "%0, $0, %3, 1				\n"
		"	" __SC "%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "ri" (bit), "m" (*m)
		: "memory");
#endif
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqz	%2, 2f					\n"
		"	 and	%2, %0, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a &= ~mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}
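
/*
 * Usage sketch (illustrative only): atomically consuming a pending-work
 * flag that another CPU may set concurrently.  Names are hypothetical:
 *
 *	if (test_and_clear_bit(MY_WORK_PENDING, &my_flags))
 *		process_work();		(the flag was set; we cleared it,
 *					 so no other CPU will also consume it)
 */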

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	noreorder				\n"
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqz	%2, 2f					\n"
		"	 and	%2, %0, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	 nop						\n"
		"	.previous					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << bit), "m" (*m)
		: "memory");
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		unsigned long flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << bit;
		raw_local_irq_save(flags);
		res = (mask & *a);
		*a ^= mask;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return res != 0;
}
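
/*
 * Usage sketch (illustrative only): atomically toggling a state bit while
 * learning the state we transitioned away from.  Names are hypothetical:
 *
 *	old = test_and_change_bit(MY_LED_BIT, &my_flags);
 *	pr_debug("LED was %s\n", old ? "on" : "off");
 */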

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation.  It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb();
	__clear_bit(nr, addr);
}

/*
 * Return the bit position (0..63) of the most significant 1 bit in a word.
 * Returns -1 if no 1 bit exists.
 */
static inline int __ilog2(unsigned long x)
{
	int lz;

	if (sizeof(x) == 4) {
		__asm__(
		"	.set	push					\n"
		"	.set	mips32					\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (lz)
		: "r" (x));

		return 31 - lz;
	}

	BUG_ON(sizeof(x) != 8);

	__asm__(
	"	.set	push					\n"
	"	.set	mips64					\n"
	"	dclz	%0, %1					\n"
	"	.set	pop					\n"
	: "=r" (lz)
	: "r" (x));

	return 63 - lz;
}
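
/*
 * Worked example (illustrative only): on a 32-bit kernel, x = 0x00001000
 * has 19 leading zeroes, so clz yields 19 and __ilog2() returns
 * 31 - 19 = 12, the position of the single set bit.  For x = 0, clz
 * yields 32 and the result is -1, the documented "no 1 bit" value.
 */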

#if defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64)

/*
 * __ffs - find first bit set in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __ilog2(word & -word);
}
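
/*
 * Worked example (illustrative only): word & -word isolates the lowest
 * set bit, because two's complement negation flips every bit above it.
 * For word = 40 (0b101000):
 *
 *	-word        = ...11011000
 *	word & -word =   0b001000	(only bit 3 survives)
 *
 * so __ffs(40) = __ilog2(8) = 3.
 */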

/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	__asm__("clz %0, %1" : "=r" (word) : "r" (word));

	return 32 - word;
}
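
/*
 * Worked example (illustrative only): for word = 0x00800000 (bit 23 set),
 * clz counts 8 leading zeroes, so fls() returns 32 - 8 = 24, numbering
 * bits from 1 as ffs does.  For word = 0, clz yields 32 and fls() returns
 * 0, matching the documented fls(0) = 0.
 */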

#if defined(CONFIG_64BIT) && defined(CONFIG_CPU_MIPS64)
static inline int fls64(__u64 word)
{
	__asm__("dclz %0, %1" : "=r" (word) : "r" (word));

	return 64 - word;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, and therefore differs in spirit from ffz() (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}
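
/*
 * Worked example (illustrative only): for nonzero word, ffs() is simply
 * __ffs() + 1, since the libc convention numbers bits from 1:
 *
 *	ffs(0x18)   = fls(0x18 & -0x18) = fls(0x08) = 4
 *	__ffs(0x18) = 3
 */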

#else

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64) */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */