]>
Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * Atomic operations that C can't guarantee us. Useful for | |
3 | * resource counting etc.. | |
4 | * | |
5 | * But use these as seldom as possible since they are much more slower | |
6 | * than regular operations. | |
7 | * | |
8 | * This file is subject to the terms and conditions of the GNU General Public | |
9 | * License. See the file "COPYING" in the main directory of this archive | |
10 | * for more details. | |
11 | * | |
e303e088 | 12 | * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle |
1da177e4 | 13 | */ |
1da177e4 LT |
14 | #ifndef _ASM_ATOMIC_H |
15 | #define _ASM_ATOMIC_H | |
16 | ||
192ef366 | 17 | #include <linux/irqflags.h> |
ea435467 | 18 | #include <linux/types.h> |
0004a9df | 19 | #include <asm/barrier.h> |
1da177e4 LT |
20 | #include <asm/cpu-features.h> |
21 | #include <asm/war.h> | |
2856f5e3 | 22 | #include <asm/system.h> |
1da177e4 | 23 | |
1da177e4 LT |
24 | #define ATOMIC_INIT(i) { (i) } |
25 | ||
26 | /* | |
27 | * atomic_read - read atomic variable | |
28 | * @v: pointer of type atomic_t | |
29 | * | |
30 | * Atomically reads the value of @v. | |
31 | */ | |
f3d46f9d | 32 | #define atomic_read(v) (*(volatile int *)&(v)->counter) |
1da177e4 LT |
33 | |
34 | /* | |
35 | * atomic_set - set atomic variable | |
36 | * @v: pointer of type atomic_t | |
37 | * @i: required value | |
38 | * | |
39 | * Atomically sets the value of @v to @i. | |
40 | */ | |
21a151d8 | 41 | #define atomic_set(v, i) ((v)->counter = (i)) |
1da177e4 LT |
42 | |
43 | /* | |
44 | * atomic_add - add integer to atomic variable | |
45 | * @i: integer value to add | |
46 | * @v: pointer of type atomic_t | |
47 | * | |
48 | * Atomically adds @i to @v. | |
49 | */ | |
50 | static __inline__ void atomic_add(int i, atomic_t * v) | |
51 | { | |
b791d119 | 52 | if (kernel_uses_llsc && R10000_LLSC_WAR) { |
915ec1e2 | 53 | int temp; |
1da177e4 LT |
54 | |
55 | __asm__ __volatile__( | |
c4559f67 | 56 | " .set mips3 \n" |
1da177e4 LT |
57 | "1: ll %0, %1 # atomic_add \n" |
58 | " addu %0, %2 \n" | |
59 | " sc %0, %1 \n" | |
60 | " beqzl %0, 1b \n" | |
aac8aa77 | 61 | " .set mips0 \n" |
1da177e4 LT |
62 | : "=&r" (temp), "=m" (v->counter) |
63 | : "Ir" (i), "m" (v->counter)); | |
b791d119 | 64 | } else if (kernel_uses_llsc) { |
915ec1e2 | 65 | int temp; |
1da177e4 | 66 | |
7837314d RB |
67 | do { |
68 | __asm__ __volatile__( | |
69 | " .set mips3 \n" | |
70 | " ll %0, %1 # atomic_add \n" | |
71 | " addu %0, %2 \n" | |
72 | " sc %0, %1 \n" | |
73 | " .set mips0 \n" | |
74 | : "=&r" (temp), "=m" (v->counter) | |
75 | : "Ir" (i), "m" (v->counter)); | |
76 | } while (unlikely(!temp)); | |
1da177e4 LT |
77 | } else { |
78 | unsigned long flags; | |
79 | ||
49edd098 | 80 | raw_local_irq_save(flags); |
1da177e4 | 81 | v->counter += i; |
49edd098 | 82 | raw_local_irq_restore(flags); |
1da177e4 LT |
83 | } |
84 | } | |
85 | ||
/*
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  Non-serializing (no implied
 * memory barrier).
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/* Branch-likely retry loop: R10000 ll/sc erratum workaround. */
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		/* Retry in C until sc succeeds (temp != 0). */
		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_sub	\n"
			"	subu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		/* No ll/sc: protect the RMW with local irq disable. */
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
128 | ||
/*
 * Same as above, but return the result value.
 *
 * The *_return variants are serializing: they are bracketed by
 * smp_mb__before_llsc()/smp_llsc_mb() so the operation is a full
 * memory barrier.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		/*
		 * The second addu after beqzl recomputes the result from
		 * the ll'd value once sc has succeeded.
		 */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_add_return	\n"
			"	addu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		/* temp holds the old value loaded by ll; derive the result. */
		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
182 | ||
183 | static __inline__ int atomic_sub_return(int i, atomic_t * v) | |
184 | { | |
915ec1e2 | 185 | int result; |
1da177e4 | 186 | |
f252ffd5 | 187 | smp_mb__before_llsc(); |
0004a9df | 188 | |
b791d119 | 189 | if (kernel_uses_llsc && R10000_LLSC_WAR) { |
915ec1e2 | 190 | int temp; |
1da177e4 LT |
191 | |
192 | __asm__ __volatile__( | |
c4559f67 | 193 | " .set mips3 \n" |
1da177e4 LT |
194 | "1: ll %1, %2 # atomic_sub_return \n" |
195 | " subu %0, %1, %3 \n" | |
196 | " sc %0, %2 \n" | |
197 | " beqzl %0, 1b \n" | |
198 | " subu %0, %1, %3 \n" | |
aac8aa77 | 199 | " .set mips0 \n" |
1da177e4 LT |
200 | : "=&r" (result), "=&r" (temp), "=m" (v->counter) |
201 | : "Ir" (i), "m" (v->counter) | |
202 | : "memory"); | |
7837314d RB |
203 | |
204 | result = temp - i; | |
b791d119 | 205 | } else if (kernel_uses_llsc) { |
915ec1e2 | 206 | int temp; |
1da177e4 | 207 | |
7837314d RB |
208 | do { |
209 | __asm__ __volatile__( | |
210 | " .set mips3 \n" | |
211 | " ll %1, %2 # atomic_sub_return \n" | |
212 | " subu %0, %1, %3 \n" | |
213 | " sc %0, %2 \n" | |
214 | " .set mips0 \n" | |
215 | : "=&r" (result), "=&r" (temp), "=m" (v->counter) | |
216 | : "Ir" (i), "m" (v->counter) | |
217 | : "memory"); | |
218 | } while (unlikely(!result)); | |
219 | ||
220 | result = temp - i; | |
1da177e4 LT |
221 | } else { |
222 | unsigned long flags; | |
223 | ||
49edd098 | 224 | raw_local_irq_save(flags); |
1da177e4 LT |
225 | result = v->counter; |
226 | result -= i; | |
227 | v->counter = result; | |
49edd098 | 228 | raw_local_irq_restore(flags); |
1da177e4 LT |
229 | } |
230 | ||
17099b11 | 231 | smp_llsc_mb(); |
0004a9df | 232 | |
1da177e4 LT |
233 | return result; |
234 | } | |
235 | ||
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater or equal than @i.
 * The function returns the old value of @v minus @i.
 *
 * Note the return value can be negative: when @v - @i < 0 the store is
 * skipped (bltz branches past sc) but the negative difference is still
 * returned.  Serializing (full barrier semantics).
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		/*
		 * bltz skips the sc when the difference is negative.  The
		 * subu in the beqzl delay slot (noreorder) recomputes the
		 * value for the retry path.
		 */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		/* Only commit the new value when it did not go negative. */
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
301 | ||
e12f644b MD |
/* Compare-and-swap / exchange on the counter; built on the generic
 * cmpxchg()/xchg() primitives from <asm/system.h>. */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
4a6dae6d | 304 | |
8426e1f6 NP |
305 | /** |
306 | * atomic_add_unless - add unless the number is a given value | |
307 | * @v: pointer of type atomic_t | |
308 | * @a: the amount to add to v... | |
309 | * @u: ...unless v is equal to u. | |
310 | * | |
311 | * Atomically adds @a to @v, so long as it was not @u. | |
312 | * Returns non-zero if @v was not @u, and zero otherwise. | |
313 | */ | |
2856f5e3 MD |
314 | static __inline__ int atomic_add_unless(atomic_t *v, int a, int u) |
315 | { | |
316 | int c, old; | |
317 | c = atomic_read(v); | |
318 | for (;;) { | |
319 | if (unlikely(c == (u))) | |
320 | break; | |
321 | old = atomic_cmpxchg((v), c, c + (a)); | |
322 | if (likely(old == c)) | |
323 | break; | |
324 | c = old; | |
325 | } | |
326 | return c != (u); | |
327 | } | |
8426e1f6 NP |
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.  (No test is performed; use
 * atomic_dec_and_test() for that.)
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
1da177e4 | 396 | |
875d43e7 | 397 | #ifdef CONFIG_64BIT |
1da177e4 | 398 | |
1da177e4 LT |
#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.  The volatile cast forces a real
 * load on each use.
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)	((v)->counter = (i))
1da177e4 LT |
414 | |
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.  64-bit (lld/scd/daddu) counterpart of
 * atomic_add(); non-serializing.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/* Branch-likely retry loop: R10000 ll/sc erratum workaround. */
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		/* Retry in C until scd succeeds (temp != 0). */
		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_add	\n"
			"	daddu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		/* No ll/sc: protect the RMW with local irq disable. */
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
457 | ||
/*
 * atomic64_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.  64-bit counterpart of
 * atomic_sub(); non-serializing.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/* Branch-likely retry loop: R10000 ll/sc erratum workaround. */
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		/* Retry in C until scd succeeds (temp != 0). */
		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_sub	\n"
			"	dsubu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		/* No ll/sc: protect the RMW with local irq disable. */
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
500 | ||
/*
 * Same as above, but return the result value.
 *
 * Serializing: bracketed by smp_mb__before_llsc()/smp_llsc_mb().
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		/*
		 * The second daddu after beqzl recomputes the result from
		 * the lld'd value once scd has succeeded.
		 */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_add_return	\n"
			"	daddu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		/* temp holds the old value loaded by lld; derive the result. */
		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
554 | ||
/*
 * atomic64_sub_return - subtract integer and return the new value.
 *
 * Serializing: bracketed by smp_mb__before_llsc()/smp_llsc_mb().
 */
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		/*
		 * The second dsubu after beqzl recomputes the result from
		 * the lld'd value once scd has succeeded.
		 */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_sub_return	\n"
			"	dsubu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		/* temp holds the old value loaded by lld; derive the result. */
		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
605 | ||
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater or equal than @i.
 * The function returns the old value of @v minus @i.
 *
 * As in the 32-bit variant, the return value may be negative: bltz
 * skips the store but the negative difference is still returned.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		/*
		 * bltz skips the scd when the difference is negative.  The
		 * dsubu in the beqzl delay slot (noreorder) recomputes the
		 * value for the retry path.
		 */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		/* Only commit the new value when it did not go negative. */
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
671 | ||
/* 64-bit compare-and-swap / exchange on the counter.  The cast keeps
 * the result typed as the counter's type. */
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
675 | ||
676 | /** | |
677 | * atomic64_add_unless - add unless the number is a given value | |
678 | * @v: pointer of type atomic64_t | |
679 | * @a: the amount to add to v... | |
680 | * @u: ...unless v is equal to u. | |
681 | * | |
682 | * Atomically adds @a to @v, so long as it was not @u. | |
683 | * Returns non-zero if @v was not @u, and zero otherwise. | |
684 | */ | |
2856f5e3 MD |
685 | static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u) |
686 | { | |
687 | long c, old; | |
688 | c = atomic64_read(v); | |
689 | for (;;) { | |
690 | if (unlikely(c == (u))) | |
691 | break; | |
692 | old = atomic64_cmpxchg((v), c, c + (a)); | |
693 | if (likely(old == c)) | |
694 | break; | |
695 | c = old; | |
696 | } | |
697 | return c != (u); | |
698 | } | |
699 | ||
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.  (No test is performed; use
 * atomic64_dec_and_test() for that.)
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
1da177e4 | 768 | |
2b78920d DCZ |
769 | #else /* !CONFIG_64BIT */ |
770 | ||
771 | #include <asm-generic/atomic64.h> | |
772 | ||
875d43e7 | 773 | #endif /* CONFIG_64BIT */ |
1da177e4 LT |
774 | |
/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.  These hooks let callers add the barrier half that the
 * plain inc/dec operations do not provide.
 */
#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
1da177e4 | 783 | |
72099ed2 | 784 | #include <asm-generic/atomic-long.h> |
17099b11 | 785 | |
1da177e4 | 786 | #endif /* _ASM_ATOMIC_H */ |