]> bbs.cooldavid.org Git - net-next-2.6.git/blame - arch/mips/include/asm/atomic.h
Merge branch 'for-linus' of git://neil.brown.name/md
[net-next-2.6.git] / arch / mips / include / asm / atomic.h
CommitLineData
1da177e4
LT
1/*
2 * Atomic operations that C can't guarantee us. Useful for
3 * resource counting etc..
4 *
5 * But use these as seldom as possible since they are much slower
6 * than regular operations.
7 *
8 * This file is subject to the terms and conditions of the GNU General Public
9 * License. See the file "COPYING" in the main directory of this archive
10 * for more details.
11 *
e303e088 12 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
1da177e4 13 */
1da177e4
LT
14#ifndef _ASM_ATOMIC_H
15#define _ASM_ATOMIC_H
16
192ef366 17#include <linux/irqflags.h>
ea435467 18#include <linux/types.h>
0004a9df 19#include <asm/barrier.h>
1da177e4
LT
20#include <asm/cpu-features.h>
21#include <asm/war.h>
2856f5e3 22#include <asm/system.h>
1da177e4 23
1da177e4
LT
#define ATOMIC_INIT(i)		{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  The volatile cast forces a real
 * load from memory on every use.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
1da177e4
LT
42
43/*
44 * atomic_add - add integer to atomic variable
45 * @i: integer value to add
46 * @v: pointer of type atomic_t
47 *
48 * Atomically adds @i to @v.
49 */
50static __inline__ void atomic_add(int i, atomic_t * v)
51{
b791d119 52 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 53 int temp;
1da177e4
LT
54
55 __asm__ __volatile__(
c4559f67 56 " .set mips3 \n"
1da177e4
LT
57 "1: ll %0, %1 # atomic_add \n"
58 " addu %0, %2 \n"
59 " sc %0, %1 \n"
60 " beqzl %0, 1b \n"
aac8aa77 61 " .set mips0 \n"
1da177e4
LT
62 : "=&r" (temp), "=m" (v->counter)
63 : "Ir" (i), "m" (v->counter));
b791d119 64 } else if (kernel_uses_llsc) {
915ec1e2 65 int temp;
1da177e4
LT
66
67 __asm__ __volatile__(
c4559f67 68 " .set mips3 \n"
1da177e4
LT
69 "1: ll %0, %1 # atomic_add \n"
70 " addu %0, %2 \n"
71 " sc %0, %1 \n"
f65e4fa8
RB
72 " beqz %0, 2f \n"
73 " .subsection 2 \n"
74 "2: b 1b \n"
75 " .previous \n"
aac8aa77 76 " .set mips0 \n"
1da177e4
LT
77 : "=&r" (temp), "=m" (v->counter)
78 : "Ir" (i), "m" (v->counter));
79 } else {
80 unsigned long flags;
81
49edd098 82 raw_local_irq_save(flags);
1da177e4 83 v->counter += i;
49edd098 84 raw_local_irq_restore(flags);
1da177e4
LT
85 }
86}
87
88/*
89 * atomic_sub - subtract the atomic variable
90 * @i: integer value to subtract
91 * @v: pointer of type atomic_t
92 *
93 * Atomically subtracts @i from @v.
94 */
95static __inline__ void atomic_sub(int i, atomic_t * v)
96{
b791d119 97 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 98 int temp;
1da177e4
LT
99
100 __asm__ __volatile__(
c4559f67 101 " .set mips3 \n"
1da177e4
LT
102 "1: ll %0, %1 # atomic_sub \n"
103 " subu %0, %2 \n"
104 " sc %0, %1 \n"
105 " beqzl %0, 1b \n"
aac8aa77 106 " .set mips0 \n"
1da177e4
LT
107 : "=&r" (temp), "=m" (v->counter)
108 : "Ir" (i), "m" (v->counter));
b791d119 109 } else if (kernel_uses_llsc) {
915ec1e2 110 int temp;
1da177e4
LT
111
112 __asm__ __volatile__(
c4559f67 113 " .set mips3 \n"
1da177e4
LT
114 "1: ll %0, %1 # atomic_sub \n"
115 " subu %0, %2 \n"
116 " sc %0, %1 \n"
f65e4fa8
RB
117 " beqz %0, 2f \n"
118 " .subsection 2 \n"
119 "2: b 1b \n"
120 " .previous \n"
aac8aa77 121 " .set mips0 \n"
1da177e4
LT
122 : "=&r" (temp), "=m" (v->counter)
123 : "Ir" (i), "m" (v->counter));
124 } else {
125 unsigned long flags;
126
49edd098 127 raw_local_irq_save(flags);
1da177e4 128 v->counter -= i;
49edd098 129 raw_local_irq_restore(flags);
1da177e4
LT
130 }
131}
132
133/*
134 * Same as above, but return the result value
135 */
136static __inline__ int atomic_add_return(int i, atomic_t * v)
137{
915ec1e2 138 int result;
1da177e4 139
f252ffd5 140 smp_mb__before_llsc();
0004a9df 141
b791d119 142 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 143 int temp;
1da177e4
LT
144
145 __asm__ __volatile__(
c4559f67 146 " .set mips3 \n"
1da177e4
LT
147 "1: ll %1, %2 # atomic_add_return \n"
148 " addu %0, %1, %3 \n"
149 " sc %0, %2 \n"
150 " beqzl %0, 1b \n"
151 " addu %0, %1, %3 \n"
aac8aa77 152 " .set mips0 \n"
1da177e4
LT
153 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
154 : "Ir" (i), "m" (v->counter)
155 : "memory");
b791d119 156 } else if (kernel_uses_llsc) {
915ec1e2 157 int temp;
1da177e4
LT
158
159 __asm__ __volatile__(
c4559f67 160 " .set mips3 \n"
1da177e4
LT
161 "1: ll %1, %2 # atomic_add_return \n"
162 " addu %0, %1, %3 \n"
163 " sc %0, %2 \n"
f65e4fa8 164 " beqz %0, 2f \n"
1da177e4 165 " addu %0, %1, %3 \n"
f65e4fa8
RB
166 " .subsection 2 \n"
167 "2: b 1b \n"
168 " .previous \n"
aac8aa77 169 " .set mips0 \n"
1da177e4
LT
170 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
171 : "Ir" (i), "m" (v->counter)
172 : "memory");
173 } else {
174 unsigned long flags;
175
49edd098 176 raw_local_irq_save(flags);
1da177e4
LT
177 result = v->counter;
178 result += i;
179 v->counter = result;
49edd098 180 raw_local_irq_restore(flags);
1da177e4
LT
181 }
182
17099b11 183 smp_llsc_mb();
0004a9df 184
1da177e4
LT
185 return result;
186}
187
188static __inline__ int atomic_sub_return(int i, atomic_t * v)
189{
915ec1e2 190 int result;
1da177e4 191
f252ffd5 192 smp_mb__before_llsc();
0004a9df 193
b791d119 194 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 195 int temp;
1da177e4
LT
196
197 __asm__ __volatile__(
c4559f67 198 " .set mips3 \n"
1da177e4
LT
199 "1: ll %1, %2 # atomic_sub_return \n"
200 " subu %0, %1, %3 \n"
201 " sc %0, %2 \n"
202 " beqzl %0, 1b \n"
203 " subu %0, %1, %3 \n"
aac8aa77 204 " .set mips0 \n"
1da177e4
LT
205 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
206 : "Ir" (i), "m" (v->counter)
207 : "memory");
b791d119 208 } else if (kernel_uses_llsc) {
915ec1e2 209 int temp;
1da177e4
LT
210
211 __asm__ __volatile__(
c4559f67 212 " .set mips3 \n"
1da177e4
LT
213 "1: ll %1, %2 # atomic_sub_return \n"
214 " subu %0, %1, %3 \n"
215 " sc %0, %2 \n"
f65e4fa8 216 " beqz %0, 2f \n"
1da177e4 217 " subu %0, %1, %3 \n"
f65e4fa8
RB
218 " .subsection 2 \n"
219 "2: b 1b \n"
220 " .previous \n"
aac8aa77 221 " .set mips0 \n"
1da177e4
LT
222 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
223 : "Ir" (i), "m" (v->counter)
224 : "memory");
225 } else {
226 unsigned long flags;
227
49edd098 228 raw_local_irq_save(flags);
1da177e4
LT
229 result = v->counter;
230 result -= i;
231 v->counter = result;
49edd098 232 raw_local_irq_restore(flags);
1da177e4
LT
233 }
234
17099b11 235 smp_llsc_mb();
0004a9df 236
1da177e4
LT
237 return result;
238}
239
240/*
f10d14dd
AG
241 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
242 * @i: integer value to subtract
1da177e4
LT
243 * @v: pointer of type atomic_t
244 *
f10d14dd
AG
245 * Atomically test @v and subtract @i if @v is greater or equal than @i.
246 * The function returns the old value of @v minus @i.
1da177e4
LT
247 */
248static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
249{
915ec1e2 250 int result;
1da177e4 251
f252ffd5 252 smp_mb__before_llsc();
0004a9df 253
b791d119 254 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 255 int temp;
1da177e4
LT
256
257 __asm__ __volatile__(
c4559f67 258 " .set mips3 \n"
1da177e4
LT
259 "1: ll %1, %2 # atomic_sub_if_positive\n"
260 " subu %0, %1, %3 \n"
261 " bltz %0, 1f \n"
262 " sc %0, %2 \n"
92f22c18 263 " .set noreorder \n"
1da177e4 264 " beqzl %0, 1b \n"
92f22c18
RB
265 " subu %0, %1, %3 \n"
266 " .set reorder \n"
1da177e4 267 "1: \n"
aac8aa77 268 " .set mips0 \n"
1da177e4
LT
269 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
270 : "Ir" (i), "m" (v->counter)
271 : "memory");
b791d119 272 } else if (kernel_uses_llsc) {
915ec1e2 273 int temp;
1da177e4
LT
274
275 __asm__ __volatile__(
c4559f67 276 " .set mips3 \n"
1da177e4
LT
277 "1: ll %1, %2 # atomic_sub_if_positive\n"
278 " subu %0, %1, %3 \n"
279 " bltz %0, 1f \n"
280 " sc %0, %2 \n"
92f22c18 281 " .set noreorder \n"
f65e4fa8 282 " beqz %0, 2f \n"
92f22c18
RB
283 " subu %0, %1, %3 \n"
284 " .set reorder \n"
f65e4fa8
RB
285 " .subsection 2 \n"
286 "2: b 1b \n"
287 " .previous \n"
50952026 288 "1: \n"
aac8aa77 289 " .set mips0 \n"
1da177e4
LT
290 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
291 : "Ir" (i), "m" (v->counter)
292 : "memory");
293 } else {
294 unsigned long flags;
295
49edd098 296 raw_local_irq_save(flags);
1da177e4
LT
297 result = v->counter;
298 result -= i;
299 if (result >= 0)
300 v->counter = result;
49edd098 301 raw_local_irq_restore(flags);
1da177e4
LT
302 }
303
17099b11 304 smp_llsc_mb();
0004a9df 305
1da177e4
LT
306 return result;
307}
308
/* Compare-and-exchange / exchange built on the generic cmpxchg/xchg. */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
8426e1f6
NP
312/**
313 * atomic_add_unless - add unless the number is a given value
314 * @v: pointer of type atomic_t
315 * @a: the amount to add to v...
316 * @u: ...unless v is equal to u.
317 *
318 * Atomically adds @a to @v, so long as it was not @u.
319 * Returns non-zero if @v was not @u, and zero otherwise.
320 */
2856f5e3
MD
321static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
322{
323 int c, old;
324 c = atomic_read(v);
325 for (;;) {
326 if (unlikely(c == (u)))
327 break;
328 old = atomic_cmpxchg((v), c, c + (a));
329 if (likely(old == c))
330 break;
331 c = old;
332 }
333 return c != (u);
334}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
875d43e7 404#ifdef CONFIG_64BIT
1da177e4 405
1da177e4
LT
#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)	((v)->counter = (i))
1da177e4
LT
421
422/*
423 * atomic64_add - add integer to atomic variable
424 * @i: integer value to add
425 * @v: pointer of type atomic64_t
426 *
427 * Atomically adds @i to @v.
428 */
429static __inline__ void atomic64_add(long i, atomic64_t * v)
430{
b791d119 431 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 432 long temp;
1da177e4
LT
433
434 __asm__ __volatile__(
aac8aa77 435 " .set mips3 \n"
1da177e4 436 "1: lld %0, %1 # atomic64_add \n"
f2a68272 437 " daddu %0, %2 \n"
1da177e4
LT
438 " scd %0, %1 \n"
439 " beqzl %0, 1b \n"
aac8aa77 440 " .set mips0 \n"
1da177e4
LT
441 : "=&r" (temp), "=m" (v->counter)
442 : "Ir" (i), "m" (v->counter));
b791d119 443 } else if (kernel_uses_llsc) {
915ec1e2 444 long temp;
1da177e4
LT
445
446 __asm__ __volatile__(
aac8aa77 447 " .set mips3 \n"
1da177e4 448 "1: lld %0, %1 # atomic64_add \n"
f2a68272 449 " daddu %0, %2 \n"
1da177e4 450 " scd %0, %1 \n"
f65e4fa8
RB
451 " beqz %0, 2f \n"
452 " .subsection 2 \n"
453 "2: b 1b \n"
454 " .previous \n"
aac8aa77 455 " .set mips0 \n"
1da177e4
LT
456 : "=&r" (temp), "=m" (v->counter)
457 : "Ir" (i), "m" (v->counter));
458 } else {
459 unsigned long flags;
460
49edd098 461 raw_local_irq_save(flags);
1da177e4 462 v->counter += i;
49edd098 463 raw_local_irq_restore(flags);
1da177e4
LT
464 }
465}
466
467/*
468 * atomic64_sub - subtract the atomic variable
469 * @i: integer value to subtract
470 * @v: pointer of type atomic64_t
471 *
472 * Atomically subtracts @i from @v.
473 */
474static __inline__ void atomic64_sub(long i, atomic64_t * v)
475{
b791d119 476 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 477 long temp;
1da177e4
LT
478
479 __asm__ __volatile__(
aac8aa77 480 " .set mips3 \n"
1da177e4 481 "1: lld %0, %1 # atomic64_sub \n"
f2a68272 482 " dsubu %0, %2 \n"
1da177e4
LT
483 " scd %0, %1 \n"
484 " beqzl %0, 1b \n"
aac8aa77 485 " .set mips0 \n"
1da177e4
LT
486 : "=&r" (temp), "=m" (v->counter)
487 : "Ir" (i), "m" (v->counter));
b791d119 488 } else if (kernel_uses_llsc) {
915ec1e2 489 long temp;
1da177e4
LT
490
491 __asm__ __volatile__(
aac8aa77 492 " .set mips3 \n"
1da177e4 493 "1: lld %0, %1 # atomic64_sub \n"
f2a68272 494 " dsubu %0, %2 \n"
1da177e4 495 " scd %0, %1 \n"
f65e4fa8
RB
496 " beqz %0, 2f \n"
497 " .subsection 2 \n"
498 "2: b 1b \n"
499 " .previous \n"
aac8aa77 500 " .set mips0 \n"
1da177e4
LT
501 : "=&r" (temp), "=m" (v->counter)
502 : "Ir" (i), "m" (v->counter));
503 } else {
504 unsigned long flags;
505
49edd098 506 raw_local_irq_save(flags);
1da177e4 507 v->counter -= i;
49edd098 508 raw_local_irq_restore(flags);
1da177e4
LT
509 }
510}
511
512/*
513 * Same as above, but return the result value
514 */
515static __inline__ long atomic64_add_return(long i, atomic64_t * v)
516{
915ec1e2 517 long result;
1da177e4 518
f252ffd5 519 smp_mb__before_llsc();
0004a9df 520
b791d119 521 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 522 long temp;
1da177e4
LT
523
524 __asm__ __volatile__(
aac8aa77 525 " .set mips3 \n"
1da177e4 526 "1: lld %1, %2 # atomic64_add_return \n"
f2a68272 527 " daddu %0, %1, %3 \n"
1da177e4
LT
528 " scd %0, %2 \n"
529 " beqzl %0, 1b \n"
f2a68272 530 " daddu %0, %1, %3 \n"
aac8aa77 531 " .set mips0 \n"
1da177e4
LT
532 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
533 : "Ir" (i), "m" (v->counter)
534 : "memory");
b791d119 535 } else if (kernel_uses_llsc) {
915ec1e2 536 long temp;
1da177e4
LT
537
538 __asm__ __volatile__(
aac8aa77 539 " .set mips3 \n"
1da177e4 540 "1: lld %1, %2 # atomic64_add_return \n"
f2a68272 541 " daddu %0, %1, %3 \n"
1da177e4 542 " scd %0, %2 \n"
f65e4fa8 543 " beqz %0, 2f \n"
f2a68272 544 " daddu %0, %1, %3 \n"
f65e4fa8
RB
545 " .subsection 2 \n"
546 "2: b 1b \n"
547 " .previous \n"
aac8aa77 548 " .set mips0 \n"
1da177e4
LT
549 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
550 : "Ir" (i), "m" (v->counter)
551 : "memory");
552 } else {
553 unsigned long flags;
554
49edd098 555 raw_local_irq_save(flags);
1da177e4
LT
556 result = v->counter;
557 result += i;
558 v->counter = result;
49edd098 559 raw_local_irq_restore(flags);
1da177e4
LT
560 }
561
17099b11 562 smp_llsc_mb();
0004a9df 563
1da177e4
LT
564 return result;
565}
566
567static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
568{
915ec1e2 569 long result;
1da177e4 570
f252ffd5 571 smp_mb__before_llsc();
0004a9df 572
b791d119 573 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 574 long temp;
1da177e4
LT
575
576 __asm__ __volatile__(
aac8aa77 577 " .set mips3 \n"
1da177e4 578 "1: lld %1, %2 # atomic64_sub_return \n"
f2a68272 579 " dsubu %0, %1, %3 \n"
1da177e4
LT
580 " scd %0, %2 \n"
581 " beqzl %0, 1b \n"
f2a68272 582 " dsubu %0, %1, %3 \n"
aac8aa77 583 " .set mips0 \n"
1da177e4
LT
584 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
585 : "Ir" (i), "m" (v->counter)
586 : "memory");
b791d119 587 } else if (kernel_uses_llsc) {
915ec1e2 588 long temp;
1da177e4
LT
589
590 __asm__ __volatile__(
aac8aa77 591 " .set mips3 \n"
1da177e4 592 "1: lld %1, %2 # atomic64_sub_return \n"
f2a68272 593 " dsubu %0, %1, %3 \n"
1da177e4 594 " scd %0, %2 \n"
f65e4fa8 595 " beqz %0, 2f \n"
f2a68272 596 " dsubu %0, %1, %3 \n"
f65e4fa8
RB
597 " .subsection 2 \n"
598 "2: b 1b \n"
599 " .previous \n"
aac8aa77 600 " .set mips0 \n"
1da177e4
LT
601 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
602 : "Ir" (i), "m" (v->counter)
603 : "memory");
604 } else {
605 unsigned long flags;
606
49edd098 607 raw_local_irq_save(flags);
1da177e4
LT
608 result = v->counter;
609 result -= i;
610 v->counter = result;
49edd098 611 raw_local_irq_restore(flags);
1da177e4
LT
612 }
613
17099b11 614 smp_llsc_mb();
0004a9df 615
1da177e4
LT
616 return result;
617}
618
619/*
f10d14dd
AG
620 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
621 * @i: integer value to subtract
1da177e4
LT
622 * @v: pointer of type atomic64_t
623 *
f10d14dd
AG
624 * Atomically test @v and subtract @i if @v is greater or equal than @i.
625 * The function returns the old value of @v minus @i.
1da177e4
LT
626 */
627static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
628{
915ec1e2 629 long result;
1da177e4 630
f252ffd5 631 smp_mb__before_llsc();
0004a9df 632
b791d119 633 if (kernel_uses_llsc && R10000_LLSC_WAR) {
915ec1e2 634 long temp;
1da177e4
LT
635
636 __asm__ __volatile__(
aac8aa77 637 " .set mips3 \n"
1da177e4
LT
638 "1: lld %1, %2 # atomic64_sub_if_positive\n"
639 " dsubu %0, %1, %3 \n"
640 " bltz %0, 1f \n"
641 " scd %0, %2 \n"
92f22c18 642 " .set noreorder \n"
1da177e4 643 " beqzl %0, 1b \n"
92f22c18
RB
644 " dsubu %0, %1, %3 \n"
645 " .set reorder \n"
1da177e4 646 "1: \n"
aac8aa77 647 " .set mips0 \n"
1da177e4
LT
648 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
649 : "Ir" (i), "m" (v->counter)
650 : "memory");
b791d119 651 } else if (kernel_uses_llsc) {
915ec1e2 652 long temp;
1da177e4
LT
653
654 __asm__ __volatile__(
aac8aa77 655 " .set mips3 \n"
1da177e4
LT
656 "1: lld %1, %2 # atomic64_sub_if_positive\n"
657 " dsubu %0, %1, %3 \n"
658 " bltz %0, 1f \n"
659 " scd %0, %2 \n"
92f22c18 660 " .set noreorder \n"
f65e4fa8 661 " beqz %0, 2f \n"
92f22c18
RB
662 " dsubu %0, %1, %3 \n"
663 " .set reorder \n"
f65e4fa8
RB
664 " .subsection 2 \n"
665 "2: b 1b \n"
666 " .previous \n"
50952026 667 "1: \n"
aac8aa77 668 " .set mips0 \n"
1da177e4
LT
669 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
670 : "Ir" (i), "m" (v->counter)
671 : "memory");
672 } else {
673 unsigned long flags;
674
49edd098 675 raw_local_irq_save(flags);
1da177e4
LT
676 result = v->counter;
677 result -= i;
678 if (result >= 0)
679 v->counter = result;
49edd098 680 raw_local_irq_restore(flags);
1da177e4
LT
681 }
682
17099b11 683 smp_llsc_mb();
0004a9df 684
1da177e4
LT
685 return result;
686}
687
/* Compare-and-exchange / exchange built on the generic cmpxchg/xchg;
 * the cast restores the counter's type after the generic macro. */
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
691
692/**
693 * atomic64_add_unless - add unless the number is a given value
694 * @v: pointer of type atomic64_t
695 * @a: the amount to add to v...
696 * @u: ...unless v is equal to u.
697 *
698 * Atomically adds @a to @v, so long as it was not @u.
699 * Returns non-zero if @v was not @u, and zero otherwise.
700 */
2856f5e3
MD
701static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
702{
703 long c, old;
704 c = atomic64_read(v);
705 for (;;) {
706 if (unlikely(c == (u)))
707 break;
708 old = atomic64_cmpxchg((v), c, c + (a));
709 if (likely(old == c))
710 break;
711 c = old;
712 }
713 return c != (u);
714}
715
e12f644b
MD
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
1da177e4 784
2b78920d
DCZ
785#else /* !CONFIG_64BIT */
786
787#include <asm-generic/atomic64.h>
788
875d43e7 789#endif /* CONFIG_64BIT */
1da177e4
LT
790
/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
1da177e4 799
72099ed2 800#include <asm-generic/atomic-long.h>
17099b11 801
1da177e4 802#endif /* _ASM_ATOMIC_H */