/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */
27
/* Acquire load / release store of the counter, overridable per-arch. */
#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
35
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * If an architecture overrides __atomic_acquire_fence() it will probably
 * want to define smp_mb__after_spinlock().
 */
#ifndef __atomic_acquire_fence
#define __atomic_acquire_fence		smp_mb__after_atomic
#endif

#ifndef __atomic_release_fence
#define __atomic_release_fence		smp_mb__before_atomic
#endif

#ifndef __atomic_pre_full_fence
#define __atomic_pre_full_fence		smp_mb__before_atomic
#endif

#ifndef __atomic_post_full_fence
#define __atomic_post_full_fence	smp_mb__after_atomic
#endif
59
/*
 * Builders that derive the acquire/release/fully-ordered forms of an
 * operation from its _relaxed variant plus the fences defined above.
 * These use GCC/Clang statement expressions so the relaxed op's return
 * value can be forwarded.
 */
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	__atomic_acquire_fence();					\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	__atomic_release_fence();					\
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	__atomic_pre_full_fence();					\
	__ret = op##_relaxed(args);					\
	__atomic_post_full_fence();					\
	__ret;								\
})
81
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */
105
#ifndef atomic_inc
#define atomic_inc(v)			atomic_add(1, (v))
#endif

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed

#ifndef atomic_inc_return
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic_inc_return_relaxed(v)	atomic_add_return_relaxed(1, (v))
#define atomic_inc_return_acquire(v)	atomic_add_return_acquire(1, (v))
#define atomic_inc_return_release(v)	atomic_add_return_release(1, (v))
#else /* atomic_inc_return */
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return
#endif /* atomic_inc_return */

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */
141
/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

#ifndef atomic_dec
#define atomic_dec(v)			atomic_sub(1, (v))
#endif
169
/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed

#ifndef atomic_dec_return
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_dec_return_relaxed(v)	atomic_sub_return_relaxed(1, (v))
#define atomic_dec_return_acquire(v)	atomic_sub_return_acquire(1, (v))
#define atomic_dec_return_release(v)	atomic_sub_return_release(1, (v))
#else /* atomic_dec_return */
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return
#endif /* atomic_dec_return */

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */
201
202
/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */
226
/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */
258
/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */
282
/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */
314
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */
338
/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */
362
/* andnot: clear the bits in @i. Falls back to and-with-complement. */
#ifndef atomic_andnot
#define atomic_andnot(i, v)		atomic_and(~(int)(i), (v))
#endif

#ifndef atomic_fetch_andnot_relaxed

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(i, v)		atomic_fetch_and(~(int)(i), (v))
#define atomic_fetch_andnot_relaxed(i, v)	atomic_fetch_and_relaxed(~(int)(i), (v))
#define atomic_fetch_andnot_acquire(i, v)	atomic_fetch_and_acquire(~(int)(i), (v))
#define atomic_fetch_andnot_release(i, v)	atomic_fetch_and_release(~(int)(i), (v))
#else /* atomic_fetch_andnot */
#define atomic_fetch_andnot_relaxed		atomic_fetch_andnot
#define atomic_fetch_andnot_acquire		atomic_fetch_andnot
#define atomic_fetch_andnot_release		atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
397
/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */
421
422
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */
446
/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */
470
#ifndef atomic_try_cmpxchg

/*
 * try_cmpxchg: like cmpxchg, but on failure writes the observed value
 * back through @_po so the caller's retry loop needn't re-read @_p.
 * Returns true iff the exchange happened.
 */
#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
493
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */
517
/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */
541
/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
562
563 /**
564 * atomic_fetch_add_unless - add unless the number is already a given value
565 * @v: pointer of type atomic_t
566 * @a: the amount to add to v...
567 * @u: ...unless v is equal to u.
568 *
569 * Atomically adds @a to @v, if @v was not already @u.
570 * Returns the original value of @v.
571 */
572 #ifndef atomic_fetch_add_unless
atomic_fetch_add_unless(atomic_t * v,int a,int u)573 static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
574 {
575 int c = atomic_read(v);
576
577 do {
578 if (unlikely(c == u))
579 break;
580 } while (!atomic_try_cmpxchg(v, &c, c + a));
581
582 return c;
583 }
584 #endif
585
586 /**
587 * atomic_add_unless - add unless the number is already a given value
588 * @v: pointer of type atomic_t
589 * @a: the amount to add to v...
590 * @u: ...unless v is equal to u.
591 *
592 * Atomically adds @a to @v, if @v was not already @u.
593 * Returns true if the addition was done.
594 */
atomic_add_unless(atomic_t * v,int a,int u)595 static inline bool atomic_add_unless(atomic_t *v, int a, int u)
596 {
597 return atomic_fetch_add_unless(v, a, u) != u;
598 }
599
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
610
611 /**
612 * atomic_inc_and_test - increment and test
613 * @v: pointer of type atomic_t
614 *
615 * Atomically increments @v by 1
616 * and returns true if the result is zero, or false for all
617 * other cases.
618 */
619 #ifndef atomic_inc_and_test
atomic_inc_and_test(atomic_t * v)620 static inline bool atomic_inc_and_test(atomic_t *v)
621 {
622 return atomic_inc_return(v) == 0;
623 }
624 #endif
625
626 /**
627 * atomic_dec_and_test - decrement and test
628 * @v: pointer of type atomic_t
629 *
630 * Atomically decrements @v by 1 and
631 * returns true if the result is 0, or false for all other
632 * cases.
633 */
634 #ifndef atomic_dec_and_test
atomic_dec_and_test(atomic_t * v)635 static inline bool atomic_dec_and_test(atomic_t *v)
636 {
637 return atomic_dec_return(v) == 0;
638 }
639 #endif
640
641 /**
642 * atomic_sub_and_test - subtract value from variable and test result
643 * @i: integer value to subtract
644 * @v: pointer of type atomic_t
645 *
646 * Atomically subtracts @i from @v and returns
647 * true if the result is zero, or false for all
648 * other cases.
649 */
650 #ifndef atomic_sub_and_test
atomic_sub_and_test(int i,atomic_t * v)651 static inline bool atomic_sub_and_test(int i, atomic_t *v)
652 {
653 return atomic_sub_return(i, v) == 0;
654 }
655 #endif
656
657 /**
658 * atomic_add_negative - add and test if negative
659 * @i: integer value to add
660 * @v: pointer of type atomic_t
661 *
662 * Atomically adds @i to @v and returns true
663 * if the result is negative, or false when
664 * result is greater than or equal to zero.
665 */
666 #ifndef atomic_add_negative
atomic_add_negative(int i,atomic_t * v)667 static inline bool atomic_add_negative(int i, atomic_t *v)
668 {
669 return atomic_add_return(i, v) < 0;
670 }
671 #endif
672
673 #ifndef atomic_inc_unless_negative
atomic_inc_unless_negative(atomic_t * v)674 static inline bool atomic_inc_unless_negative(atomic_t *v)
675 {
676 int c = atomic_read(v);
677
678 do {
679 if (unlikely(c < 0))
680 return false;
681 } while (!atomic_try_cmpxchg(v, &c, c + 1));
682
683 return true;
684 }
685 #endif
686
687 #ifndef atomic_dec_unless_positive
atomic_dec_unless_positive(atomic_t * v)688 static inline bool atomic_dec_unless_positive(atomic_t *v)
689 {
690 int c = atomic_read(v);
691
692 do {
693 if (unlikely(c > 0))
694 return false;
695 } while (!atomic_try_cmpxchg(v, &c, c - 1));
696
697 return true;
698 }
699 #endif
700
701 /*
702 * atomic_dec_if_positive - decrement by 1 if old value positive
703 * @v: pointer of type atomic_t
704 *
705 * The function returns the old value of *v minus 1, even if
706 * the atomic variable, v, was not decremented.
707 */
708 #ifndef atomic_dec_if_positive
atomic_dec_if_positive(atomic_t * v)709 static inline int atomic_dec_if_positive(atomic_t *v)
710 {
711 int dec, c = atomic_read(v);
712
713 do {
714 dec = c - 1;
715 if (unlikely(dec < 0))
716 break;
717 } while (!atomic_try_cmpxchg(v, &c, dec));
718
719 return dec;
720 }
721 #endif
722
/* Spin (via smp_cond_load_*) until condition @c on the counter holds. */
#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
729
#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
737
/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */
761
#ifndef atomic64_inc
#define atomic64_inc(v)			atomic64_add(1, (v))
#endif

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed

#ifndef atomic64_inc_return
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1, (v))
#define atomic64_inc_return_acquire(v)	atomic64_add_return_acquire(1, (v))
#define atomic64_inc_return_release(v)	atomic64_add_return_release(1, (v))
#else /* atomic64_inc_return */
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return
#endif /* atomic64_inc_return */

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */
797
798
/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

#ifndef atomic64_dec
#define atomic64_dec(v)			atomic64_sub(1, (v))
#endif
826
/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed

#ifndef atomic64_dec_return
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1, (v))
#define atomic64_dec_return_acquire(v)	atomic64_sub_return_acquire(1, (v))
#define atomic64_dec_return_release(v)	atomic64_sub_return_release(1, (v))
#else /* atomic64_dec_return */
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return
#endif /* atomic64_dec_return */

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */
858
859
/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */
883
/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */
915
/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */
939
/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */
971
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */
995
/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */
1019
/* andnot fallback: AND with the bitwise complement of the mask. */
#ifndef atomic64_andnot
#define atomic64_andnot(i, v) atomic64_and(~(long long)(i), (v))
#endif
1023
#ifndef atomic64_fetch_andnot_relaxed

/*
 * No arch-supplied _relaxed form: build each andnot variant from the
 * matching fetch_and variant (AND with the complemented mask), or reuse
 * a fully ordered atomic64_fetch_andnot if the arch defines one.
 */
#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(i, v) atomic64_fetch_and(~(long long)(i), (v))
#define atomic64_fetch_andnot_relaxed(i, v) atomic64_fetch_and_relaxed(~(long long)(i), (v))
#define atomic64_fetch_andnot_acquire(i, v) atomic64_fetch_and_acquire(~(long long)(i), (v))
#define atomic64_fetch_andnot_release(i, v) atomic64_fetch_and_release(~(long long)(i), (v))
#else /* atomic64_fetch_andnot */
/* Fully ordered op is valid for every weaker ordering. */
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#else /* atomic64_fetch_andnot_relaxed */

/* Arch provides _relaxed: build the ordered forms with explicit fences. */
#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...) \
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...) \
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
1054
/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
/* Only the fully ordered op exists: it satisfies every weaker ordering. */
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

/* Arch provides _relaxed: build the ordered forms with explicit fences. */
#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...) \
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...) \
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */
1078
1079
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
/* Only the fully ordered op exists: it satisfies every weaker ordering. */
#define atomic64_xchg_relaxed atomic64_xchg
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg

#else /* atomic64_xchg_relaxed */

/* Arch provides _relaxed: build the ordered forms with explicit fences. */
#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...) \
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */
1103
/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
/* Only the fully ordered op exists: it satisfies every weaker ordering. */
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

/* Arch provides _relaxed: build the ordered forms with explicit fences. */
#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */
1127
#ifndef atomic64_try_cmpxchg

/*
 * Build try_cmpxchg on top of plain cmpxchg: @_po points at the expected
 * old value and, on failure, is updated with the value actually observed
 * so the caller can retry without re-reading.  Evaluates to true iff the
 * exchange succeeded.  @_po is evaluated exactly once (cached in __po).
 */
#define __atomic64_try_cmpxchg(type, _p, _po, _n) \
({ \
	typeof(_po) __po = (_po); \
	typeof(*(_po)) __r, __o = *__po; \
	__r = atomic64_cmpxchg##type((_p), __o, (_n)); \
	if (unlikely(__r != __o)) \
		*__po = __r; \
	likely(__r == __o); \
})

#define atomic64_try_cmpxchg(_p, _po, _n) __atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n) __atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n) __atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n) __atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
/* Fully ordered op is valid for every weaker ordering. */
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
1150
1151 /**
1152 * atomic64_fetch_add_unless - add unless the number is already a given value
1153 * @v: pointer of type atomic64_t
1154 * @a: the amount to add to v...
1155 * @u: ...unless v is equal to u.
1156 *
1157 * Atomically adds @a to @v, if @v was not already @u.
1158 * Returns the original value of @v.
1159 */
1160 #ifndef atomic64_fetch_add_unless
atomic64_fetch_add_unless(atomic64_t * v,long long a,long long u)1161 static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
1162 long long u)
1163 {
1164 long long c = atomic64_read(v);
1165
1166 do {
1167 if (unlikely(c == u))
1168 break;
1169 } while (!atomic64_try_cmpxchg(v, &c, c + a));
1170
1171 return c;
1172 }
1173 #endif
1174
1175 /**
1176 * atomic64_add_unless - add unless the number is already a given value
1177 * @v: pointer of type atomic_t
1178 * @a: the amount to add to v...
1179 * @u: ...unless v is equal to u.
1180 *
1181 * Atomically adds @a to @v, if @v was not already @u.
1182 * Returns true if the addition was done.
1183 */
atomic64_add_unless(atomic64_t * v,long long a,long long u)1184 static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
1185 {
1186 return atomic64_fetch_add_unless(v, a, u) != u;
1187 }
1188
1189 /**
1190 * atomic64_inc_not_zero - increment unless the number is zero
1191 * @v: pointer of type atomic64_t
1192 *
1193 * Atomically increments @v by 1, if @v is non-zero.
1194 * Returns true if the increment was done.
1195 */
1196 #ifndef atomic64_inc_not_zero
1197 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
1198 #endif
1199
1200 /**
1201 * atomic64_inc_and_test - increment and test
1202 * @v: pointer of type atomic64_t
1203 *
1204 * Atomically increments @v by 1
1205 * and returns true if the result is zero, or false for all
1206 * other cases.
1207 */
1208 #ifndef atomic64_inc_and_test
atomic64_inc_and_test(atomic64_t * v)1209 static inline bool atomic64_inc_and_test(atomic64_t *v)
1210 {
1211 return atomic64_inc_return(v) == 0;
1212 }
1213 #endif
1214
1215 /**
1216 * atomic64_dec_and_test - decrement and test
1217 * @v: pointer of type atomic64_t
1218 *
1219 * Atomically decrements @v by 1 and
1220 * returns true if the result is 0, or false for all other
1221 * cases.
1222 */
1223 #ifndef atomic64_dec_and_test
atomic64_dec_and_test(atomic64_t * v)1224 static inline bool atomic64_dec_and_test(atomic64_t *v)
1225 {
1226 return atomic64_dec_return(v) == 0;
1227 }
1228 #endif
1229
1230 /**
1231 * atomic64_sub_and_test - subtract value from variable and test result
1232 * @i: integer value to subtract
1233 * @v: pointer of type atomic64_t
1234 *
1235 * Atomically subtracts @i from @v and returns
1236 * true if the result is zero, or false for all
1237 * other cases.
1238 */
1239 #ifndef atomic64_sub_and_test
atomic64_sub_and_test(long long i,atomic64_t * v)1240 static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
1241 {
1242 return atomic64_sub_return(i, v) == 0;
1243 }
1244 #endif
1245
1246 /**
1247 * atomic64_add_negative - add and test if negative
1248 * @i: integer value to add
1249 * @v: pointer of type atomic64_t
1250 *
1251 * Atomically adds @i to @v and returns true
1252 * if the result is negative, or false when
1253 * result is greater than or equal to zero.
1254 */
1255 #ifndef atomic64_add_negative
atomic64_add_negative(long long i,atomic64_t * v)1256 static inline bool atomic64_add_negative(long long i, atomic64_t *v)
1257 {
1258 return atomic64_add_return(i, v) < 0;
1259 }
1260 #endif
1261
1262 #ifndef atomic64_inc_unless_negative
atomic64_inc_unless_negative(atomic64_t * v)1263 static inline bool atomic64_inc_unless_negative(atomic64_t *v)
1264 {
1265 long long c = atomic64_read(v);
1266
1267 do {
1268 if (unlikely(c < 0))
1269 return false;
1270 } while (!atomic64_try_cmpxchg(v, &c, c + 1));
1271
1272 return true;
1273 }
1274 #endif
1275
1276 #ifndef atomic64_dec_unless_positive
atomic64_dec_unless_positive(atomic64_t * v)1277 static inline bool atomic64_dec_unless_positive(atomic64_t *v)
1278 {
1279 long long c = atomic64_read(v);
1280
1281 do {
1282 if (unlikely(c > 0))
1283 return false;
1284 } while (!atomic64_try_cmpxchg(v, &c, c - 1));
1285
1286 return true;
1287 }
1288 #endif
1289
1290 /*
1291 * atomic64_dec_if_positive - decrement by 1 if old value positive
1292 * @v: pointer of type atomic64_t
1293 *
1294 * The function returns the old value of *v minus 1, even if
1295 * the atomic64 variable, v, was not decremented.
1296 */
1297 #ifndef atomic64_dec_if_positive
atomic64_dec_if_positive(atomic64_t * v)1298 static inline long long atomic64_dec_if_positive(atomic64_t *v)
1299 {
1300 long long dec, c = atomic64_read(v);
1301
1302 do {
1303 dec = c - 1;
1304 if (unlikely(dec < 0))
1305 break;
1306 } while (!atomic64_try_cmpxchg(v, &c, dec));
1307
1308 return dec;
1309 }
1310 #endif
1311
/*
 * Wait on v->counter until condition expression (c) is true; the
 * _acquire form additionally provides ACQUIRE ordering for the final
 * load (see smp_cond_load_relaxed/acquire in <asm/barrier.h>).
 */
#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */
1318