// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY
#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>
/* Ordering fallbacks for xchg(): derive acquire/release/full forms. */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */
/* Ordering fallbacks for cmpxchg(): derive acquire/release/full forms. */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */
/* Ordering fallbacks for cmpxchg64(): derive acquire/release/full forms. */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */
78 #ifndef atomic_read_acquire
80 atomic_read_acquire(const atomic_t *v)
82 return smp_load_acquire(&(v)->counter);
84 #define atomic_read_acquire atomic_read_acquire
87 #ifndef atomic_set_release
89 atomic_set_release(atomic_t *v, int i)
91 smp_store_release(&(v)->counter, i);
93 #define atomic_set_release atomic_set_release
/* Ordering fallbacks for atomic_add_return(), built around the relaxed op. */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */
/* Ordering fallbacks for atomic_fetch_add(), built around the relaxed op. */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */
/* Ordering fallbacks for atomic_sub_return(), built around the relaxed op. */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */
/* Ordering fallbacks for atomic_fetch_sub(), built around the relaxed op. */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */
266 atomic_inc(atomic_t *v)
270 #define atomic_inc atomic_inc
273 #ifndef atomic_inc_return_relaxed
274 #ifdef atomic_inc_return
275 #define atomic_inc_return_acquire atomic_inc_return
276 #define atomic_inc_return_release atomic_inc_return
277 #define atomic_inc_return_relaxed atomic_inc_return
278 #endif /* atomic_inc_return */
280 #ifndef atomic_inc_return
282 atomic_inc_return(atomic_t *v)
284 return atomic_add_return(1, v);
286 #define atomic_inc_return atomic_inc_return
289 #ifndef atomic_inc_return_acquire
291 atomic_inc_return_acquire(atomic_t *v)
293 return atomic_add_return_acquire(1, v);
295 #define atomic_inc_return_acquire atomic_inc_return_acquire
298 #ifndef atomic_inc_return_release
300 atomic_inc_return_release(atomic_t *v)
302 return atomic_add_return_release(1, v);
304 #define atomic_inc_return_release atomic_inc_return_release
307 #ifndef atomic_inc_return_relaxed
309 atomic_inc_return_relaxed(atomic_t *v)
311 return atomic_add_return_relaxed(1, v);
313 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
316 #else /* atomic_inc_return_relaxed */
318 #ifndef atomic_inc_return_acquire
320 atomic_inc_return_acquire(atomic_t *v)
322 int ret = atomic_inc_return_relaxed(v);
323 __atomic_acquire_fence();
326 #define atomic_inc_return_acquire atomic_inc_return_acquire
329 #ifndef atomic_inc_return_release
331 atomic_inc_return_release(atomic_t *v)
333 __atomic_release_fence();
334 return atomic_inc_return_relaxed(v);
336 #define atomic_inc_return_release atomic_inc_return_release
339 #ifndef atomic_inc_return
341 atomic_inc_return(atomic_t *v)
344 __atomic_pre_full_fence();
345 ret = atomic_inc_return_relaxed(v);
346 __atomic_post_full_fence();
349 #define atomic_inc_return atomic_inc_return
352 #endif /* atomic_inc_return_relaxed */
354 #ifndef atomic_fetch_inc_relaxed
355 #ifdef atomic_fetch_inc
356 #define atomic_fetch_inc_acquire atomic_fetch_inc
357 #define atomic_fetch_inc_release atomic_fetch_inc
358 #define atomic_fetch_inc_relaxed atomic_fetch_inc
359 #endif /* atomic_fetch_inc */
361 #ifndef atomic_fetch_inc
363 atomic_fetch_inc(atomic_t *v)
365 return atomic_fetch_add(1, v);
367 #define atomic_fetch_inc atomic_fetch_inc
370 #ifndef atomic_fetch_inc_acquire
372 atomic_fetch_inc_acquire(atomic_t *v)
374 return atomic_fetch_add_acquire(1, v);
376 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
379 #ifndef atomic_fetch_inc_release
381 atomic_fetch_inc_release(atomic_t *v)
383 return atomic_fetch_add_release(1, v);
385 #define atomic_fetch_inc_release atomic_fetch_inc_release
388 #ifndef atomic_fetch_inc_relaxed
390 atomic_fetch_inc_relaxed(atomic_t *v)
392 return atomic_fetch_add_relaxed(1, v);
394 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
397 #else /* atomic_fetch_inc_relaxed */
399 #ifndef atomic_fetch_inc_acquire
401 atomic_fetch_inc_acquire(atomic_t *v)
403 int ret = atomic_fetch_inc_relaxed(v);
404 __atomic_acquire_fence();
407 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
410 #ifndef atomic_fetch_inc_release
412 atomic_fetch_inc_release(atomic_t *v)
414 __atomic_release_fence();
415 return atomic_fetch_inc_relaxed(v);
417 #define atomic_fetch_inc_release atomic_fetch_inc_release
420 #ifndef atomic_fetch_inc
422 atomic_fetch_inc(atomic_t *v)
425 __atomic_pre_full_fence();
426 ret = atomic_fetch_inc_relaxed(v);
427 __atomic_post_full_fence();
430 #define atomic_fetch_inc atomic_fetch_inc
433 #endif /* atomic_fetch_inc_relaxed */
437 atomic_dec(atomic_t *v)
441 #define atomic_dec atomic_dec
444 #ifndef atomic_dec_return_relaxed
445 #ifdef atomic_dec_return
446 #define atomic_dec_return_acquire atomic_dec_return
447 #define atomic_dec_return_release atomic_dec_return
448 #define atomic_dec_return_relaxed atomic_dec_return
449 #endif /* atomic_dec_return */
451 #ifndef atomic_dec_return
453 atomic_dec_return(atomic_t *v)
455 return atomic_sub_return(1, v);
457 #define atomic_dec_return atomic_dec_return
460 #ifndef atomic_dec_return_acquire
462 atomic_dec_return_acquire(atomic_t *v)
464 return atomic_sub_return_acquire(1, v);
466 #define atomic_dec_return_acquire atomic_dec_return_acquire
469 #ifndef atomic_dec_return_release
471 atomic_dec_return_release(atomic_t *v)
473 return atomic_sub_return_release(1, v);
475 #define atomic_dec_return_release atomic_dec_return_release
478 #ifndef atomic_dec_return_relaxed
480 atomic_dec_return_relaxed(atomic_t *v)
482 return atomic_sub_return_relaxed(1, v);
484 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
487 #else /* atomic_dec_return_relaxed */
489 #ifndef atomic_dec_return_acquire
491 atomic_dec_return_acquire(atomic_t *v)
493 int ret = atomic_dec_return_relaxed(v);
494 __atomic_acquire_fence();
497 #define atomic_dec_return_acquire atomic_dec_return_acquire
500 #ifndef atomic_dec_return_release
502 atomic_dec_return_release(atomic_t *v)
504 __atomic_release_fence();
505 return atomic_dec_return_relaxed(v);
507 #define atomic_dec_return_release atomic_dec_return_release
510 #ifndef atomic_dec_return
512 atomic_dec_return(atomic_t *v)
515 __atomic_pre_full_fence();
516 ret = atomic_dec_return_relaxed(v);
517 __atomic_post_full_fence();
520 #define atomic_dec_return atomic_dec_return
523 #endif /* atomic_dec_return_relaxed */
525 #ifndef atomic_fetch_dec_relaxed
526 #ifdef atomic_fetch_dec
527 #define atomic_fetch_dec_acquire atomic_fetch_dec
528 #define atomic_fetch_dec_release atomic_fetch_dec
529 #define atomic_fetch_dec_relaxed atomic_fetch_dec
530 #endif /* atomic_fetch_dec */
532 #ifndef atomic_fetch_dec
534 atomic_fetch_dec(atomic_t *v)
536 return atomic_fetch_sub(1, v);
538 #define atomic_fetch_dec atomic_fetch_dec
541 #ifndef atomic_fetch_dec_acquire
543 atomic_fetch_dec_acquire(atomic_t *v)
545 return atomic_fetch_sub_acquire(1, v);
547 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
550 #ifndef atomic_fetch_dec_release
552 atomic_fetch_dec_release(atomic_t *v)
554 return atomic_fetch_sub_release(1, v);
556 #define atomic_fetch_dec_release atomic_fetch_dec_release
559 #ifndef atomic_fetch_dec_relaxed
561 atomic_fetch_dec_relaxed(atomic_t *v)
563 return atomic_fetch_sub_relaxed(1, v);
565 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
568 #else /* atomic_fetch_dec_relaxed */
570 #ifndef atomic_fetch_dec_acquire
572 atomic_fetch_dec_acquire(atomic_t *v)
574 int ret = atomic_fetch_dec_relaxed(v);
575 __atomic_acquire_fence();
578 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
581 #ifndef atomic_fetch_dec_release
583 atomic_fetch_dec_release(atomic_t *v)
585 __atomic_release_fence();
586 return atomic_fetch_dec_relaxed(v);
588 #define atomic_fetch_dec_release atomic_fetch_dec_release
591 #ifndef atomic_fetch_dec
593 atomic_fetch_dec(atomic_t *v)
596 __atomic_pre_full_fence();
597 ret = atomic_fetch_dec_relaxed(v);
598 __atomic_post_full_fence();
601 #define atomic_fetch_dec atomic_fetch_dec
604 #endif /* atomic_fetch_dec_relaxed */
/* Ordering fallbacks for atomic_fetch_and(), built around the relaxed op. */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */
648 #ifndef atomic_andnot
650 atomic_andnot(int i, atomic_t *v)
654 #define atomic_andnot atomic_andnot
657 #ifndef atomic_fetch_andnot_relaxed
658 #ifdef atomic_fetch_andnot
659 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
660 #define atomic_fetch_andnot_release atomic_fetch_andnot
661 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
662 #endif /* atomic_fetch_andnot */
664 #ifndef atomic_fetch_andnot
666 atomic_fetch_andnot(int i, atomic_t *v)
668 return atomic_fetch_and(~i, v);
670 #define atomic_fetch_andnot atomic_fetch_andnot
673 #ifndef atomic_fetch_andnot_acquire
675 atomic_fetch_andnot_acquire(int i, atomic_t *v)
677 return atomic_fetch_and_acquire(~i, v);
679 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
682 #ifndef atomic_fetch_andnot_release
684 atomic_fetch_andnot_release(int i, atomic_t *v)
686 return atomic_fetch_and_release(~i, v);
688 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
691 #ifndef atomic_fetch_andnot_relaxed
693 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
695 return atomic_fetch_and_relaxed(~i, v);
697 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
700 #else /* atomic_fetch_andnot_relaxed */
702 #ifndef atomic_fetch_andnot_acquire
704 atomic_fetch_andnot_acquire(int i, atomic_t *v)
706 int ret = atomic_fetch_andnot_relaxed(i, v);
707 __atomic_acquire_fence();
710 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
713 #ifndef atomic_fetch_andnot_release
715 atomic_fetch_andnot_release(int i, atomic_t *v)
717 __atomic_release_fence();
718 return atomic_fetch_andnot_relaxed(i, v);
720 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
723 #ifndef atomic_fetch_andnot
725 atomic_fetch_andnot(int i, atomic_t *v)
728 __atomic_pre_full_fence();
729 ret = atomic_fetch_andnot_relaxed(i, v);
730 __atomic_post_full_fence();
733 #define atomic_fetch_andnot atomic_fetch_andnot
736 #endif /* atomic_fetch_andnot_relaxed */
/* Ordering fallbacks for atomic_fetch_or(), built around the relaxed op. */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */
/* Ordering fallbacks for atomic_fetch_xor(), built around the relaxed op. */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */
/* Ordering fallbacks for atomic_xchg(), built around the relaxed op. */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */
/* Ordering fallbacks for atomic_cmpxchg(), built around the relaxed op. */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */
906 #ifndef atomic_try_cmpxchg_relaxed
907 #ifdef atomic_try_cmpxchg
908 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
909 #define atomic_try_cmpxchg_release atomic_try_cmpxchg
910 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
911 #endif /* atomic_try_cmpxchg */
913 #ifndef atomic_try_cmpxchg
915 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
918 r = atomic_cmpxchg(v, o, new);
919 if (unlikely(r != o))
921 return likely(r == o);
923 #define atomic_try_cmpxchg atomic_try_cmpxchg
926 #ifndef atomic_try_cmpxchg_acquire
928 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
931 r = atomic_cmpxchg_acquire(v, o, new);
932 if (unlikely(r != o))
934 return likely(r == o);
936 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
939 #ifndef atomic_try_cmpxchg_release
941 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
944 r = atomic_cmpxchg_release(v, o, new);
945 if (unlikely(r != o))
947 return likely(r == o);
949 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
952 #ifndef atomic_try_cmpxchg_relaxed
954 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
957 r = atomic_cmpxchg_relaxed(v, o, new);
958 if (unlikely(r != o))
960 return likely(r == o);
962 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
965 #else /* atomic_try_cmpxchg_relaxed */
967 #ifndef atomic_try_cmpxchg_acquire
969 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
971 bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
972 __atomic_acquire_fence();
975 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
978 #ifndef atomic_try_cmpxchg_release
980 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
982 __atomic_release_fence();
983 return atomic_try_cmpxchg_relaxed(v, old, new);
985 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
988 #ifndef atomic_try_cmpxchg
990 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
993 __atomic_pre_full_fence();
994 ret = atomic_try_cmpxchg_relaxed(v, old, new);
995 __atomic_post_full_fence();
998 #define atomic_try_cmpxchg atomic_try_cmpxchg
1001 #endif /* atomic_try_cmpxchg_relaxed */
1003 #ifndef atomic_sub_and_test
1005 * atomic_sub_and_test - subtract value from variable and test result
1006 * @i: integer value to subtract
1007 * @v: pointer of type atomic_t
1009 * Atomically subtracts @i from @v and returns
1010 * true if the result is zero, or false for all
1014 atomic_sub_and_test(int i, atomic_t *v)
1016 return atomic_sub_return(i, v) == 0;
1018 #define atomic_sub_and_test atomic_sub_and_test
1021 #ifndef atomic_dec_and_test
1023 * atomic_dec_and_test - decrement and test
1024 * @v: pointer of type atomic_t
1026 * Atomically decrements @v by 1 and
1027 * returns true if the result is 0, or false for all other
1031 atomic_dec_and_test(atomic_t *v)
1033 return atomic_dec_return(v) == 0;
1035 #define atomic_dec_and_test atomic_dec_and_test
1038 #ifndef atomic_inc_and_test
1040 * atomic_inc_and_test - increment and test
1041 * @v: pointer of type atomic_t
1043 * Atomically increments @v by 1
1044 * and returns true if the result is zero, or false for all
1048 atomic_inc_and_test(atomic_t *v)
1050 return atomic_inc_return(v) == 0;
1052 #define atomic_inc_and_test atomic_inc_and_test
1055 #ifndef atomic_add_negative
1057 * atomic_add_negative - add and test if negative
1058 * @i: integer value to add
1059 * @v: pointer of type atomic_t
1061 * Atomically adds @i to @v and returns true
1062 * if the result is negative, or false when
1063 * result is greater than or equal to zero.
1066 atomic_add_negative(int i, atomic_t *v)
1068 return atomic_add_return(i, v) < 0;
1070 #define atomic_add_negative atomic_add_negative
1073 #ifndef atomic_fetch_add_unless
1075 * atomic_fetch_add_unless - add unless the number is already a given value
1076 * @v: pointer of type atomic_t
1077 * @a: the amount to add to v...
1078 * @u: ...unless v is equal to u.
1080 * Atomically adds @a to @v, so long as @v was not already @u.
1081 * Returns original value of @v
1084 atomic_fetch_add_unless(atomic_t *v, int a, int u)
1086 int c = atomic_read(v);
1089 if (unlikely(c == u))
1091 } while (!atomic_try_cmpxchg(v, &c, c + a));
1095 #define atomic_fetch_add_unless atomic_fetch_add_unless
1098 #ifndef atomic_add_unless
1100 * atomic_add_unless - add unless the number is already a given value
1101 * @v: pointer of type atomic_t
1102 * @a: the amount to add to v...
1103 * @u: ...unless v is equal to u.
1105 * Atomically adds @a to @v, if @v was not already @u.
1106 * Returns true if the addition was done.
1109 atomic_add_unless(atomic_t *v, int a, int u)
1111 return atomic_fetch_add_unless(v, a, u) != u;
1113 #define atomic_add_unless atomic_add_unless
1116 #ifndef atomic_inc_not_zero
1118 * atomic_inc_not_zero - increment unless the number is zero
1119 * @v: pointer of type atomic_t
1121 * Atomically increments @v by 1, if @v is non-zero.
1122 * Returns true if the increment was done.
1125 atomic_inc_not_zero(atomic_t *v)
1127 return atomic_add_unless(v, 1, 0);
1129 #define atomic_inc_not_zero atomic_inc_not_zero
1132 #ifndef atomic_inc_unless_negative
1134 atomic_inc_unless_negative(atomic_t *v)
1136 int c = atomic_read(v);
1139 if (unlikely(c < 0))
1141 } while (!atomic_try_cmpxchg(v, &c, c + 1));
1145 #define atomic_inc_unless_negative atomic_inc_unless_negative
1148 #ifndef atomic_dec_unless_positive
1150 atomic_dec_unless_positive(atomic_t *v)
1152 int c = atomic_read(v);
1155 if (unlikely(c > 0))
1157 } while (!atomic_try_cmpxchg(v, &c, c - 1));
1161 #define atomic_dec_unless_positive atomic_dec_unless_positive
1164 #ifndef atomic_dec_if_positive
1166 atomic_dec_if_positive(atomic_t *v)
1168 int dec, c = atomic_read(v);
1172 if (unlikely(dec < 0))
1174 } while (!atomic_try_cmpxchg(v, &c, dec));
1178 #define atomic_dec_if_positive atomic_dec_if_positive
/* Conditional spin-read wrappers over the smp_cond_load_*() primitives. */
#define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
#define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
/* Pull in the generic spinlock-based atomic64 implementation when selected. */
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
1188 #ifndef atomic64_read_acquire
1190 atomic64_read_acquire(const atomic64_t *v)
1192 return smp_load_acquire(&(v)->counter);
1194 #define atomic64_read_acquire atomic64_read_acquire
1197 #ifndef atomic64_set_release
1199 atomic64_set_release(atomic64_t *v, s64 i)
1201 smp_store_release(&(v)->counter, i);
1203 #define atomic64_set_release atomic64_set_release
/* Ordering fallbacks for atomic64_add_return(), built around the relaxed op. */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */
/* Ordering fallbacks for atomic64_fetch_add(), built around the relaxed op. */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */
/* Ordering fallbacks for atomic64_sub_return(), built around the relaxed op. */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */
/* Ordering fallbacks for atomic64_fetch_sub(), built around the relaxed op. */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */
1374 #ifndef atomic64_inc
1376 atomic64_inc(atomic64_t *v)
1380 #define atomic64_inc atomic64_inc
1383 #ifndef atomic64_inc_return_relaxed
1384 #ifdef atomic64_inc_return
1385 #define atomic64_inc_return_acquire atomic64_inc_return
1386 #define atomic64_inc_return_release atomic64_inc_return
1387 #define atomic64_inc_return_relaxed atomic64_inc_return
1388 #endif /* atomic64_inc_return */
1390 #ifndef atomic64_inc_return
1392 atomic64_inc_return(atomic64_t *v)
1394 return atomic64_add_return(1, v);
1396 #define atomic64_inc_return atomic64_inc_return
1399 #ifndef atomic64_inc_return_acquire
1401 atomic64_inc_return_acquire(atomic64_t *v)
1403 return atomic64_add_return_acquire(1, v);
1405 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1408 #ifndef atomic64_inc_return_release
1410 atomic64_inc_return_release(atomic64_t *v)
1412 return atomic64_add_return_release(1, v);
1414 #define atomic64_inc_return_release atomic64_inc_return_release
1417 #ifndef atomic64_inc_return_relaxed
1419 atomic64_inc_return_relaxed(atomic64_t *v)
1421 return atomic64_add_return_relaxed(1, v);
1423 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1426 #else /* atomic64_inc_return_relaxed */
1428 #ifndef atomic64_inc_return_acquire
1430 atomic64_inc_return_acquire(atomic64_t *v)
1432 s64 ret = atomic64_inc_return_relaxed(v);
1433 __atomic_acquire_fence();
1436 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1439 #ifndef atomic64_inc_return_release
1441 atomic64_inc_return_release(atomic64_t *v)
1443 __atomic_release_fence();
1444 return atomic64_inc_return_relaxed(v);
1446 #define atomic64_inc_return_release atomic64_inc_return_release
1449 #ifndef atomic64_inc_return
1451 atomic64_inc_return(atomic64_t *v)
1454 __atomic_pre_full_fence();
1455 ret = atomic64_inc_return_relaxed(v);
1456 __atomic_post_full_fence();
1459 #define atomic64_inc_return atomic64_inc_return
1462 #endif /* atomic64_inc_return_relaxed */
1464 #ifndef atomic64_fetch_inc_relaxed
1465 #ifdef atomic64_fetch_inc
1466 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
1467 #define atomic64_fetch_inc_release atomic64_fetch_inc
1468 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
1469 #endif /* atomic64_fetch_inc */
1471 #ifndef atomic64_fetch_inc
1473 atomic64_fetch_inc(atomic64_t *v)
1475 return atomic64_fetch_add(1, v);
1477 #define atomic64_fetch_inc atomic64_fetch_inc
1480 #ifndef atomic64_fetch_inc_acquire
1482 atomic64_fetch_inc_acquire(atomic64_t *v)
1484 return atomic64_fetch_add_acquire(1, v);
1486 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1489 #ifndef atomic64_fetch_inc_release
1491 atomic64_fetch_inc_release(atomic64_t *v)
1493 return atomic64_fetch_add_release(1, v);
1495 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1498 #ifndef atomic64_fetch_inc_relaxed
1500 atomic64_fetch_inc_relaxed(atomic64_t *v)
1502 return atomic64_fetch_add_relaxed(1, v);
1504 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1507 #else /* atomic64_fetch_inc_relaxed */
1509 #ifndef atomic64_fetch_inc_acquire
1511 atomic64_fetch_inc_acquire(atomic64_t *v)
1513 s64 ret = atomic64_fetch_inc_relaxed(v);
1514 __atomic_acquire_fence();
1517 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1520 #ifndef atomic64_fetch_inc_release
1522 atomic64_fetch_inc_release(atomic64_t *v)
1524 __atomic_release_fence();
1525 return atomic64_fetch_inc_relaxed(v);
1527 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1530 #ifndef atomic64_fetch_inc
1532 atomic64_fetch_inc(atomic64_t *v)
1535 __atomic_pre_full_fence();
1536 ret = atomic64_fetch_inc_relaxed(v);
1537 __atomic_post_full_fence();
1540 #define atomic64_fetch_inc atomic64_fetch_inc
1543 #endif /* atomic64_fetch_inc_relaxed */
1545 #ifndef atomic64_dec
1547 atomic64_dec(atomic64_t *v)
1551 #define atomic64_dec atomic64_dec
1554 #ifndef atomic64_dec_return_relaxed
1555 #ifdef atomic64_dec_return
1556 #define atomic64_dec_return_acquire atomic64_dec_return
1557 #define atomic64_dec_return_release atomic64_dec_return
1558 #define atomic64_dec_return_relaxed atomic64_dec_return
1559 #endif /* atomic64_dec_return */
1561 #ifndef atomic64_dec_return
1563 atomic64_dec_return(atomic64_t *v)
1565 return atomic64_sub_return(1, v);
1567 #define atomic64_dec_return atomic64_dec_return
1570 #ifndef atomic64_dec_return_acquire
1572 atomic64_dec_return_acquire(atomic64_t *v)
1574 return atomic64_sub_return_acquire(1, v);
1576 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1579 #ifndef atomic64_dec_return_release
1581 atomic64_dec_return_release(atomic64_t *v)
1583 return atomic64_sub_return_release(1, v);
1585 #define atomic64_dec_return_release atomic64_dec_return_release
1588 #ifndef atomic64_dec_return_relaxed
1590 atomic64_dec_return_relaxed(atomic64_t *v)
1592 return atomic64_sub_return_relaxed(1, v);
1594 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1597 #else /* atomic64_dec_return_relaxed */
1599 #ifndef atomic64_dec_return_acquire
1601 atomic64_dec_return_acquire(atomic64_t *v)
1603 s64 ret = atomic64_dec_return_relaxed(v);
1604 __atomic_acquire_fence();
1607 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1610 #ifndef atomic64_dec_return_release
1612 atomic64_dec_return_release(atomic64_t *v)
1614 __atomic_release_fence();
1615 return atomic64_dec_return_relaxed(v);
1617 #define atomic64_dec_return_release atomic64_dec_return_release
1620 #ifndef atomic64_dec_return
1622 atomic64_dec_return(atomic64_t *v)
1625 __atomic_pre_full_fence();
1626 ret = atomic64_dec_return_relaxed(v);
1627 __atomic_post_full_fence();
1630 #define atomic64_dec_return atomic64_dec_return
1633 #endif /* atomic64_dec_return_relaxed */
1635 #ifndef atomic64_fetch_dec_relaxed
1636 #ifdef atomic64_fetch_dec
1637 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
1638 #define atomic64_fetch_dec_release atomic64_fetch_dec
1639 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
1640 #endif /* atomic64_fetch_dec */
1642 #ifndef atomic64_fetch_dec
1644 atomic64_fetch_dec(atomic64_t *v)
1646 return atomic64_fetch_sub(1, v);
1648 #define atomic64_fetch_dec atomic64_fetch_dec
1651 #ifndef atomic64_fetch_dec_acquire
1653 atomic64_fetch_dec_acquire(atomic64_t *v)
1655 return atomic64_fetch_sub_acquire(1, v);
1657 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1660 #ifndef atomic64_fetch_dec_release
1662 atomic64_fetch_dec_release(atomic64_t *v)
1664 return atomic64_fetch_sub_release(1, v);
1666 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1669 #ifndef atomic64_fetch_dec_relaxed
1671 atomic64_fetch_dec_relaxed(atomic64_t *v)
1673 return atomic64_fetch_sub_relaxed(1, v);
1675 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1678 #else /* atomic64_fetch_dec_relaxed */
1680 #ifndef atomic64_fetch_dec_acquire
1682 atomic64_fetch_dec_acquire(atomic64_t *v)
1684 s64 ret = atomic64_fetch_dec_relaxed(v);
1685 __atomic_acquire_fence();
1688 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1691 #ifndef atomic64_fetch_dec_release
1693 atomic64_fetch_dec_release(atomic64_t *v)
1695 __atomic_release_fence();
1696 return atomic64_fetch_dec_relaxed(v);
1698 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1701 #ifndef atomic64_fetch_dec
1703 atomic64_fetch_dec(atomic64_t *v)
1706 __atomic_pre_full_fence();
1707 ret = atomic64_fetch_dec_relaxed(v);
1708 __atomic_post_full_fence();
1711 #define atomic64_fetch_dec atomic64_fetch_dec
1714 #endif /* atomic64_fetch_dec_relaxed */
/*
 * atomic64_fetch_and family: without a _relaxed variant all orderings alias
 * the full op; with one, acquire/release/full are built from it plus fences.
 */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
static inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
static inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
static inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */
1758 #ifndef atomic64_andnot
1760 atomic64_andnot(s64 i, atomic64_t *v)
1762 atomic64_and(~i, v);
1764 #define atomic64_andnot atomic64_andnot
1767 #ifndef atomic64_fetch_andnot_relaxed
1768 #ifdef atomic64_fetch_andnot
1769 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
1770 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
1771 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
1772 #endif /* atomic64_fetch_andnot */
1774 #ifndef atomic64_fetch_andnot
1776 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1778 return atomic64_fetch_and(~i, v);
1780 #define atomic64_fetch_andnot atomic64_fetch_andnot
1783 #ifndef atomic64_fetch_andnot_acquire
1785 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1787 return atomic64_fetch_and_acquire(~i, v);
1789 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1792 #ifndef atomic64_fetch_andnot_release
1794 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1796 return atomic64_fetch_and_release(~i, v);
1798 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1801 #ifndef atomic64_fetch_andnot_relaxed
1803 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1805 return atomic64_fetch_and_relaxed(~i, v);
1807 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1810 #else /* atomic64_fetch_andnot_relaxed */
1812 #ifndef atomic64_fetch_andnot_acquire
1814 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1816 s64 ret = atomic64_fetch_andnot_relaxed(i, v);
1817 __atomic_acquire_fence();
1820 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1823 #ifndef atomic64_fetch_andnot_release
1825 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1827 __atomic_release_fence();
1828 return atomic64_fetch_andnot_relaxed(i, v);
1830 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1833 #ifndef atomic64_fetch_andnot
1835 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1838 __atomic_pre_full_fence();
1839 ret = atomic64_fetch_andnot_relaxed(i, v);
1840 __atomic_post_full_fence();
1843 #define atomic64_fetch_andnot atomic64_fetch_andnot
1846 #endif /* atomic64_fetch_andnot_relaxed */
/*
 * atomic64_fetch_or family: without a _relaxed variant all orderings alias
 * the full op; with one, acquire/release/full are built from it plus fences.
 */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
static inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
static inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
static inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */
/*
 * atomic64_fetch_xor family: without a _relaxed variant all orderings alias
 * the full op; with one, acquire/release/full are built from it plus fences.
 */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
static inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
static inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
static inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */
/*
 * atomic64_xchg family: without a _relaxed variant all orderings alias the
 * full op; with one, acquire/release/full are built from it plus fences.
 */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
static inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
static inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
static inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */
/*
 * atomic64_cmpxchg family: without a _relaxed variant all orderings alias
 * the full op; with one, acquire/release/full are built from it plus fences.
 */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
static inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
static inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
static inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */
2016 #ifndef atomic64_try_cmpxchg_relaxed
2017 #ifdef atomic64_try_cmpxchg
2018 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
2019 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
2020 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
2021 #endif /* atomic64_try_cmpxchg */
2023 #ifndef atomic64_try_cmpxchg
2025 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2028 r = atomic64_cmpxchg(v, o, new);
2029 if (unlikely(r != o))
2031 return likely(r == o);
2033 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2036 #ifndef atomic64_try_cmpxchg_acquire
2038 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2041 r = atomic64_cmpxchg_acquire(v, o, new);
2042 if (unlikely(r != o))
2044 return likely(r == o);
2046 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2049 #ifndef atomic64_try_cmpxchg_release
2051 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2054 r = atomic64_cmpxchg_release(v, o, new);
2055 if (unlikely(r != o))
2057 return likely(r == o);
2059 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2062 #ifndef atomic64_try_cmpxchg_relaxed
2064 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2067 r = atomic64_cmpxchg_relaxed(v, o, new);
2068 if (unlikely(r != o))
2070 return likely(r == o);
2072 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
2075 #else /* atomic64_try_cmpxchg_relaxed */
2077 #ifndef atomic64_try_cmpxchg_acquire
2079 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2081 bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2082 __atomic_acquire_fence();
2085 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2088 #ifndef atomic64_try_cmpxchg_release
2090 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2092 __atomic_release_fence();
2093 return atomic64_try_cmpxchg_relaxed(v, old, new);
2095 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2098 #ifndef atomic64_try_cmpxchg
2100 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2103 __atomic_pre_full_fence();
2104 ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2105 __atomic_post_full_fence();
2108 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2111 #endif /* atomic64_try_cmpxchg_relaxed */
2113 #ifndef atomic64_sub_and_test
2115 * atomic64_sub_and_test - subtract value from variable and test result
2116 * @i: integer value to subtract
2117 * @v: pointer of type atomic64_t
2119 * Atomically subtracts @i from @v and returns
2120 * true if the result is zero, or false for all
2124 atomic64_sub_and_test(s64 i, atomic64_t *v)
2126 return atomic64_sub_return(i, v) == 0;
2128 #define atomic64_sub_and_test atomic64_sub_and_test
2131 #ifndef atomic64_dec_and_test
2133 * atomic64_dec_and_test - decrement and test
2134 * @v: pointer of type atomic64_t
2136 * Atomically decrements @v by 1 and
2137 * returns true if the result is 0, or false for all other
2141 atomic64_dec_and_test(atomic64_t *v)
2143 return atomic64_dec_return(v) == 0;
2145 #define atomic64_dec_and_test atomic64_dec_and_test
2148 #ifndef atomic64_inc_and_test
2150 * atomic64_inc_and_test - increment and test
2151 * @v: pointer of type atomic64_t
2153 * Atomically increments @v by 1
2154 * and returns true if the result is zero, or false for all
2158 atomic64_inc_and_test(atomic64_t *v)
2160 return atomic64_inc_return(v) == 0;
2162 #define atomic64_inc_and_test atomic64_inc_and_test
2165 #ifndef atomic64_add_negative
2167 * atomic64_add_negative - add and test if negative
2168 * @i: integer value to add
2169 * @v: pointer of type atomic64_t
2171 * Atomically adds @i to @v and returns true
2172 * if the result is negative, or false when
2173 * result is greater than or equal to zero.
2176 atomic64_add_negative(s64 i, atomic64_t *v)
2178 return atomic64_add_return(i, v) < 0;
2180 #define atomic64_add_negative atomic64_add_negative
2183 #ifndef atomic64_fetch_add_unless
2185 * atomic64_fetch_add_unless - add unless the number is already a given value
2186 * @v: pointer of type atomic64_t
2187 * @a: the amount to add to v...
2188 * @u: ...unless v is equal to u.
2190 * Atomically adds @a to @v, so long as @v was not already @u.
2191 * Returns original value of @v
2194 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2196 s64 c = atomic64_read(v);
2199 if (unlikely(c == u))
2201 } while (!atomic64_try_cmpxchg(v, &c, c + a));
2205 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
2208 #ifndef atomic64_add_unless
2210 * atomic64_add_unless - add unless the number is already a given value
2211 * @v: pointer of type atomic64_t
2212 * @a: the amount to add to v...
2213 * @u: ...unless v is equal to u.
2215 * Atomically adds @a to @v, if @v was not already @u.
2216 * Returns true if the addition was done.
2219 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2221 return atomic64_fetch_add_unless(v, a, u) != u;
2223 #define atomic64_add_unless atomic64_add_unless
2226 #ifndef atomic64_inc_not_zero
2228 * atomic64_inc_not_zero - increment unless the number is zero
2229 * @v: pointer of type atomic64_t
2231 * Atomically increments @v by 1, if @v is non-zero.
2232 * Returns true if the increment was done.
2235 atomic64_inc_not_zero(atomic64_t *v)
2237 return atomic64_add_unless(v, 1, 0);
2239 #define atomic64_inc_not_zero atomic64_inc_not_zero
2242 #ifndef atomic64_inc_unless_negative
2244 atomic64_inc_unless_negative(atomic64_t *v)
2246 s64 c = atomic64_read(v);
2249 if (unlikely(c < 0))
2251 } while (!atomic64_try_cmpxchg(v, &c, c + 1));
2255 #define atomic64_inc_unless_negative atomic64_inc_unless_negative
2258 #ifndef atomic64_dec_unless_positive
2260 atomic64_dec_unless_positive(atomic64_t *v)
2262 s64 c = atomic64_read(v);
2265 if (unlikely(c > 0))
2267 } while (!atomic64_try_cmpxchg(v, &c, c - 1));
2271 #define atomic64_dec_unless_positive atomic64_dec_unless_positive
2274 #ifndef atomic64_dec_if_positive
2276 atomic64_dec_if_positive(atomic64_t *v)
2278 s64 dec, c = atomic64_read(v);
2282 if (unlikely(dec < 0))
2284 } while (!atomic64_try_cmpxchg(v, &c, dec));
2288 #define atomic64_dec_if_positive atomic64_dec_if_positive
/* Spin (via smp_cond_load_*) until condition @c on the atomic's value holds. */
#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
2294 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2295 // 25de4a2804d70f57e994fe3b419148658bb5378a