// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality an arch's atomic.h file needs to define all
 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file provides atomic_read() that forwards to
 * arch_atomic_read() for actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
17 #ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
18 #define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
20 #include <linux/build_bug.h>
21 #include <linux/kasan-checks.h>
24 atomic_read(const atomic_t *v)
26 kasan_check_read(v, sizeof(*v));
27 return arch_atomic_read(v);
29 #define atomic_read atomic_read
31 #if defined(arch_atomic_read_acquire)
33 atomic_read_acquire(const atomic_t *v)
35 kasan_check_read(v, sizeof(*v));
36 return arch_atomic_read_acquire(v);
38 #define atomic_read_acquire atomic_read_acquire
42 atomic_set(atomic_t *v, int i)
44 kasan_check_write(v, sizeof(*v));
45 arch_atomic_set(v, i);
47 #define atomic_set atomic_set
49 #if defined(arch_atomic_set_release)
51 atomic_set_release(atomic_t *v, int i)
53 kasan_check_write(v, sizeof(*v));
54 arch_atomic_set_release(v, i);
56 #define atomic_set_release atomic_set_release
60 atomic_add(int i, atomic_t *v)
62 kasan_check_write(v, sizeof(*v));
63 arch_atomic_add(i, v);
65 #define atomic_add atomic_add
67 #if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
69 atomic_add_return(int i, atomic_t *v)
71 kasan_check_write(v, sizeof(*v));
72 return arch_atomic_add_return(i, v);
74 #define atomic_add_return atomic_add_return
77 #if defined(arch_atomic_add_return_acquire)
79 atomic_add_return_acquire(int i, atomic_t *v)
81 kasan_check_write(v, sizeof(*v));
82 return arch_atomic_add_return_acquire(i, v);
84 #define atomic_add_return_acquire atomic_add_return_acquire
87 #if defined(arch_atomic_add_return_release)
89 atomic_add_return_release(int i, atomic_t *v)
91 kasan_check_write(v, sizeof(*v));
92 return arch_atomic_add_return_release(i, v);
94 #define atomic_add_return_release atomic_add_return_release
97 #if defined(arch_atomic_add_return_relaxed)
99 atomic_add_return_relaxed(int i, atomic_t *v)
101 kasan_check_write(v, sizeof(*v));
102 return arch_atomic_add_return_relaxed(i, v);
104 #define atomic_add_return_relaxed atomic_add_return_relaxed
107 #if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
109 atomic_fetch_add(int i, atomic_t *v)
111 kasan_check_write(v, sizeof(*v));
112 return arch_atomic_fetch_add(i, v);
114 #define atomic_fetch_add atomic_fetch_add
117 #if defined(arch_atomic_fetch_add_acquire)
119 atomic_fetch_add_acquire(int i, atomic_t *v)
121 kasan_check_write(v, sizeof(*v));
122 return arch_atomic_fetch_add_acquire(i, v);
124 #define atomic_fetch_add_acquire atomic_fetch_add_acquire
127 #if defined(arch_atomic_fetch_add_release)
129 atomic_fetch_add_release(int i, atomic_t *v)
131 kasan_check_write(v, sizeof(*v));
132 return arch_atomic_fetch_add_release(i, v);
134 #define atomic_fetch_add_release atomic_fetch_add_release
137 #if defined(arch_atomic_fetch_add_relaxed)
139 atomic_fetch_add_relaxed(int i, atomic_t *v)
141 kasan_check_write(v, sizeof(*v));
142 return arch_atomic_fetch_add_relaxed(i, v);
144 #define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
148 atomic_sub(int i, atomic_t *v)
150 kasan_check_write(v, sizeof(*v));
151 arch_atomic_sub(i, v);
153 #define atomic_sub atomic_sub
155 #if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
157 atomic_sub_return(int i, atomic_t *v)
159 kasan_check_write(v, sizeof(*v));
160 return arch_atomic_sub_return(i, v);
162 #define atomic_sub_return atomic_sub_return
165 #if defined(arch_atomic_sub_return_acquire)
167 atomic_sub_return_acquire(int i, atomic_t *v)
169 kasan_check_write(v, sizeof(*v));
170 return arch_atomic_sub_return_acquire(i, v);
172 #define atomic_sub_return_acquire atomic_sub_return_acquire
175 #if defined(arch_atomic_sub_return_release)
177 atomic_sub_return_release(int i, atomic_t *v)
179 kasan_check_write(v, sizeof(*v));
180 return arch_atomic_sub_return_release(i, v);
182 #define atomic_sub_return_release atomic_sub_return_release
185 #if defined(arch_atomic_sub_return_relaxed)
187 atomic_sub_return_relaxed(int i, atomic_t *v)
189 kasan_check_write(v, sizeof(*v));
190 return arch_atomic_sub_return_relaxed(i, v);
192 #define atomic_sub_return_relaxed atomic_sub_return_relaxed
195 #if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
197 atomic_fetch_sub(int i, atomic_t *v)
199 kasan_check_write(v, sizeof(*v));
200 return arch_atomic_fetch_sub(i, v);
202 #define atomic_fetch_sub atomic_fetch_sub
205 #if defined(arch_atomic_fetch_sub_acquire)
207 atomic_fetch_sub_acquire(int i, atomic_t *v)
209 kasan_check_write(v, sizeof(*v));
210 return arch_atomic_fetch_sub_acquire(i, v);
212 #define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
215 #if defined(arch_atomic_fetch_sub_release)
217 atomic_fetch_sub_release(int i, atomic_t *v)
219 kasan_check_write(v, sizeof(*v));
220 return arch_atomic_fetch_sub_release(i, v);
222 #define atomic_fetch_sub_release atomic_fetch_sub_release
225 #if defined(arch_atomic_fetch_sub_relaxed)
227 atomic_fetch_sub_relaxed(int i, atomic_t *v)
229 kasan_check_write(v, sizeof(*v));
230 return arch_atomic_fetch_sub_relaxed(i, v);
232 #define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
/*
 * Instrumented atomic_inc*() wrappers.
 *
 * BUG FIX: atomic_inc() as given performed only the KASAN check and never
 * called arch_atomic_inc(), i.e. the increment itself was dropped.  The
 * forwarding call is restored below, matching every other wrapper in this
 * generated file.
 */
#if defined(arch_atomic_inc)
static inline void
atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static inline int
atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static inline int
atomic_inc_return_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static inline int
atomic_inc_return_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#if defined(arch_atomic_fetch_inc)
static inline int
atomic_fetch_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static inline int
atomic_fetch_inc_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif
/*
 * Instrumented atomic_dec*() wrappers.
 *
 * BUG FIX: atomic_dec() as given performed only the KASAN check and never
 * called arch_atomic_dec(), i.e. the decrement itself was dropped.  The
 * forwarding call is restored below.
 */
#if defined(arch_atomic_dec)
static inline void
atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static inline int
atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static inline int
atomic_dec_return_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static inline int
atomic_dec_return_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#if defined(arch_atomic_fetch_dec)
static inline int
atomic_fetch_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static inline int
atomic_fetch_dec_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif
416 atomic_and(int i, atomic_t *v)
418 kasan_check_write(v, sizeof(*v));
419 arch_atomic_and(i, v);
421 #define atomic_and atomic_and
423 #if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
425 atomic_fetch_and(int i, atomic_t *v)
427 kasan_check_write(v, sizeof(*v));
428 return arch_atomic_fetch_and(i, v);
430 #define atomic_fetch_and atomic_fetch_and
433 #if defined(arch_atomic_fetch_and_acquire)
435 atomic_fetch_and_acquire(int i, atomic_t *v)
437 kasan_check_write(v, sizeof(*v));
438 return arch_atomic_fetch_and_acquire(i, v);
440 #define atomic_fetch_and_acquire atomic_fetch_and_acquire
443 #if defined(arch_atomic_fetch_and_release)
445 atomic_fetch_and_release(int i, atomic_t *v)
447 kasan_check_write(v, sizeof(*v));
448 return arch_atomic_fetch_and_release(i, v);
450 #define atomic_fetch_and_release atomic_fetch_and_release
453 #if defined(arch_atomic_fetch_and_relaxed)
455 atomic_fetch_and_relaxed(int i, atomic_t *v)
457 kasan_check_write(v, sizeof(*v));
458 return arch_atomic_fetch_and_relaxed(i, v);
460 #define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
463 #if defined(arch_atomic_andnot)
465 atomic_andnot(int i, atomic_t *v)
467 kasan_check_write(v, sizeof(*v));
468 arch_atomic_andnot(i, v);
470 #define atomic_andnot atomic_andnot
473 #if defined(arch_atomic_fetch_andnot)
475 atomic_fetch_andnot(int i, atomic_t *v)
477 kasan_check_write(v, sizeof(*v));
478 return arch_atomic_fetch_andnot(i, v);
480 #define atomic_fetch_andnot atomic_fetch_andnot
483 #if defined(arch_atomic_fetch_andnot_acquire)
485 atomic_fetch_andnot_acquire(int i, atomic_t *v)
487 kasan_check_write(v, sizeof(*v));
488 return arch_atomic_fetch_andnot_acquire(i, v);
490 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
493 #if defined(arch_atomic_fetch_andnot_release)
495 atomic_fetch_andnot_release(int i, atomic_t *v)
497 kasan_check_write(v, sizeof(*v));
498 return arch_atomic_fetch_andnot_release(i, v);
500 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
503 #if defined(arch_atomic_fetch_andnot_relaxed)
505 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
507 kasan_check_write(v, sizeof(*v));
508 return arch_atomic_fetch_andnot_relaxed(i, v);
510 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
514 atomic_or(int i, atomic_t *v)
516 kasan_check_write(v, sizeof(*v));
517 arch_atomic_or(i, v);
519 #define atomic_or atomic_or
521 #if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
523 atomic_fetch_or(int i, atomic_t *v)
525 kasan_check_write(v, sizeof(*v));
526 return arch_atomic_fetch_or(i, v);
528 #define atomic_fetch_or atomic_fetch_or
531 #if defined(arch_atomic_fetch_or_acquire)
533 atomic_fetch_or_acquire(int i, atomic_t *v)
535 kasan_check_write(v, sizeof(*v));
536 return arch_atomic_fetch_or_acquire(i, v);
538 #define atomic_fetch_or_acquire atomic_fetch_or_acquire
541 #if defined(arch_atomic_fetch_or_release)
543 atomic_fetch_or_release(int i, atomic_t *v)
545 kasan_check_write(v, sizeof(*v));
546 return arch_atomic_fetch_or_release(i, v);
548 #define atomic_fetch_or_release atomic_fetch_or_release
551 #if defined(arch_atomic_fetch_or_relaxed)
553 atomic_fetch_or_relaxed(int i, atomic_t *v)
555 kasan_check_write(v, sizeof(*v));
556 return arch_atomic_fetch_or_relaxed(i, v);
558 #define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
562 atomic_xor(int i, atomic_t *v)
564 kasan_check_write(v, sizeof(*v));
565 arch_atomic_xor(i, v);
567 #define atomic_xor atomic_xor
569 #if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
571 atomic_fetch_xor(int i, atomic_t *v)
573 kasan_check_write(v, sizeof(*v));
574 return arch_atomic_fetch_xor(i, v);
576 #define atomic_fetch_xor atomic_fetch_xor
579 #if defined(arch_atomic_fetch_xor_acquire)
581 atomic_fetch_xor_acquire(int i, atomic_t *v)
583 kasan_check_write(v, sizeof(*v));
584 return arch_atomic_fetch_xor_acquire(i, v);
586 #define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
589 #if defined(arch_atomic_fetch_xor_release)
591 atomic_fetch_xor_release(int i, atomic_t *v)
593 kasan_check_write(v, sizeof(*v));
594 return arch_atomic_fetch_xor_release(i, v);
596 #define atomic_fetch_xor_release atomic_fetch_xor_release
599 #if defined(arch_atomic_fetch_xor_relaxed)
601 atomic_fetch_xor_relaxed(int i, atomic_t *v)
603 kasan_check_write(v, sizeof(*v));
604 return arch_atomic_fetch_xor_relaxed(i, v);
606 #define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
609 #if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
611 atomic_xchg(atomic_t *v, int i)
613 kasan_check_write(v, sizeof(*v));
614 return arch_atomic_xchg(v, i);
616 #define atomic_xchg atomic_xchg
619 #if defined(arch_atomic_xchg_acquire)
621 atomic_xchg_acquire(atomic_t *v, int i)
623 kasan_check_write(v, sizeof(*v));
624 return arch_atomic_xchg_acquire(v, i);
626 #define atomic_xchg_acquire atomic_xchg_acquire
629 #if defined(arch_atomic_xchg_release)
631 atomic_xchg_release(atomic_t *v, int i)
633 kasan_check_write(v, sizeof(*v));
634 return arch_atomic_xchg_release(v, i);
636 #define atomic_xchg_release atomic_xchg_release
639 #if defined(arch_atomic_xchg_relaxed)
641 atomic_xchg_relaxed(atomic_t *v, int i)
643 kasan_check_write(v, sizeof(*v));
644 return arch_atomic_xchg_relaxed(v, i);
646 #define atomic_xchg_relaxed atomic_xchg_relaxed
649 #if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
651 atomic_cmpxchg(atomic_t *v, int old, int new)
653 kasan_check_write(v, sizeof(*v));
654 return arch_atomic_cmpxchg(v, old, new);
656 #define atomic_cmpxchg atomic_cmpxchg
659 #if defined(arch_atomic_cmpxchg_acquire)
661 atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
663 kasan_check_write(v, sizeof(*v));
664 return arch_atomic_cmpxchg_acquire(v, old, new);
666 #define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
669 #if defined(arch_atomic_cmpxchg_release)
671 atomic_cmpxchg_release(atomic_t *v, int old, int new)
673 kasan_check_write(v, sizeof(*v));
674 return arch_atomic_cmpxchg_release(v, old, new);
676 #define atomic_cmpxchg_release atomic_cmpxchg_release
679 #if defined(arch_atomic_cmpxchg_relaxed)
681 atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
683 kasan_check_write(v, sizeof(*v));
684 return arch_atomic_cmpxchg_relaxed(v, old, new);
686 #define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
/*
 * Instrumented atomic_try_cmpxchg*() wrappers.  Both the atomic and the
 * 'old' expected-value slot are written on failure, so both addresses are
 * KASAN-checked as writes.
 */
#if defined(arch_atomic_try_cmpxchg)
static inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif
/*
 * Instrumented op-and-test wrappers: perform the arch_ operation and report
 * the resulting condition (zero / negative) as bool.
 */
#if defined(arch_atomic_sub_and_test)
static inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static inline bool
atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static inline bool
atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static inline bool
atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif
/*
 * Instrumented conditional-update wrappers (add_unless / inc_not_zero /
 * inc_unless_negative / dec_unless_positive / dec_if_positive).
 */
#if defined(arch_atomic_fetch_add_unless)
static inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static inline bool
atomic_inc_not_zero(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static inline int
atomic_dec_if_positive(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
834 atomic64_read(const atomic64_t *v)
836 kasan_check_read(v, sizeof(*v));
837 return arch_atomic64_read(v);
839 #define atomic64_read atomic64_read
841 #if defined(arch_atomic64_read_acquire)
843 atomic64_read_acquire(const atomic64_t *v)
845 kasan_check_read(v, sizeof(*v));
846 return arch_atomic64_read_acquire(v);
848 #define atomic64_read_acquire atomic64_read_acquire
852 atomic64_set(atomic64_t *v, s64 i)
854 kasan_check_write(v, sizeof(*v));
855 arch_atomic64_set(v, i);
857 #define atomic64_set atomic64_set
859 #if defined(arch_atomic64_set_release)
861 atomic64_set_release(atomic64_t *v, s64 i)
863 kasan_check_write(v, sizeof(*v));
864 arch_atomic64_set_release(v, i);
866 #define atomic64_set_release atomic64_set_release
870 atomic64_add(s64 i, atomic64_t *v)
872 kasan_check_write(v, sizeof(*v));
873 arch_atomic64_add(i, v);
875 #define atomic64_add atomic64_add
877 #if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
879 atomic64_add_return(s64 i, atomic64_t *v)
881 kasan_check_write(v, sizeof(*v));
882 return arch_atomic64_add_return(i, v);
884 #define atomic64_add_return atomic64_add_return
887 #if defined(arch_atomic64_add_return_acquire)
889 atomic64_add_return_acquire(s64 i, atomic64_t *v)
891 kasan_check_write(v, sizeof(*v));
892 return arch_atomic64_add_return_acquire(i, v);
894 #define atomic64_add_return_acquire atomic64_add_return_acquire
897 #if defined(arch_atomic64_add_return_release)
899 atomic64_add_return_release(s64 i, atomic64_t *v)
901 kasan_check_write(v, sizeof(*v));
902 return arch_atomic64_add_return_release(i, v);
904 #define atomic64_add_return_release atomic64_add_return_release
907 #if defined(arch_atomic64_add_return_relaxed)
909 atomic64_add_return_relaxed(s64 i, atomic64_t *v)
911 kasan_check_write(v, sizeof(*v));
912 return arch_atomic64_add_return_relaxed(i, v);
914 #define atomic64_add_return_relaxed atomic64_add_return_relaxed
917 #if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
919 atomic64_fetch_add(s64 i, atomic64_t *v)
921 kasan_check_write(v, sizeof(*v));
922 return arch_atomic64_fetch_add(i, v);
924 #define atomic64_fetch_add atomic64_fetch_add
927 #if defined(arch_atomic64_fetch_add_acquire)
929 atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
931 kasan_check_write(v, sizeof(*v));
932 return arch_atomic64_fetch_add_acquire(i, v);
934 #define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
937 #if defined(arch_atomic64_fetch_add_release)
939 atomic64_fetch_add_release(s64 i, atomic64_t *v)
941 kasan_check_write(v, sizeof(*v));
942 return arch_atomic64_fetch_add_release(i, v);
944 #define atomic64_fetch_add_release atomic64_fetch_add_release
947 #if defined(arch_atomic64_fetch_add_relaxed)
949 atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
951 kasan_check_write(v, sizeof(*v));
952 return arch_atomic64_fetch_add_relaxed(i, v);
954 #define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
958 atomic64_sub(s64 i, atomic64_t *v)
960 kasan_check_write(v, sizeof(*v));
961 arch_atomic64_sub(i, v);
963 #define atomic64_sub atomic64_sub
965 #if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
967 atomic64_sub_return(s64 i, atomic64_t *v)
969 kasan_check_write(v, sizeof(*v));
970 return arch_atomic64_sub_return(i, v);
972 #define atomic64_sub_return atomic64_sub_return
975 #if defined(arch_atomic64_sub_return_acquire)
977 atomic64_sub_return_acquire(s64 i, atomic64_t *v)
979 kasan_check_write(v, sizeof(*v));
980 return arch_atomic64_sub_return_acquire(i, v);
982 #define atomic64_sub_return_acquire atomic64_sub_return_acquire
985 #if defined(arch_atomic64_sub_return_release)
987 atomic64_sub_return_release(s64 i, atomic64_t *v)
989 kasan_check_write(v, sizeof(*v));
990 return arch_atomic64_sub_return_release(i, v);
992 #define atomic64_sub_return_release atomic64_sub_return_release
995 #if defined(arch_atomic64_sub_return_relaxed)
997 atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
999 kasan_check_write(v, sizeof(*v));
1000 return arch_atomic64_sub_return_relaxed(i, v);
1002 #define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
1005 #if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
1007 atomic64_fetch_sub(s64 i, atomic64_t *v)
1009 kasan_check_write(v, sizeof(*v));
1010 return arch_atomic64_fetch_sub(i, v);
1012 #define atomic64_fetch_sub atomic64_fetch_sub
1015 #if defined(arch_atomic64_fetch_sub_acquire)
1017 atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1019 kasan_check_write(v, sizeof(*v));
1020 return arch_atomic64_fetch_sub_acquire(i, v);
1022 #define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
1025 #if defined(arch_atomic64_fetch_sub_release)
1027 atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1029 kasan_check_write(v, sizeof(*v));
1030 return arch_atomic64_fetch_sub_release(i, v);
1032 #define atomic64_fetch_sub_release atomic64_fetch_sub_release
1035 #if defined(arch_atomic64_fetch_sub_relaxed)
1037 atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
1039 kasan_check_write(v, sizeof(*v));
1040 return arch_atomic64_fetch_sub_relaxed(i, v);
1042 #define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
/*
 * Instrumented atomic64_inc*() wrappers: KASAN-check the target, then
 * forward to the arch_ implementation.
 */
#if defined(arch_atomic64_inc)
static inline void
atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static inline s64
atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#if defined(arch_atomic64_fetch_inc)
static inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif
/*
 * Instrumented atomic64_dec*() wrappers: KASAN-check the target, then
 * forward to the arch_ implementation.
 */
#if defined(arch_atomic64_dec)
static inline void
atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif

#if defined(arch_atomic64_dec_return)
static inline s64
atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#if defined(arch_atomic64_fetch_dec)
static inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif
1226 atomic64_and(s64 i, atomic64_t *v)
1228 kasan_check_write(v, sizeof(*v));
1229 arch_atomic64_and(i, v);
1231 #define atomic64_and atomic64_and
1233 #if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
1235 atomic64_fetch_and(s64 i, atomic64_t *v)
1237 kasan_check_write(v, sizeof(*v));
1238 return arch_atomic64_fetch_and(i, v);
1240 #define atomic64_fetch_and atomic64_fetch_and
1243 #if defined(arch_atomic64_fetch_and_acquire)
1245 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1247 kasan_check_write(v, sizeof(*v));
1248 return arch_atomic64_fetch_and_acquire(i, v);
1250 #define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
1253 #if defined(arch_atomic64_fetch_and_release)
1255 atomic64_fetch_and_release(s64 i, atomic64_t *v)
1257 kasan_check_write(v, sizeof(*v));
1258 return arch_atomic64_fetch_and_release(i, v);
1260 #define atomic64_fetch_and_release atomic64_fetch_and_release
1263 #if defined(arch_atomic64_fetch_and_relaxed)
1265 atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
1267 kasan_check_write(v, sizeof(*v));
1268 return arch_atomic64_fetch_and_relaxed(i, v);
1270 #define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
1273 #if defined(arch_atomic64_andnot)
1275 atomic64_andnot(s64 i, atomic64_t *v)
1277 kasan_check_write(v, sizeof(*v));
1278 arch_atomic64_andnot(i, v);
1280 #define atomic64_andnot atomic64_andnot
1283 #if defined(arch_atomic64_fetch_andnot)
1285 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1287 kasan_check_write(v, sizeof(*v));
1288 return arch_atomic64_fetch_andnot(i, v);
1290 #define atomic64_fetch_andnot atomic64_fetch_andnot
1293 #if defined(arch_atomic64_fetch_andnot_acquire)
1295 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1297 kasan_check_write(v, sizeof(*v));
1298 return arch_atomic64_fetch_andnot_acquire(i, v);
1300 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1303 #if defined(arch_atomic64_fetch_andnot_release)
1305 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1307 kasan_check_write(v, sizeof(*v));
1308 return arch_atomic64_fetch_andnot_release(i, v);
1310 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1313 #if defined(arch_atomic64_fetch_andnot_relaxed)
1315 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1317 kasan_check_write(v, sizeof(*v));
1318 return arch_atomic64_fetch_andnot_relaxed(i, v);
1320 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1324 atomic64_or(s64 i, atomic64_t *v)
1326 kasan_check_write(v, sizeof(*v));
1327 arch_atomic64_or(i, v);
1329 #define atomic64_or atomic64_or
1331 #if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
1333 atomic64_fetch_or(s64 i, atomic64_t *v)
1335 kasan_check_write(v, sizeof(*v));
1336 return arch_atomic64_fetch_or(i, v);
1338 #define atomic64_fetch_or atomic64_fetch_or
1341 #if defined(arch_atomic64_fetch_or_acquire)
1343 atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
1345 kasan_check_write(v, sizeof(*v));
1346 return arch_atomic64_fetch_or_acquire(i, v);
1348 #define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
1351 #if defined(arch_atomic64_fetch_or_release)
1353 atomic64_fetch_or_release(s64 i, atomic64_t *v)
1355 kasan_check_write(v, sizeof(*v));
1356 return arch_atomic64_fetch_or_release(i, v);
1358 #define atomic64_fetch_or_release atomic64_fetch_or_release
1361 #if defined(arch_atomic64_fetch_or_relaxed)
1363 atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
1365 kasan_check_write(v, sizeof(*v));
1366 return arch_atomic64_fetch_or_relaxed(i, v);
1368 #define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
1372 atomic64_xor(s64 i, atomic64_t *v)
1374 kasan_check_write(v, sizeof(*v));
1375 arch_atomic64_xor(i, v);
1377 #define atomic64_xor atomic64_xor
1379 #if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
1381 atomic64_fetch_xor(s64 i, atomic64_t *v)
1383 kasan_check_write(v, sizeof(*v));
1384 return arch_atomic64_fetch_xor(i, v);
1386 #define atomic64_fetch_xor atomic64_fetch_xor
1389 #if defined(arch_atomic64_fetch_xor_acquire)
1391 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
1393 kasan_check_write(v, sizeof(*v));
1394 return arch_atomic64_fetch_xor_acquire(i, v);
1396 #define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
1399 #if defined(arch_atomic64_fetch_xor_release)
1401 atomic64_fetch_xor_release(s64 i, atomic64_t *v)
1403 kasan_check_write(v, sizeof(*v));
1404 return arch_atomic64_fetch_xor_release(i, v);
1406 #define atomic64_fetch_xor_release atomic64_fetch_xor_release
1409 #if defined(arch_atomic64_fetch_xor_relaxed)
1411 atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
1413 kasan_check_write(v, sizeof(*v));
1414 return arch_atomic64_fetch_xor_relaxed(i, v);
1416 #define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
1419 #if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
1421 atomic64_xchg(atomic64_t *v, s64 i)
1423 kasan_check_write(v, sizeof(*v));
1424 return arch_atomic64_xchg(v, i);
1426 #define atomic64_xchg atomic64_xchg
1429 #if defined(arch_atomic64_xchg_acquire)
1431 atomic64_xchg_acquire(atomic64_t *v, s64 i)
1433 kasan_check_write(v, sizeof(*v));
1434 return arch_atomic64_xchg_acquire(v, i);
1436 #define atomic64_xchg_acquire atomic64_xchg_acquire
1439 #if defined(arch_atomic64_xchg_release)
1441 atomic64_xchg_release(atomic64_t *v, s64 i)
1443 kasan_check_write(v, sizeof(*v));
1444 return arch_atomic64_xchg_release(v, i);
1446 #define atomic64_xchg_release atomic64_xchg_release
1449 #if defined(arch_atomic64_xchg_relaxed)
1451 atomic64_xchg_relaxed(atomic64_t *v, s64 i)
1453 kasan_check_write(v, sizeof(*v));
1454 return arch_atomic64_xchg_relaxed(v, i);
1456 #define atomic64_xchg_relaxed atomic64_xchg_relaxed
1459 #if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
1461 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
1463 kasan_check_write(v, sizeof(*v));
1464 return arch_atomic64_cmpxchg(v, old, new);
1466 #define atomic64_cmpxchg atomic64_cmpxchg
1469 #if defined(arch_atomic64_cmpxchg_acquire)
1471 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
1473 kasan_check_write(v, sizeof(*v));
1474 return arch_atomic64_cmpxchg_acquire(v, old, new);
1476 #define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
1479 #if defined(arch_atomic64_cmpxchg_release)
1481 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
1483 kasan_check_write(v, sizeof(*v));
1484 return arch_atomic64_cmpxchg_release(v, old, new);
1486 #define atomic64_cmpxchg_release atomic64_cmpxchg_release
1489 #if defined(arch_atomic64_cmpxchg_relaxed)
1491 atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
1493 kasan_check_write(v, sizeof(*v));
1494 return arch_atomic64_cmpxchg_relaxed(v, old, new);
1496 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
/*
 * KASAN-instrumented wrappers for the arch_atomic64_try_cmpxchg*()
 * ordering variants.  Both the atomic @v and the caller's @old slot are
 * instrumented as writes: on failure the primitive updates *@old with the
 * value actually observed.
 */
#if defined(arch_atomic64_try_cmpxchg)
static inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif
/*
 * KASAN-instrumented wrappers for the boolean-result arch primitives
 * (sub_and_test, dec_and_test, inc_and_test, add_negative); each checks
 * @v for writability before forwarding.
 */
#if defined(arch_atomic64_sub_and_test)
static inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif
/*
 * KASAN-instrumented wrappers for the conditional arch primitives
 * (fetch_add_unless, add_unless, inc_not_zero, inc_unless_negative,
 * dec_unless_positive, dec_if_positive); each checks @v for writability
 * before forwarding to the arch_ implementation.
 */
#if defined(arch_atomic64_fetch_add_unless)
static inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
/*
 * Type-generic xchg() macros with KASAN instrumentation.  @ptr is
 * evaluated exactly once into a local (__ai_ptr) so the write check and
 * the arch_ primitive see the same pointer; the statement expression
 * yields the arch_ call's result.
 */
#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
/*
 * Type-generic cmpxchg() macros with KASAN instrumentation.  @ptr is
 * evaluated once into __ai_ptr before the write check and the forwarded
 * arch_cmpxchg*() call.
 */
#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
/*
 * 64-bit cmpxchg64() macro family with KASAN instrumentation; same
 * single-evaluation pattern as cmpxchg() above, forwarding to the
 * arch_cmpxchg64*() primitives.
 */
#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
/*
 * Unconditional instrumented macros: CPU-local and sync cmpxchg, plus the
 * double-word variants.  The _double forms operate on two adjacent words,
 * hence the 2 * sizeof(*__ai_ptr) access-size check.
 */
#define cmpxchg_local(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg64_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);			\
})

#define sync_cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})
1787 #endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
1788 // b29b625d5de9280f680e42c7be859b55b15e5f6a