#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */
  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept
    { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept
    { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i,
          memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
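  // Illustrative usage sketch (not part of the header itself): driving the
  // atomic_bool members defined above from user code.  The identifiers
  // `ready', `publish' and `consume' are hypothetical example names.
  //
  //   #include <atomic>
  //
  //   std::atomic_bool ready(false);
  //
  //   void publish() noexcept
  //   { ready.store(true, std::memory_order_release); }
  //
  //   bool consume() noexcept
  //   { return ready.load(std::memory_order_acquire); }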
  /// Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
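  // Illustrative usage sketch (not part of the header itself): the primary
  // template above handles any trivially copyable _Tp by forwarding to the
  // __atomic_{load,store,exchange,compare_exchange} built-ins.  The type
  // `Point' and the function `move_right' are hypothetical example names.
  //
  //   #include <atomic>
  //
  //   struct Point { int x; int y; };            // trivially copyable
  //
  //   std::atomic<Point> origin(Point{0, 0});
  //
  //   void move_right() noexcept
  //   {
  //     Point expected = origin.load();
  //     Point desired;
  //     do
  //       desired = Point{expected.x + 1, expected.y};
  //     while (!origin.compare_exchange_weak(expected, desired));
  //   }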
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*                      __pointer_type;
      typedef __atomic_base<_Tp*>       __base_type;
      __base_type                       _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
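  // Illustrative usage sketch (not part of the header itself): the pointer
  // partial specialization above steps the stored pointer in units of the
  // pointed-to type.  The identifiers `buffer', `cursor' and `take_slot'
  // are hypothetical example names.
  //
  //   #include <atomic>
  //
  //   int buffer[64];
  //   std::atomic<int*> cursor(buffer);
  //
  //   int* take_slot() noexcept
  //   { return cursor.fetch_add(1, std::memory_order_relaxed); }
  //   // returns the previous position; the stored pointer advances by one int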
  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool              __integral_type;
      typedef atomic_bool       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char              __integral_type;
      typedef atomic_char       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char       __integral_type;
      typedef atomic_schar      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char     __integral_type;
      typedef atomic_uchar      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short             __integral_type;
      typedef atomic_short      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short    __integral_type;
      typedef atomic_ushort     __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : public atomic_int
    {
      typedef int               __integral_type;
      typedef atomic_int        __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int      __integral_type;
      typedef atomic_uint       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long              __integral_type;
      typedef atomic_long       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long     __integral_type;
      typedef atomic_ulong      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long         __integral_type;
      typedef atomic_llong      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t           __integral_type;
      typedef atomic_wchar_t    __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t          __integral_type;
      typedef atomic_char16_t   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t          __integral_type;
      typedef atomic_char32_t   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
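  // Illustrative usage sketch (not part of the header itself): each explicit
  // specialization above merely re-exports the conversion and assignment
  // operators of its base (atomic_bool, or the __atomic_base typedefs
  // atomic_char, atomic_int, ...), which also supply the integral fetch_*
  // and increment operators.  The identifier `hits' is a hypothetical
  // example name.
  //
  //   #include <atomic>
  //
  //   std::atomic<unsigned long> hits(0);
  //
  //   void record_hit() noexcept
  //   { ++hits; }                                // atomic increment
  //
  //   unsigned long snapshot() noexcept
  //   { return hits.load(std::memory_order_relaxed); }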
  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
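  // Illustrative usage sketch (not part of the header itself): the
  // atomic_flag free functions above are the C-compatible spelling of
  // test_and_set/clear, which is enough for a minimal spin lock.  The
  // identifiers `lock_flag', `lock' and `unlock' are hypothetical example
  // names.
  //
  //   #include <atomic>
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void lock() noexcept
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(
  //              &lock_flag, std::memory_order_acquire))
  //       { /* spin until the previous value was clear */ }
  //   }
  //
  //   void unlock() noexcept
  //   { std::atomic_flag_clear_explicit(&lock_flag, std::memory_order_release); }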
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
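  // Illustrative usage sketch (not part of the header itself): the free
  // functions above mirror the member functions, passing the expected value
  // through a pointer as in the C-style interface.  The identifiers
  // `counter' and `claim_zero' are hypothetical example names.
  //
  //   #include <atomic>
  //
  //   std::atomic<int> counter(0);
  //
  //   bool claim_zero() noexcept
  //   {
  //     int expected = 0;
  //     // succeeds only for the first caller that observes 0
  //     return std::atomic_compare_exchange_strong(&counter, &expected, 1);
  //   }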
  // Function templates for atomic_integral operations only, using
  // __atomic_base.  The template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
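  // Illustrative usage sketch (not part of the header itself): because these
  // overloads take __atomic_base<_ITp>*, they accept the integral
  // std::atomic specializations (which derive from an __atomic_base typedef)
  // but not the generic std::atomic<_Tp>.  The identifiers `total' and
  // `add_sample' are hypothetical example names.
  //
  //   #include <atomic>
  //
  //   std::atomic<long> total(0);
  //
  //   long add_sample(long value) noexcept
  //   {
  //     // returns the value of `total' before the addition
  //     return std::atomic_fetch_add_explicit(&total, value,
  //                                           std::memory_order_relaxed);
  //   }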
  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
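  // Illustrative usage sketch (not part of the header itself): the pointer
  // overloads above advance the stored pointer in units of the pointed-to
  // object, like the fetch_add/fetch_sub members.  The identifiers `slots',
  // `next' and `grab' are hypothetical example names.
  //
  //   #include <atomic>
  //
  //   double slots[16];
  //   std::atomic<double*> next(slots);
  //
  //   double* grab() noexcept
  //   { return std::atomic_fetch_add(&next, 1); }  // previous slot pointer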
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC