#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((always_inline))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
  // Compute the memory order to use on compare-exchange failure from the
  // success order: any release component is dropped.
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
                        | (__m & __memory_order_modifier_mask));
  }
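  // Illustrative only, not part of this header: the failure order computed
  // above simply drops any release component of the success order, e.g.
  //
  //   static_assert(__cmpexch_failure_order(memory_order_acq_rel)
  //                 == memory_order_acquire, "");
  //   static_assert(__cmpexch_failure_order(memory_order_release)
  //                 == memory_order_relaxed, "");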
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
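  // Example, illustrative only and not part of this header: pairing the
  // fences above with relaxed atomic operations.  Assumes std::atomic<bool>
  // from <atomic>; the names __ready, __payload, __producer and __consumer
  // are hypothetical.
  //
  //   std::atomic<bool> __ready{false};
  //   int __payload;
  //
  //   void __producer()
  //   {
  //     __payload = 42;
  //     std::atomic_thread_fence(std::memory_order_release);
  //     __ready.store(true, std::memory_order_relaxed);
  //   }
  //
  //   void __consumer()
  //   {
  //     while (!__ready.load(std::memory_order_relaxed))
  //       { }
  //     std::atomic_thread_fence(std::memory_order_acquire);
  //     // __payload is guaranteed to be 42 here.
  //   }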
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
  template<typename _IntTp>
    struct __atomic_base;
#define ATOMIC_VAR_INIT(_VI) { _VI }
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    typedef bool __atomic_flag_data_type;
#else
    typedef unsigned char __atomic_flag_data_type;
#endif
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C
#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }
    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
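  // Example, illustrative only and not part of this header: atomic_flag as a
  // minimal spin lock built from the members above.  The names __lock and
  // __critical are hypothetical.
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __critical()
  //   {
  //     while (__lock.test_and_set(std::memory_order_acquire))
  //       { }                                        // spin until clear
  //     // ... critical section ...
  //     __lock.clear(std::memory_order_release);     // release the lock
  //   }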
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
      typedef _ITp __int_type;

      __int_type _M_i;
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      { store(__i); return __i; }

      __int_type
      operator=(__int_type __i) volatile noexcept
      { store(__i); return __i; }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }
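      // Example, illustrative only and not part of this header: exchange as a
      // one-shot claim.  Assumes std::atomic<bool> from <atomic>; the names
      // __claimed and __try_claim are hypothetical.
      //
      //   std::atomic<bool> __claimed{false};
      //
      //   bool __try_claim()
      //   { return !__claimed.exchange(true, std::memory_order_acq_rel); }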
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
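      // Example, illustrative only and not part of this header: the usual
      // compare-exchange retry loop built on the members above, atomically
      // doubling a value.  Assumes std::atomic<int> from <atomic>; the name
      // __atomic_double is hypothetical.
      //
      //   int __atomic_double(std::atomic<int>& __a)
      //   {
      //     int __old = __a.load(std::memory_order_relaxed);
      //     // On failure __old is reloaded with the current value and the
      //     // loop retries; weak CAS may also fail spuriously.
      //     while (!__a.compare_exchange_weak(__old, __old * 2,
      //                                       std::memory_order_acq_rel,
      //                                       std::memory_order_relaxed))
      //       { }
      //     return __old * 2;
      //   }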
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
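  // Example, illustrative only and not part of this header: the fetch_*
  // members above back plain atomic arithmetic, e.g. a relaxed event counter.
  // Assumes std::atomic<unsigned> from <atomic>; the names __events and
  // __record_event are hypothetical.
  //
  //   std::atomic<unsigned> __events{0};
  //
  //   void __record_event()
  //   { __events.fetch_add(1, std::memory_order_relaxed); }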
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out so the ptrdiff_t argument is scaled to whole elements.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { store(__p); return __p; }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { store(__p); return __p; }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(_M_type_size(1), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), nullptr); }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
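  // Example, illustrative only and not part of this header: pointer fetch_add
  // advances by whole elements, since the ptrdiff_t argument is scaled by
  // sizeof(_PTp) through _M_type_size.  Assumes std::atomic<int*> from
  // <atomic>; the names __buf, __cursor and __claim_two are hypothetical.
  //
  //   int __buf[4] = {0, 1, 2, 3};
  //   std::atomic<int*> __cursor{__buf};
  //
  //   int* __claim_two()
  //   { return __cursor.fetch_add(2); }  // returns the old pointer and moves
  //                                      // __cursor forward by two ints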
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H