#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdint.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
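  // Usage note (a sketch, not part of this header): on targets supporting
  // hardware lock elision these modifier bits are OR'ed into a plain
  // memory_order, e.g.
  //   __x.exchange(1, memory_order_acquire | __memory_order_hle_acquire);
  // On targets without lock elision the modifier bits have no effect.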
  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }
  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }
  // Drop release ordering as per [atomics.types.operations.req]/21.
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }
  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
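  // For reference: a compare-exchange failure order may be neither
  // memory_order_release nor memory_order_acq_rel, so the mapping above
  // yields, for example,
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed
  // while preserving any HLE modifier bits unchanged.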
  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }
  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
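  // Note: atomic_thread_fence orders memory accesses with respect to other
  // threads, while atomic_signal_fence only constrains the compiler so that
  // a signal handler running in the same thread observes the intended
  // ordering; it emits no hardware fence instruction.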
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
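  // Usage note (illustrative): kill_dependency terminates a
  // memory_order_consume dependency chain; the returned value no longer
  // carries a dependency from __y, so later uses need not be ordered after
  // the consume load that produced it.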
  template<typename _IntTp>
    struct __atomic_base;
  typedef __atomic_base<char>			atomic_char;
  typedef __atomic_base<signed char>		atomic_schar;
  typedef __atomic_base<unsigned char>		atomic_uchar;
  typedef __atomic_base<short>			atomic_short;
  typedef __atomic_base<unsigned short>		atomic_ushort;
  typedef __atomic_base<int>			atomic_int;
  typedef __atomic_base<unsigned int>		atomic_uint;
  typedef __atomic_base<long>			atomic_long;
  typedef __atomic_base<unsigned long>		atomic_ulong;
  typedef __atomic_base<long long>		atomic_llong;
  typedef __atomic_base<unsigned long long>	atomic_ullong;
  typedef __atomic_base<wchar_t>		atomic_wchar_t;
  typedef __atomic_base<char16_t>		atomic_char16_t;
  typedef __atomic_base<char32_t>		atomic_char32_t;
  typedef __atomic_base<int_least8_t>		atomic_int_least8_t;
  typedef __atomic_base<uint_least8_t>		atomic_uint_least8_t;
  typedef __atomic_base<int_least16_t>		atomic_int_least16_t;
  typedef __atomic_base<uint_least16_t>		atomic_uint_least16_t;
  typedef __atomic_base<int_least32_t>		atomic_int_least32_t;
  typedef __atomic_base<uint_least32_t>		atomic_uint_least32_t;
  typedef __atomic_base<int_least64_t>		atomic_int_least64_t;
  typedef __atomic_base<uint_least64_t>		atomic_uint_least64_t;

  typedef __atomic_base<int_fast8_t>		atomic_int_fast8_t;
  typedef __atomic_base<uint_fast8_t>		atomic_uint_fast8_t;
  typedef __atomic_base<int_fast16_t>		atomic_int_fast16_t;
  typedef __atomic_base<uint_fast16_t>		atomic_uint_fast16_t;
  typedef __atomic_base<int_fast32_t>		atomic_int_fast32_t;
  typedef __atomic_base<uint_fast32_t>		atomic_uint_fast32_t;
  typedef __atomic_base<int_fast64_t>		atomic_int_fast64_t;
  typedef __atomic_base<uint_fast64_t>		atomic_uint_fast64_t;

  typedef __atomic_base<intptr_t>		atomic_intptr_t;
  typedef __atomic_base<uintptr_t>		atomic_uintptr_t;
  typedef __atomic_base<size_t>			atomic_size_t;
  typedef __atomic_base<intmax_t>		atomic_intmax_t;
  typedef __atomic_base<uintmax_t>		atomic_uintmax_t;
  typedef __atomic_base<ptrdiff_t>		atomic_ptrdiff_t;
#define ATOMIC_VAR_INIT(_VI) { _VI }
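  // Usage sketch (illustrative, not part of this header):
  //   std::atomic<int> __v = ATOMIC_VAR_INIT(5);
  // The macro exists for source compatibility with C11 <stdatomic.h>; in
  // C++ it expands to ordinary brace initialization.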
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C
#define ATOMIC_FLAG_INIT { 0 }
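  // Usage sketch (illustrative):
  //   std::atomic_flag __f = ATOMIC_FLAG_INIT;
  // initializes the flag in the clear (false) state.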
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
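  // A minimal spinlock sketch built on atomic_flag (illustrative only; the
  // names __spin_lock/__spin_unlock are hypothetical, not part of this
  // header):
  //
  //   void __spin_lock(std::atomic_flag& __f)
  //   {
  //     while (__f.test_and_set(std::memory_order_acquire))
  //       { } // busy-wait until the previous value was clear
  //   }
  //
  //   void __spin_unlock(std::atomic_flag& __f)
  //   { __f.clear(std::memory_order_release); }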
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
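      // Note (an assumption about the builtin's contract): passing nullptr
      // as the second argument asks whether a typically-aligned object of
      // this size is lock-free; passing &_M_i instead would answer for this
      // specific object.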
      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_exchange_n(&_M_i, __i, __m); }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_exchange_n(&_M_i, __i, __m); }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m)); }
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m)); }
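      // Typical retry loop (illustrative; __a names some __atomic_base<int>):
      //   int __old = __a.load(memory_order_relaxed);
      //   while (!__a.compare_exchange_weak(__old, __old * 2,
      //                                     memory_order_acq_rel,
      //                                     memory_order_relaxed))
      //     { } // on failure __old is reloaded with the current value
      // compare_exchange_weak may fail spuriously, so it belongs in loops;
      // compare_exchange_strong fails only if the values actually differ.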
      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const
      { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile
      { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_exchange_n(&_M_p, __p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_exchange_n(&_M_p, __p, __m); }
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
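  // Note on the specialization above: pointer arithmetic is scaled by
  // sizeof(_PTp) via _M_type_size, so for __atomic_base<int*> a
  // fetch_add(1) advances the stored address by sizeof(int) bytes,
  // matching ordinary pointer arithmetic.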
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H