#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

#if __cplusplus >= 201703L
# define __cpp_lib_atomic_is_always_lock_free 201603L
#endif
  /// Generic atomic type, primary class template (defined below).
  template<typename _Tp>
    struct atomic;

  /// Explicit specialization for bool.
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
      void
      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_base.wait(__old, __m); }

      void
      notify_one() noexcept
      { _M_base.notify_one(); }

      void
      notify_all() noexcept
      { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
    };
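
  /* Illustrative usage sketch (not part of this header): a one-shot "ready"
     flag shared between two threads, assuming C++20 for wait()/notify_one().
     The names `ready`, `producer` and `consumer` are invented for the example.

       #include <atomic>

       std::atomic<bool> ready{false};

       void producer()
       {
         // ... publish data ...
         ready.store(true, std::memory_order_release);
         ready.notify_one();                    // wake a waiting consumer (C++20)
       }

       void consumer()
       {
         ready.wait(false, std::memory_order_acquire);  // block while still false
         // ... consume data ...
       }
  */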
#if __cplusplus <= 201703L
# define _GLIBCXX20_INIT(I)
#else
# define _GLIBCXX20_INIT(I) = I
#endif
  /// Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif
    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif
      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i),
                       int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i),
                       int(__m));
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        __glibcxx_assert(__is_valid_cmpexch_failure_order(__f));

        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
#if __cpp_lib_atomic_wait
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_i, __old,
                                     [__m, this] { return this->load(__m); });
      }

      void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
    };
#undef _GLIBCXX20_INIT
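
  /* Illustrative usage sketch (not part of this header): the primary template
     works for any trivially copyable type, here a small invented POD `Point`,
     updated with a classic compare_exchange_weak retry loop.

       #include <atomic>

       struct Point { int x, y; };              // trivially copyable

       std::atomic<Point> pos{Point{0, 0}};

       void move_right()
       {
         Point expected = pos.load();
         Point desired;
         do {
           desired = expected;
           desired.x += 1;                      // recompute from latest value
         } while (!pos.compare_exchange_weak(expected, desired));
       }
  */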
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp*                      __pointer_type;
      typedef __atomic_base<_Tp*>       __base_type;
      __base_type                       _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }
      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }
#if __cpp_lib_atomic_wait
      void
      wait(__pointer_type __old,
           memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }
    };
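
  /* Illustrative usage sketch (not part of this header): the pointer
     specialization supports fetch_add/fetch_sub pointer arithmetic and CAS.
     Below, an invented singly linked `Node` list gets a lock-free push using
     compare_exchange_weak on an atomic head pointer.

       #include <atomic>

       struct Node { int value; Node* next; };

       std::atomic<Node*> head{nullptr};

       void push(int v)
       {
         Node* n = new Node{v, head.load(std::memory_order_relaxed)};
         // Retry until the head we read is still current and gets replaced;
         // on failure, n->next is refreshed with the current head.
         while (!head.compare_exchange_weak(n->next, n,
                                            std::memory_order_release,
                                            std::memory_order_relaxed))
           { }
       }
  */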
  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char                      __integral_type;
      typedef __atomic_base<char>       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char                 __integral_type;
      typedef __atomic_base<signed char>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char                 __integral_type;
      typedef __atomic_base<unsigned char>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short                     __integral_type;
      typedef __atomic_base<short>      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short                 __integral_type;
      typedef __atomic_base<unsigned short>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int                       __integral_type;
      typedef __atomic_base<int>        __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int                 __integral_type;
      typedef __atomic_base<unsigned int>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long                      __integral_type;
      typedef __atomic_base<long>       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long                 __integral_type;
      typedef __atomic_base<unsigned long>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long                 __integral_type;
      typedef __atomic_base<long long>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long                 __integral_type;
      typedef __atomic_base<unsigned long long>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t                   __integral_type;
      typedef __atomic_base<wchar_t>    __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };

#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t                   __integral_type;
      typedef __atomic_base<char8_t>    __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t                  __integral_type;
      typedef __atomic_base<char16_t>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t                  __integral_type;
      typedef __atomic_base<char32_t>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };
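
  /* Illustrative usage sketch (not part of this header): the integral
     specializations inherit the arithmetic and bitwise operations of
     __atomic_base, so a shared counter can be updated without a mutex.
     `counter` and `worker` are invented names.

       #include <atomic>

       std::atomic<int> counter{0};

       void worker()
       {
         ++counter;                              // seq_cst read-modify-write
         counter.fetch_add(5, std::memory_order_relaxed);
         int seen = counter.load(std::memory_order_acquire);
         (void) seen;
       }
  */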
  /// Convenience typedefs for the atomic specializations.
  typedef atomic<bool>                  atomic_bool;
  typedef atomic<char>                  atomic_char;
  typedef atomic<signed char>           atomic_schar;
  typedef atomic<unsigned char>         atomic_uchar;
  typedef atomic<short>                 atomic_short;
  typedef atomic<unsigned short>        atomic_ushort;
  typedef atomic<int>                   atomic_int;
  typedef atomic<unsigned int>          atomic_uint;
  typedef atomic<long>                  atomic_long;
  typedef atomic<unsigned long>         atomic_ulong;
  typedef atomic<long long>             atomic_llong;
  typedef atomic<unsigned long long>    atomic_ullong;
  typedef atomic<wchar_t>               atomic_wchar_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  typedef atomic<char8_t>               atomic_char8_t;
#endif

  typedef atomic<char16_t>              atomic_char16_t;
  typedef atomic<char32_t>              atomic_char32_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // Exact-width atomic typedefs.
  typedef atomic<int8_t>                atomic_int8_t;
  typedef atomic<uint8_t>               atomic_uint8_t;
  typedef atomic<int16_t>               atomic_int16_t;
  typedef atomic<uint16_t>              atomic_uint16_t;
  typedef atomic<int32_t>               atomic_int32_t;
  typedef atomic<uint32_t>              atomic_uint32_t;
  typedef atomic<int64_t>               atomic_int64_t;
  typedef atomic<uint64_t>              atomic_uint64_t;

  // Least-width and fast-width atomic typedefs.
  typedef atomic<int_least8_t>          atomic_int_least8_t;
  typedef atomic<uint_least8_t>         atomic_uint_least8_t;
  typedef atomic<int_least16_t>         atomic_int_least16_t;
  typedef atomic<uint_least16_t>        atomic_uint_least16_t;
  typedef atomic<int_least32_t>         atomic_int_least32_t;
  typedef atomic<uint_least32_t>        atomic_uint_least32_t;
  typedef atomic<int_least64_t>         atomic_int_least64_t;
  typedef atomic<uint_least64_t>        atomic_uint_least64_t;
  typedef atomic<int_fast8_t>           atomic_int_fast8_t;
  typedef atomic<uint_fast8_t>          atomic_uint_fast8_t;
  typedef atomic<int_fast16_t>          atomic_int_fast16_t;
  typedef atomic<uint_fast16_t>         atomic_uint_fast16_t;
  typedef atomic<int_fast32_t>          atomic_int_fast32_t;
  typedef atomic<uint_fast32_t>         atomic_uint_fast32_t;
  typedef atomic<int_fast64_t>          atomic_int_fast64_t;
  typedef atomic<uint_fast64_t>         atomic_uint_fast64_t;
#endif

  typedef atomic<intptr_t>              atomic_intptr_t;
  typedef atomic<uintptr_t>             atomic_uintptr_t;
  typedef atomic<size_t>                atomic_size_t;
  typedef atomic<ptrdiff_t>             atomic_ptrdiff_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  typedef atomic<intmax_t>              atomic_intmax_t;
  typedef atomic<uintmax_t>             atomic_uintmax_t;
#endif
  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
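
  /* Illustrative usage sketch (not part of this header): a minimal spinlock
     built on atomic_flag, the one atomic type guaranteed to be lock-free.
     The `SpinLock` name is invented for the example.

       #include <atomic>

       class SpinLock
       {
         std::atomic_flag _flag = ATOMIC_FLAG_INIT;

       public:
         void lock()
         {
           // Spin until the previous value was clear, i.e. we set it first.
           while (_flag.test_and_set(std::memory_order_acquire))
             { }
         }

         void unlock()
         { _flag.clear(std::memory_order_release); }
       };
  */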
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;

  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  // [atomics.nonmembers] Non-member functions.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
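
  /* Illustrative usage sketch (not part of this header): the non-member
     functions mirror the C <stdatomic.h> interface, which keeps code that is
     shared with C readable.  `flag` and `try_claim` are invented names.

       #include <atomic>

       std::atomic<int> flag{0};

       bool try_claim()
       {
         int expected = 0;
         // Equivalent to flag.compare_exchange_strong(expected, 1).
         return std::atomic_compare_exchange_strong(&flag, &expected, 1);
       }
  */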
#if __cpp_lib_atomic_wait
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
                typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
                         typename std::atomic<_Tp>::value_type __old,
                         std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
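
  /* Illustrative usage sketch (not part of this header, requires C++20):
     waiting for a counter to reach zero without busy-waiting, using the
     free-function wait/notify forms.  `pending`, `finish_one` and
     `wait_for_all` are invented names.

       #include <atomic>

       std::atomic<int> pending{4};

       void finish_one()
       {
         if (pending.fetch_sub(1, std::memory_order_acq_rel) == 1)
           std::atomic_notify_all(&pending);    // last worker wakes the waiter
       }

       void wait_for_all()
       {
         for (int n = pending.load(std::memory_order_acquire); n != 0;
              n = pending.load(std::memory_order_acquire))
           std::atomic_wait(&pending, n);       // blocks while value == n
       }
  */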
  // Fetch-and-modify non-member functions.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a, __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a, __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
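
  /* Illustrative usage sketch (not part of this header): an event counter
     where only the final total matters, so relaxed ordering is sufficient.
     `events` and `record_event` are invented names.

       #include <atomic>
       #include <cstdint>

       std::atomic<std::uint64_t> events{0};

       void record_event()
       {
         std::atomic_fetch_add_explicit(&events, std::uint64_t{1},
                                        std::memory_order_relaxed);
       }
  */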
#if __cplusplus > 201703L
#define __cpp_lib_atomic_float 201711L

  /// Explicit specialization for float.
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  /// Explicit specialization for double.
  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  /// Explicit specialization for long double.
  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };
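
  /* Illustrative usage sketch (not part of this header, requires C++20):
     the floating-point specializations add fetch_add/fetch_sub, so a shared
     running total can be accumulated without a mutex.  `total` and
     `add_sample` are invented names.

       #include <atomic>

       std::atomic<double> total{0.0};

       void add_sample(double x)
       { total.fetch_add(x, std::memory_order_relaxed); }
  */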
#define __cpp_lib_atomic_ref 201806L

  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };

#endif // C++2a
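
  /* Illustrative usage sketch (not part of this header, requires C++20):
     atomic_ref performs atomic operations on an ordinary object for the
     lifetime of the reference; while any atomic_ref to the object exists,
     the object must only be accessed through atomic_refs.  Names are invented.

       #include <atomic>
       #include <cstddef>

       void bump_all(int* data, std::size_t n)
       {
         for (std::size_t i = 0; i < n; ++i)
         {
           std::atomic_ref<int> cell(data[i]);
           cell.fetch_add(1, std::memory_order_relaxed);
         }
       }
  */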
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC