// -*- C++ -*- header.

// Copyright (C) 2008-2024 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#define __glibcxx_want_atomic_is_always_lock_free
#define __glibcxx_want_atomic_flag_test
#define __glibcxx_want_atomic_float
#define __glibcxx_want_atomic_ref
#define __glibcxx_want_atomic_lock_free_type_aliases
#define __glibcxx_want_atomic_value_initialization
#define __glibcxx_want_atomic_wait
#include <bits/version.h>

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
  struct atomic<bool>
  {
    using value_type = bool;

  private:
    __atomic_base<bool> _M_base;

  public:
    atomic() noexcept = default;
    ~atomic() noexcept = default;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    constexpr atomic(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
    static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
    void
    wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
    { _M_base.wait(__old, __m); }

    // TODO add const volatile overload

    void
    notify_one() noexcept
    { _M_base.notify_one(); }

    void
    notify_all() noexcept
    { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
  };
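
  // Illustrative usage sketch for atomic<bool> (not part of the header's
  // interface; the names below are hypothetical): a one-shot "ready" flag
  // published by one thread and observed by another.
  //
  //   #include <atomic>
  //
  //   std::atomic<bool> ready{false};
  //
  //   void producer()                 // runs in one thread
  //   { ready.store(true, std::memory_order_release); }
  //
  //   bool consumer()                 // polled from another thread
  //   { return ready.load(std::memory_order_acquire); }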

/// @cond undocumented
#if __cpp_lib_atomic_value_initialization
# define _GLIBCXX20_INIT(I) = I
#else
# define _GLIBCXX20_INIT(I)
#endif
/// @endcond

  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
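
      // Worked example (illustrative, using hypothetical element types):
      //   sizeof(_Tp) == 6: 6 & 5 == 4, not a power of two, so
      //     _S_min_alignment == 0 and _S_alignment == alignof(_Tp).
      //   sizeof(_Tp) == 8 (e.g. a struct of two ints, alignof == 4):
      //     8 & 7 == 0 and 8 <= 16, so _S_min_alignment == 8 and the member
      //     is over-aligned to 8 bytes, which allows a single 8-byte atomic
      //     instruction on targets that require natural alignment.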

      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i)
      {
#if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
        if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
          __builtin_clear_padding(std::__addressof(_M_i));
#endif
      }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
                                     reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
                                     reinterpret_cast<void *>(-_S_alignment));
      }

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __atomic_store(std::__addressof(_M_i),
                       __atomic_impl::__clear_padding(__i),
                       int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __atomic_store(std::__addressof(_M_i),
                       __atomic_impl::__clear_padding(__i),
                       int(__m));
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i),
                          __atomic_impl::__clear_padding(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i),
                          __atomic_impl::__clear_padding(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
                                                 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
                                                 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
                                                 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
                                                 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait // C++ >= 20
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_i, __old,
                                     [__m, this] { return this->load(__m); });
      }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait

    };
#undef _GLIBCXX20_INIT
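
  // Illustrative usage sketch for the primary template (hypothetical names,
  // not part of libstdc++): a trivially copyable struct updated with a
  // compare-exchange loop.
  //
  //   #include <atomic>
  //
  //   struct Point { int x, y; };               // trivially copyable
  //   std::atomic<Point> pos{{0, 0}};
  //
  //   void translate(int dx, int dy)
  //   {
  //     Point old = pos.load(std::memory_order_relaxed);
  //     Point desired;
  //     do
  //       desired = {old.x + dx, old.y + dy};   // 'old' is refreshed on failure
  //     while (!pos.compare_exchange_weak(old, desired,
  //                                       std::memory_order_acq_rel,
  //                                       std::memory_order_relaxed));
  //   }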

  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }
    };
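
  // Illustrative usage sketch for the pointer specialization (hypothetical
  // names): fetch_add and the +=/-= operators step in units of the
  // pointed-to object type, so concurrent callers can claim distinct slots.
  //
  //   #include <atomic>
  //
  //   int buffer[64];
  //   std::atomic<int*> cursor{buffer};
  //
  //   int* claim_slot()                         // bounds checking omitted
  //   { return cursor.fetch_add(1, std::memory_order_relaxed); }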


  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char __integral_type;
      typedef __atomic_base<char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char __integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char __integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short __integral_type;
      typedef __atomic_base<short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short __integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int __integral_type;
      typedef __atomic_base<int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int __integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long __integral_type;
      typedef __atomic_base<long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long __integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long __integral_type;
      typedef __atomic_base<long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long __integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t __integral_type;
      typedef __atomic_base<wchar_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };

#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t __integral_type;
      typedef __atomic_base<char8_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t __integral_type;
      typedef __atomic_base<char16_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t __integral_type;
      typedef __atomic_base<char32_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };
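
  // Illustrative usage sketch for the integral specializations (hypothetical
  // names): they inherit the arithmetic and bitwise operations of
  // __atomic_base, so a shared counter needs no explicit locking.
  //
  //   #include <atomic>
  //
  //   std::atomic<int> hits{0};
  //
  //   void record_hit()
  //   { hits.fetch_add(1, std::memory_order_relaxed); }   // or simply ++hits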


  /// atomic_bool
  typedef atomic<bool> atomic_bool;

  /// atomic_char
  typedef atomic<char> atomic_char;

  /// atomic_schar
  typedef atomic<signed char> atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char> atomic_uchar;

  /// atomic_short
  typedef atomic<short> atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short> atomic_ushort;

  /// atomic_int
  typedef atomic<int> atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int> atomic_uint;

  /// atomic_long
  typedef atomic<long> atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long> atomic_ulong;

  /// atomic_llong
  typedef atomic<long long> atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t> atomic_wchar_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  /// atomic_char8_t
  typedef atomic<char8_t> atomic_char8_t;
#endif

  /// atomic_char16_t
  typedef atomic<char16_t> atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t> atomic_char32_t;

#ifdef _GLIBCXX_USE_C99_STDINT
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided

  /// atomic_int8_t
  typedef atomic<int8_t> atomic_int8_t;

  /// atomic_uint8_t
  typedef atomic<uint8_t> atomic_uint8_t;

  /// atomic_int16_t
  typedef atomic<int16_t> atomic_int16_t;

  /// atomic_uint16_t
  typedef atomic<uint16_t> atomic_uint16_t;

  /// atomic_int32_t
  typedef atomic<int32_t> atomic_int32_t;

  /// atomic_uint32_t
  typedef atomic<uint32_t> atomic_uint32_t;

  /// atomic_int64_t
  typedef atomic<int64_t> atomic_int64_t;

  /// atomic_uint64_t
  typedef atomic<uint64_t> atomic_uint64_t;
#endif

  /// atomic_int_least8_t
  typedef atomic<int_least8_t> atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t> atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef atomic<int_least16_t> atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t> atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef atomic<int_least32_t> atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t> atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef atomic<int_least64_t> atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t> atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t> atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t> atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t> atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t> atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t> atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t> atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t> atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t> atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef atomic<intptr_t> atomic_intptr_t;

  /// atomic_uintptr_t
  typedef atomic<uintptr_t> atomic_uintptr_t;

  /// atomic_size_t
  typedef atomic<size_t> atomic_size_t;

  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t> atomic_ptrdiff_t;

  /// atomic_intmax_t
  typedef atomic<intmax_t> atomic_intmax_t;

  /// atomic_uintmax_t
  typedef atomic<uintmax_t> atomic_uintmax_t;

  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

#if __cpp_lib_atomic_flag_test
  inline bool
  atomic_flag_test(const atomic_flag* __a) noexcept
  { return __a->test(); }

  inline bool
  atomic_flag_test(const volatile atomic_flag* __a) noexcept
  { return __a->test(); }

  inline bool
  atomic_flag_test_explicit(const atomic_flag* __a,
                            memory_order __m) noexcept
  { return __a->test(__m); }

  inline bool
  atomic_flag_test_explicit(const volatile atomic_flag* __a,
                            memory_order __m) noexcept
  { return __a->test(__m); }
#endif

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

#if __cpp_lib_atomic_wait
  inline void
  atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
  { __a->wait(__old); }

  inline void
  atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
                            memory_order __m) noexcept
  { __a->wait(__old, __m); }

  inline void
  atomic_flag_notify_one(atomic_flag* __a) noexcept
  { __a->notify_one(); }

  inline void
  atomic_flag_notify_all(atomic_flag* __a) noexcept
  { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
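
  // Illustrative usage sketch for the atomic_flag free functions (hypothetical
  // names): a minimal test-and-set spinlock.
  //
  //   #include <atomic>
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&lock_flag,
  //                                                   std::memory_order_acquire))
  //       { /* spin */ }
  //   }
  //
  //   void unlock()
  //   { std::atomic_flag_clear_explicit(&lock_flag, std::memory_order_release); }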

  /// @cond undocumented
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  /// @endcond
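
  // Note: __atomic_val_t makes the value parameters of the non-member
  // functions below a non-deduced context, so only the atomic<_Tp>* argument
  // drives template argument deduction.  Illustrative (hypothetical names):
  //
  //   std::atomic<long> counter{0};
  //   std::atomic_store(&counter, 1);   // _Tp deduced as long from the
  //                                     // pointer; the int literal is merely
  //                                     // converted and cannot cause a
  //                                     // deduction conflict.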

  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }


#if __cpp_lib_atomic_wait
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
                typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
                         typename std::atomic<_Tp>::value_type __old,
                         std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
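
  // Illustrative usage sketch for atomic_wait/atomic_notify_one (hypothetical
  // names): one thread blocks until another changes the value and notifies.
  //
  //   #include <atomic>
  //
  //   std::atomic<int> stage{0};
  //
  //   void waiter()
  //   { std::atomic_wait(&stage, 0); }          // returns once stage != 0
  //
  //   void signaller()
  //   {
  //     stage.store(1, std::memory_order_release);
  //     std::atomic_notify_one(&stage);
  //   }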

  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

#ifdef __cpp_lib_atomic_float
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };

#ifdef __STDCPP_FLOAT16_T__
  template<>
    struct atomic<_Float16> : __atomic_float<_Float16>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float16>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT32_T__
  template<>
    struct atomic<_Float32> : __atomic_float<_Float32>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float32>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT64_T__
  template<>
    struct atomic<_Float64> : __atomic_float<_Float64>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float64>::operator=;
    };
#endif

#ifdef __STDCPP_FLOAT128_T__
  template<>
    struct atomic<_Float128> : __atomic_float<_Float128>
    {
      atomic() noexcept = default;

      constexpr
      atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<_Float128>::operator=;
    };
#endif

#ifdef __STDCPP_BFLOAT16_T__
  template<>
    struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
    {
      atomic() noexcept = default;

      constexpr
      atomic(__gnu_cxx::__bfloat16_t __fp) noexcept : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
    };
#endif
#endif // __cpp_lib_atomic_float
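
  // Illustrative usage sketch for the floating-point specializations
  // (hypothetical names), available when __cpp_lib_atomic_float is defined
  // (C++20): fetch_add works directly on atomic<double>.
  //
  //   #include <atomic>
  //
  //   std::atomic<double> total{0.0};
  //
  //   void add_sample(double v)
  //   { total.fetch_add(v, std::memory_order_relaxed); }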

#ifdef __cpp_lib_atomic_ref
  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };
#endif // __cpp_lib_atomic_ref
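
  // Illustrative usage sketch for atomic_ref (hypothetical names): it gives
  // atomic access to an ordinary object; while any atomic_ref to the object
  // exists, all concurrent accesses must go through atomic_ref.
  //
  //   #include <atomic>
  //
  //   int counter = 0;                          // plain, non-atomic object
  //
  //   void bump()
  //   { std::atomic_ref<int>(counter).fetch_add(1, std::memory_order_relaxed); }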

#ifdef __cpp_lib_atomic_lock_free_type_aliases
# ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
  using atomic_signed_lock_free
    = atomic<make_signed_t<__detail::__platform_wait_t>>;
  using atomic_unsigned_lock_free
    = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
# elif ATOMIC_INT_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed int>;
  using atomic_unsigned_lock_free = atomic<unsigned int>;
# elif ATOMIC_LONG_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed long>;
  using atomic_unsigned_lock_free = atomic<unsigned long>;
# elif ATOMIC_CHAR_LOCK_FREE == 2
  using atomic_signed_lock_free = atomic<signed char>;
  using atomic_unsigned_lock_free = atomic<unsigned char>;
# else
#  error "libstdc++ bug: no lock-free atomics but they were emitted in <version>"
# endif
#endif

  /// @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC