libstdc++
atomic_base.h
1 // -*- C++ -*- header.
2 
3 // Copyright (C) 2008-2013 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file bits/atomic_base.h
26  * This is an internal header file, included by other library headers.
27  * Do not attempt to use it directly. @headername{atomic}
28  */
29 
30 #ifndef _GLIBCXX_ATOMIC_BASE_H
31 #define _GLIBCXX_ATOMIC_BASE_H 1
32 
33 #pragma GCC system_header
34 
35 #include <bits/c++config.h>
36 #include <stdbool.h>
37 #include <stdint.h>
38 #include <bits/atomic_lockfree_defines.h>
39 
40 namespace std _GLIBCXX_VISIBILITY(default)
41 {
42 _GLIBCXX_BEGIN_NAMESPACE_VERSION
43 
44  /**
45  * @defgroup atomics Atomics
46  *
47  * Components for performing atomic operations.
48  * @{
49  */
50 
51  /// Enumeration for memory_order
52  typedef enum memory_order
53  {
54  memory_order_relaxed,
55  memory_order_consume,
56  memory_order_acquire,
57  memory_order_release,
58  memory_order_acq_rel,
59  memory_order_seq_cst
60  } memory_order;
61 
62  enum __memory_order_modifier
63  {
64  __memory_order_mask = 0x0ffff,
65  __memory_order_modifier_mask = 0xffff0000,
66  __memory_order_hle_acquire = 0x10000,
67  __memory_order_hle_release = 0x20000
68  };
69 
70  constexpr memory_order
71  operator|(memory_order __m, __memory_order_modifier __mod)
72  {
73  return memory_order(__m | int(__mod));
74  }
75 
76  constexpr memory_order
77  operator&(memory_order __m, __memory_order_modifier __mod)
78  {
79  return memory_order(__m & int(__mod));
80  }
81 
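As an illustrative sketch (not part of the header): the `__memory_order_modifier` values combine with a standard ordering through the `operator|` overload above; on TSX-capable x86 targets GCC uses the HLE hints for hardware lock elision. `__memory_order_hle_acquire`/`__memory_order_hle_release` are GCC-internal names, so portable code should not rely on them.

#include <atomic>

std::atomic<int> lockvar(0);

void spin_lock()
{
  // XACQUIRE-prefixed exchange where HLE is available, plain acquire otherwise.
  while (lockvar.exchange(1, std::memory_order_acquire
                             | std::__memory_order_hle_acquire))
    ;
}

void spin_unlock()
{
  // XRELEASE-prefixed store releasing the elided lock.
  lockvar.store(0, std::memory_order_release
                   | std::__memory_order_hle_release);
}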
82  // Drop release ordering as per [atomics.types.operations.req]/21
83  constexpr memory_order
84  __cmpexch_failure_order2(memory_order __m) noexcept
85  {
86  return __m == memory_order_acq_rel ? memory_order_acquire
87  : __m == memory_order_release ? memory_order_relaxed : __m;
88  }
89 
90  constexpr memory_order
91  __cmpexch_failure_order(memory_order __m) noexcept
92  {
93  return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
94  | (__m & __memory_order_modifier_mask));
95  }
96 
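For illustration only (these are internal library names): the failure ordering derived from a success ordering never contains a release component, per [atomics.types.operations.req]/21.

#include <atomic>

static_assert(std::__cmpexch_failure_order(std::memory_order_acq_rel)
                == std::memory_order_acquire,
              "acq_rel success order yields acquire on failure");
static_assert(std::__cmpexch_failure_order(std::memory_order_release)
                == std::memory_order_relaxed,
              "release success order yields relaxed on failure");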
97  inline void
98  atomic_thread_fence(memory_order __m) noexcept
99  { __atomic_thread_fence(__m); }
100 
101  inline void
102  atomic_signal_fence(memory_order __m) noexcept
103  { __atomic_signal_fence(__m); }
104 
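A minimal usage sketch (not part of the header): a release fence before a relaxed store pairs with an acquire fence after a relaxed load in another thread, establishing synchronization between the two fences.

#include <atomic>

int payload;
std::atomic<bool> ready(false);

void producer()
{
  payload = 42;
  std::atomic_thread_fence(std::memory_order_release);
  ready.store(true, std::memory_order_relaxed);
}

void consumer()
{
  while (!ready.load(std::memory_order_relaxed))
    ;
  std::atomic_thread_fence(std::memory_order_acquire);
  // payload is guaranteed to be 42 here.
}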
105  /// kill_dependency
106  template<typename _Tp>
107  inline _Tp
108  kill_dependency(_Tp __y) noexcept
109  {
110  _Tp __ret(__y);
111  return __ret;
112  }
113 
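Illustrative use (not part of the header): kill_dependency ends a dependency chain started by a memory_order_consume load, so dependency ordering need not be preserved for the returned value.

#include <atomic>

std::atomic<int*> table;

int read_first()
{
  int* p = table.load(std::memory_order_consume);
  // p[0] carries a dependency from the consume load; the value returned
  // from kill_dependency does not, ending the chain.
  return std::kill_dependency(p[0]);
}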
114 
115  // Base types for atomics.
116  template<typename _IntTp>
117    struct __atomic_base;
118 
119  /// atomic_char
120  typedef __atomic_base<char> atomic_char;
121 
122  /// atomic_schar
123  typedef __atomic_base<signed char> atomic_schar;
124 
125  /// atomic_uchar
126  typedef __atomic_base<unsigned char> atomic_uchar;
127 
128  /// atomic_short
129  typedef __atomic_base<short> atomic_short;
130 
131  /// atomic_ushort
132  typedef __atomic_base<unsigned short> atomic_ushort;
133 
134  /// atomic_int
135  typedef __atomic_base<int> atomic_int;
136 
137  /// atomic_uint
138  typedef __atomic_base<unsigned int> atomic_uint;
139 
140  /// atomic_long
141  typedef __atomic_base<long> atomic_long;
142 
143  /// atomic_ulong
144  typedef __atomic_base<unsigned long> atomic_ulong;
145 
146  /// atomic_llong
147  typedef __atomic_base<long long> atomic_llong;
148 
149  /// atomic_ullong
150  typedef __atomic_base<unsigned long long> atomic_ullong;
151 
152  /// atomic_wchar_t
153  typedef __atomic_base<wchar_t> atomic_wchar_t;
154 
155  /// atomic_char16_t
156  typedef __atomic_base<char16_t> atomic_char16_t;
157 
158  /// atomic_char32_t
159  typedef __atomic_base<char32_t> atomic_char32_t;
160 
164 
165  /// atomic_int_least8_t
166  typedef __atomic_base<int_least8_t> atomic_int_least8_t;
167 
168  /// atomic_uint_least8_t
169  typedef __atomic_base<uint_least8_t> atomic_uint_least8_t;
170 
171  /// atomic_int_least16_t
172  typedef __atomic_base<int_least16_t> atomic_int_least16_t;
173 
174  /// atomic_uint_least16_t
175  typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;
176 
177  /// atomic_int_least32_t
178  typedef __atomic_base<int_least32_t> atomic_int_least32_t;
179 
180  /// atomic_uint_least32_t
181  typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;
182 
183  /// atomic_int_least64_t
184  typedef __atomic_base<int_least64_t> atomic_int_least64_t;
185 
186  /// atomic_uint_least64_t
187  typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;
188 
189 
190  /// atomic_int_fast8_t
191  typedef __atomic_base<int_fast8_t> atomic_int_fast8_t;
192 
193  /// atomic_uint_fast8_t
194  typedef __atomic_base<uint_fast8_t> atomic_uint_fast8_t;
195 
196  /// atomic_int_fast16_t
197  typedef __atomic_base<int_fast16_t> atomic_int_fast16_t;
198 
199  /// atomic_uint_fast16_t
200  typedef __atomic_base<uint_fast16_t> atomic_uint_fast16_t;
201 
202  /// atomic_int_fast32_t
203  typedef __atomic_base<int_fast32_t> atomic_int_fast32_t;
204 
205  /// atomic_uint_fast32_t
206  typedef __atomic_base<uint_fast32_t> atomic_uint_fast32_t;
207 
208  /// atomic_int_fast64_t
209  typedef __atomic_base<int_fast64_t> atomic_int_fast64_t;
210 
211  /// atomic_uint_fast64_t
212  typedef __atomic_base<uint_fast64_t> atomic_uint_fast64_t;
213 
214 
215  /// atomic_intptr_t
216  typedef __atomic_base<intptr_t> atomic_intptr_t;
217 
218  /// atomic_uintptr_t
219  typedef __atomic_base<uintptr_t> atomic_uintptr_t;
220 
221  /// atomic_size_t
222  typedef __atomic_base<size_t> atomic_size_t;
223 
224  /// atomic_intmax_t
225  typedef __atomic_base<intmax_t> atomic_intmax_t;
226 
227  /// atomic_uintmax_t
228  typedef __atomic_base<uintmax_t> atomic_uintmax_t;
229 
230  /// atomic_ptrdiff_t
231  typedef __atomic_base<ptrdiff_t> atomic_ptrdiff_t;
232 
233 
234 #define ATOMIC_VAR_INIT(_VI) { _VI }
235 
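Illustrative use: the macro provides constant (static) initialization of an atomic object.

#include <atomic>

std::atomic<int> counter = ATOMIC_VAR_INIT(0);  // expands to { 0 }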
236  template<typename _Tp>
237  struct atomic;
238 
239  template<typename _Tp>
240  struct atomic<_Tp*>;
241 
242 
243  /**
244  * @brief Base type for atomic_flag.
245  *
246  * Base type is POD with data, allowing atomic_flag to derive from
247  * it and meet the standard layout type requirement. In addition to
248  * compatibility with a C interface, this allows different
249  * implementations of atomic_flag to use the same atomic operation
250  * functions, via a standard conversion to the __atomic_flag_base
251  * argument.
252  */
253  _GLIBCXX_BEGIN_EXTERN_C
254 
255  struct __atomic_flag_base
256  {
257  /* The target's "set" value for test-and-set may not be exactly 1. */
258 #if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
259  bool _M_i;
260 #else
261  unsigned char _M_i;
262 #endif
263  };
264 
265  _GLIBCXX_END_EXTERN_C
266 
267 #define ATOMIC_FLAG_INIT { 0 }
268 
269  /// atomic_flag
270  struct atomic_flag : public __atomic_flag_base
271  {
272  atomic_flag() noexcept = default;
273  ~atomic_flag() noexcept = default;
274  atomic_flag(const atomic_flag&) = delete;
275  atomic_flag& operator=(const atomic_flag&) = delete;
276  atomic_flag& operator=(const atomic_flag&) volatile = delete;
277 
278  // Conversion to ATOMIC_FLAG_INIT.
279  constexpr atomic_flag(bool __i) noexcept
280  : __atomic_flag_base({ __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0 })
281  { }
282 
283  bool
284  test_and_set(memory_order __m = memory_order_seq_cst) noexcept
285  {
286  return __atomic_test_and_set (&_M_i, __m);
287  }
288 
289  bool
290  test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
291  {
292  return __atomic_test_and_set (&_M_i, __m);
293  }
294 
295  void
296  clear(memory_order __m = memory_order_seq_cst) noexcept
297  {
298  memory_order __b = __m & __memory_order_mask;
299  __glibcxx_assert(__b != memory_order_consume);
300  __glibcxx_assert(__b != memory_order_acquire);
301  __glibcxx_assert(__b != memory_order_acq_rel);
302 
303  __atomic_clear (&_M_i, __m);
304  }
305 
306  void
307  clear(memory_order __m = memory_order_seq_cst) volatile noexcept
308  {
309  memory_order __b = __m & __memory_order_mask;
310  __glibcxx_assert(__b != memory_order_consume);
311  __glibcxx_assert(__b != memory_order_acquire);
312  __glibcxx_assert(__b != memory_order_acq_rel);
313 
314  __atomic_clear (&_M_i, __m);
315  }
316  };
317 
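A minimal sketch (not part of the header) of the canonical atomic_flag use, a spinlock; atomic_flag is the only atomic type required to be lock-free.

#include <atomic>

std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;

void with_lock()
{
  while (lock_flag.test_and_set(std::memory_order_acquire))
    ;                                   // spin until the flag was clear
  // ... critical section ...
  lock_flag.clear(std::memory_order_release);
}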
318 
319  /// Base class for atomic integrals.
320  //
321  // For each of the integral types, define atomic_[integral type] struct
322  //
323  // atomic_bool bool
324  // atomic_char char
325  // atomic_schar signed char
326  // atomic_uchar unsigned char
327  // atomic_short short
328  // atomic_ushort unsigned short
329  // atomic_int int
330  // atomic_uint unsigned int
331  // atomic_long long
332  // atomic_ulong unsigned long
333  // atomic_llong long long
334  // atomic_ullong unsigned long long
335  // atomic_char16_t char16_t
336  // atomic_char32_t char32_t
337  // atomic_wchar_t wchar_t
338  //
339  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
340  // 8 bytes, since that is what GCC built-in functions for atomic
341  // memory access expect.
342  template<typename _ITp>
343  struct __atomic_base
344  {
345  private:
346  typedef _ITp __int_type;
347 
348  __int_type _M_i;
349 
350  public:
351  __atomic_base() noexcept = default;
352  ~__atomic_base() noexcept = default;
353  __atomic_base(const __atomic_base&) = delete;
354  __atomic_base& operator=(const __atomic_base&) = delete;
355  __atomic_base& operator=(const __atomic_base&) volatile = delete;
356 
357  // Requires __int_type convertible to _M_i.
358  constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
359 
360  operator __int_type() const noexcept
361  { return load(); }
362 
363  operator __int_type() const volatile noexcept
364  { return load(); }
365 
366  __int_type
367  operator=(__int_type __i) noexcept
368  {
369  store(__i);
370  return __i;
371  }
372 
373  __int_type
374  operator=(__int_type __i) volatile noexcept
375  {
376  store(__i);
377  return __i;
378  }
379 
380  __int_type
381  operator++(int) noexcept
382  { return fetch_add(1); }
383 
384  __int_type
385  operator++(int) volatile noexcept
386  { return fetch_add(1); }
387 
388  __int_type
389  operator--(int) noexcept
390  { return fetch_sub(1); }
391 
392  __int_type
393  operator--(int) volatile noexcept
394  { return fetch_sub(1); }
395 
396  __int_type
397  operator++() noexcept
398  { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
399 
400  __int_type
401  operator++() volatile noexcept
402  { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }
403 
404  __int_type
405  operator--() noexcept
406  { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
407 
408  __int_type
409  operator--() volatile noexcept
410  { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
411 
412  __int_type
413  operator+=(__int_type __i) noexcept
414  { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
415 
416  __int_type
417  operator+=(__int_type __i) volatile noexcept
418  { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }
419 
420  __int_type
421  operator-=(__int_type __i) noexcept
422  { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
423 
424  __int_type
425  operator-=(__int_type __i) volatile noexcept
426  { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }
427 
428  __int_type
429  operator&=(__int_type __i) noexcept
430  { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
431 
432  __int_type
433  operator&=(__int_type __i) volatile noexcept
434  { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }
435 
436  __int_type
437  operator|=(__int_type __i) noexcept
438  { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
439 
440  __int_type
441  operator|=(__int_type __i) volatile noexcept
442  { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }
443 
444  __int_type
445  operator^=(__int_type __i) noexcept
446  { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
447 
448  __int_type
449  operator^=(__int_type __i) volatile noexcept
450  { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
451 
452  bool
453  is_lock_free() const noexcept
454  { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
455 
456  bool
457  is_lock_free() const volatile noexcept
458  { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
459 
460  void
461  store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
462  {
463  memory_order __b = __m & __memory_order_mask;
464  __glibcxx_assert(__b != memory_order_acquire);
465  __glibcxx_assert(__b != memory_order_acq_rel);
466  __glibcxx_assert(__b != memory_order_consume);
467 
468  __atomic_store_n(&_M_i, __i, __m);
469  }
470 
471  void
472  store(__int_type __i,
473  memory_order __m = memory_order_seq_cst) volatile noexcept
474  {
475  memory_order __b = __m & __memory_order_mask;
476  __glibcxx_assert(__b != memory_order_acquire);
477  __glibcxx_assert(__b != memory_order_acq_rel);
478  __glibcxx_assert(__b != memory_order_consume);
479 
480  __atomic_store_n(&_M_i, __i, __m);
481  }
482 
483  __int_type
484  load(memory_order __m = memory_order_seq_cst) const noexcept
485  {
486  memory_order __b = __m & __memory_order_mask;
487  __glibcxx_assert(__b != memory_order_release);
488  __glibcxx_assert(__b != memory_order_acq_rel);
489 
490  return __atomic_load_n(&_M_i, __m);
491  }
492 
493  __int_type
494  load(memory_order __m = memory_order_seq_cst) const volatile noexcept
495  {
496  memory_order __b = __m & __memory_order_mask;
497  __glibcxx_assert(__b != memory_order_release);
498  __glibcxx_assert(__b != memory_order_acq_rel);
499 
500  return __atomic_load_n(&_M_i, __m);
501  }
502 
503  __int_type
504  exchange(__int_type __i,
505  memory_order __m = memory_order_seq_cst) noexcept
506  {
507  return __atomic_exchange_n(&_M_i, __i, __m);
508  }
509 
510 
511  __int_type
512  exchange(__int_type __i,
513  memory_order __m = memory_order_seq_cst) volatile noexcept
514  {
515  return __atomic_exchange_n(&_M_i, __i, __m);
516  }
517 
518  bool
519  compare_exchange_weak(__int_type& __i1, __int_type __i2,
520  memory_order __m1, memory_order __m2) noexcept
521  {
522  memory_order __b2 = __m2 & __memory_order_mask;
523  memory_order __b1 = __m1 & __memory_order_mask;
524  __glibcxx_assert(__b2 != memory_order_release);
525  __glibcxx_assert(__b2 != memory_order_acq_rel);
526  __glibcxx_assert(__b2 <= __b1);
527 
528  return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
529  }
530 
531  bool
532  compare_exchange_weak(__int_type& __i1, __int_type __i2,
533  memory_order __m1,
534  memory_order __m2) volatile noexcept
535  {
536  memory_order __b2 = __m2 & __memory_order_mask;
537  memory_order __b1 = __m1 & __memory_order_mask;
538  __glibcxx_assert(__b2 != memory_order_release);
539  __glibcxx_assert(__b2 != memory_order_acq_rel);
540  __glibcxx_assert(__b2 <= __b1);
541 
542  return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
543  }
544 
545  bool
546  compare_exchange_weak(__int_type& __i1, __int_type __i2,
547  memory_order __m = memory_order_seq_cst) noexcept
548  {
549  return compare_exchange_weak(__i1, __i2, __m,
550  __cmpexch_failure_order(__m));
551  }
552 
553  bool
554  compare_exchange_weak(__int_type& __i1, __int_type __i2,
555  memory_order __m = memory_order_seq_cst) volatile noexcept
556  {
557  return compare_exchange_weak(__i1, __i2, __m,
558  __cmpexch_failure_order(__m));
559  }
560 
561  bool
562  compare_exchange_strong(__int_type& __i1, __int_type __i2,
563  memory_order __m1, memory_order __m2) noexcept
564  {
565  memory_order __b2 = __m2 & __memory_order_mask;
566  memory_order __b1 = __m1 & __memory_order_mask;
567  __glibcxx_assert(__b2 != memory_order_release);
568  __glibcxx_assert(__b2 != memory_order_acq_rel);
569  __glibcxx_assert(__b2 <= __b1);
570 
571  return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
572  }
573 
574  bool
575  compare_exchange_strong(__int_type& __i1, __int_type __i2,
576  memory_order __m1,
577  memory_order __m2) volatile noexcept
578  {
579  memory_order __b2 = __m2 & __memory_order_mask;
580  memory_order __b1 = __m1 & __memory_order_mask;
581 
582  __glibcxx_assert(__b2 != memory_order_release);
583  __glibcxx_assert(__b2 != memory_order_acq_rel);
584  __glibcxx_assert(__b2 <= __b1);
585 
586  return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
587  }
588 
589  bool
590  compare_exchange_strong(__int_type& __i1, __int_type __i2,
591  memory_order __m = memory_order_seq_cst) noexcept
592  {
593  return compare_exchange_strong(__i1, __i2, __m,
594  __cmpexch_failure_order(__m));
595  }
596 
597  bool
598  compare_exchange_strong(__int_type& __i1, __int_type __i2,
599  memory_order __m = memory_order_seq_cst) volatile noexcept
600  {
601  return compare_exchange_strong(__i1, __i2, __m,
602  __cmpexch_failure_order(__m));
603  }
604 
605  __int_type
606  fetch_add(__int_type __i,
607  memory_order __m = memory_order_seq_cst) noexcept
608  { return __atomic_fetch_add(&_M_i, __i, __m); }
609 
610  __int_type
611  fetch_add(__int_type __i,
612  memory_order __m = memory_order_seq_cst) volatile noexcept
613  { return __atomic_fetch_add(&_M_i, __i, __m); }
614 
615  __int_type
616  fetch_sub(__int_type __i,
617  memory_order __m = memory_order_seq_cst) noexcept
618  { return __atomic_fetch_sub(&_M_i, __i, __m); }
619 
620  __int_type
621  fetch_sub(__int_type __i,
622  memory_order __m = memory_order_seq_cst) volatile noexcept
623  { return __atomic_fetch_sub(&_M_i, __i, __m); }
624 
625  __int_type
626  fetch_and(__int_type __i,
627  memory_order __m = memory_order_seq_cst) noexcept
628  { return __atomic_fetch_and(&_M_i, __i, __m); }
629 
630  __int_type
631  fetch_and(__int_type __i,
632  memory_order __m = memory_order_seq_cst) volatile noexcept
633  { return __atomic_fetch_and(&_M_i, __i, __m); }
634 
635  __int_type
636  fetch_or(__int_type __i,
637  memory_order __m = memory_order_seq_cst) noexcept
638  { return __atomic_fetch_or(&_M_i, __i, __m); }
639 
640  __int_type
641  fetch_or(__int_type __i,
642  memory_order __m = memory_order_seq_cst) volatile noexcept
643  { return __atomic_fetch_or(&_M_i, __i, __m); }
644 
645  __int_type
646  fetch_xor(__int_type __i,
647  memory_order __m = memory_order_seq_cst) noexcept
648  { return __atomic_fetch_xor(&_M_i, __i, __m); }
649 
650  __int_type
651  fetch_xor(__int_type __i,
652  memory_order __m = memory_order_seq_cst) volatile noexcept
653  { return __atomic_fetch_xor(&_M_i, __i, __m); }
654  };
655 
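Illustrative use (not part of the header): the integral specializations of std::atomic derive from __atomic_base, so a typical compare-and-swap retry loop exercises the operations above.

#include <atomic>

std::atomic<int> value(0);

void add_if_even(int delta)
{
  int old = value.load(std::memory_order_relaxed);
  // On failure, compare_exchange_weak reloads 'old' with the current value.
  while ((old % 2) == 0
         && !value.compare_exchange_weak(old, old + delta,
                                         std::memory_order_acq_rel,
                                         std::memory_order_relaxed))
    ;
}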
656 
657  /// Partial specialization for pointer types.
658  template<typename _PTp>
659  struct __atomic_base<_PTp*>
660  {
661  private:
662  typedef _PTp* __pointer_type;
663 
664  __pointer_type _M_p;
665 
666  // Factored out to facilitate explicit specialization.
667  constexpr ptrdiff_t
668  _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }
669 
670  constexpr ptrdiff_t
671  _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }
672 
673  public:
674  __atomic_base() noexcept = default;
675  ~__atomic_base() noexcept = default;
676  __atomic_base(const __atomic_base&) = delete;
677  __atomic_base& operator=(const __atomic_base&) = delete;
678  __atomic_base& operator=(const __atomic_base&) volatile = delete;
679 
680  // Requires __pointer_type convertible to _M_p.
681  constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
682 
683  operator __pointer_type() const noexcept
684  { return load(); }
685 
686  operator __pointer_type() const volatile noexcept
687  { return load(); }
688 
689  __pointer_type
690  operator=(__pointer_type __p) noexcept
691  {
692  store(__p);
693  return __p;
694  }
695 
696  __pointer_type
697  operator=(__pointer_type __p) volatile noexcept
698  {
699  store(__p);
700  return __p;
701  }
702 
703  __pointer_type
704  operator++(int) noexcept
705  { return fetch_add(1); }
706 
707  __pointer_type
708  operator++(int) volatile noexcept
709  { return fetch_add(1); }
710 
711  __pointer_type
712  operator--(int) noexcept
713  { return fetch_sub(1); }
714 
715  __pointer_type
716  operator--(int) volatile noexcept
717  { return fetch_sub(1); }
718 
719  __pointer_type
720  operator++() noexcept
721  { return __atomic_add_fetch(&_M_p, _M_type_size(1),
722  memory_order_seq_cst); }
723 
724  __pointer_type
725  operator++() volatile noexcept
726  { return __atomic_add_fetch(&_M_p, _M_type_size(1),
727  memory_order_seq_cst); }
728 
729  __pointer_type
730  operator--() noexcept
731  { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
732  memory_order_seq_cst); }
733 
734  __pointer_type
735  operator--() volatile noexcept
736  { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
737  memory_order_seq_cst); }
738 
739  __pointer_type
740  operator+=(ptrdiff_t __d) noexcept
741  { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
742  memory_order_seq_cst); }
743 
744  __pointer_type
745  operator+=(ptrdiff_t __d) volatile noexcept
746  { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
747  memory_order_seq_cst); }
748 
749  __pointer_type
750  operator-=(ptrdiff_t __d) noexcept
751  { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
752  memory_order_seq_cst); }
753 
754  __pointer_type
755  operator-=(ptrdiff_t __d) volatile noexcept
756  { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
757  memory_order_seq_cst); }
758 
759  bool
760  is_lock_free() const noexcept
761  { return __atomic_is_lock_free(_M_type_size(1), nullptr); }
762 
763  bool
764  is_lock_free() const volatile noexcept
765  { return __atomic_is_lock_free(_M_type_size(1), nullptr); }
766 
767  void
768  store(__pointer_type __p,
769  memory_order __m = memory_order_seq_cst) noexcept
770  {
771  memory_order __b = __m & __memory_order_mask;
772 
773  __glibcxx_assert(__b != memory_order_acquire);
774  __glibcxx_assert(__b != memory_order_acq_rel);
775  __glibcxx_assert(__b != memory_order_consume);
776 
777  __atomic_store_n(&_M_p, __p, __m);
778  }
779 
780  void
781  store(__pointer_type __p,
782  memory_order __m = memory_order_seq_cst) volatile noexcept
783  {
784  memory_order __b = __m & __memory_order_mask;
785  __glibcxx_assert(__b != memory_order_acquire);
786  __glibcxx_assert(__b != memory_order_acq_rel);
787  __glibcxx_assert(__b != memory_order_consume);
788 
789  __atomic_store_n(&_M_p, __p, __m);
790  }
791 
792  __pointer_type
793  load(memory_order __m = memory_order_seq_cst) const noexcept
794  {
795  memory_order __b = __m & __memory_order_mask;
796  __glibcxx_assert(__b != memory_order_release);
797  __glibcxx_assert(__b != memory_order_acq_rel);
798 
799  return __atomic_load_n(&_M_p, __m);
800  }
801 
802  __pointer_type
803  load(memory_order __m = memory_order_seq_cst) const volatile noexcept
804  {
805  memory_order __b = __m & __memory_order_mask;
806  __glibcxx_assert(__b != memory_order_release);
807  __glibcxx_assert(__b != memory_order_acq_rel);
808 
809  return __atomic_load_n(&_M_p, __m);
810  }
811 
812  __pointer_type
813  exchange(__pointer_type __p,
814  memory_order __m = memory_order_seq_cst) noexcept
815  {
816  return __atomic_exchange_n(&_M_p, __p, __m);
817  }
818 
819 
820  __pointer_type
821  exchange(__pointer_type __p,
822  memory_order __m = memory_order_seq_cst) volatile noexcept
823  {
824  return __atomic_exchange_n(&_M_p, __p, __m);
825  }
826 
827  bool
828  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
829  memory_order __m1,
830  memory_order __m2) noexcept
831  {
832  memory_order __b2 = __m2 & __memory_order_mask;
833  memory_order __b1 = __m1 & __memory_order_mask;
834  __glibcxx_assert(__b2 != memory_order_release);
835  __glibcxx_assert(__b2 != memory_order_acq_rel);
836  __glibcxx_assert(__b2 <= __b1);
837 
838  return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
839  }
840 
841  bool
842  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
843  memory_order __m1,
844  memory_order __m2) volatile noexcept
845  {
846  memory_order __b2 = __m2 & __memory_order_mask;
847  memory_order __b1 = __m1 & __memory_order_mask;
848 
849  __glibcxx_assert(__b2 != memory_order_release);
850  __glibcxx_assert(__b2 != memory_order_acq_rel);
851  __glibcxx_assert(__b2 <= __b1);
852 
853  return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
854  }
855 
856  __pointer_type
857  fetch_add(ptrdiff_t __d,
858  memory_order __m = memory_order_seq_cst) noexcept
859  { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
860 
861  __pointer_type
862  fetch_add(ptrdiff_t __d,
863  memory_order __m = memory_order_seq_cst) volatile noexcept
864  { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
865 
866  __pointer_type
867  fetch_sub(ptrdiff_t __d,
868  memory_order __m = memory_order_seq_cst) noexcept
869  { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
870 
871  __pointer_type
872  fetch_sub(ptrdiff_t __d,
873  memory_order __m = memory_order_seq_cst) volatile noexcept
874  { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
875  };
876 
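Illustrative use (not part of the header): the pointer specialization scales arithmetic by sizeof(_PTp) via _M_type_size, so fetch_add advances by whole elements rather than bytes.

#include <atomic>

int buffer[64];
std::atomic<int*> cursor(buffer);

int* claim_four()
{
  // Returns the previous cursor and advances it by four ints (not bytes).
  return cursor.fetch_add(4, std::memory_order_relaxed);
}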
877  // @} group atomics
878 
879 _GLIBCXX_END_NAMESPACE_VERSION
880 } // namespace std
881 
882 #endif