// -*- C++ -*- header.

// Copyright (C) 2008-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((always_inline))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }
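
  // Illustrative sketch (not part of this header): the HLE modifiers above
  // are meant to be OR-ed onto a memory_order so that hardware lock elision
  // hints reach the underlying __atomic built-ins.  Whether elision actually
  // happens depends on the target; the lock variable and loop below are
  // hypothetical user code.
  //
  // @code
  //   std::atomic<int> __lock{0};
  //
  //   while (__lock.exchange(1, std::memory_order_acquire
  //                             | std::__memory_order_hle_acquire))
  //     { }                                  // spin until the lock is free
  //   // critical section protected by __lock
  //   __lock.store(0, std::memory_order_release
  //                   | std::__memory_order_hle_release);
  // @endcode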

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
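
  // For example (purely illustrative), the success order memory_order_acq_rel
  // yields a failure order of memory_order_acquire, memory_order_release
  // yields memory_order_relaxed, and the remaining orders map to themselves:
  //
  // @code
  //   static_assert(__cmpexch_failure_order(memory_order_acq_rel)
  //                 == memory_order_acquire, "");
  //   static_assert(__cmpexch_failure_order(memory_order_release)
  //                 == memory_order_relaxed, "");
  //   static_assert(__cmpexch_failure_order(memory_order_seq_cst)
  //                 == memory_order_seq_cst, "");
  // @endcode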

  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
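
  // Illustrative sketch of fence-based synchronization (hypothetical user
  // code, not part of this header): a release fence before a relaxed store
  // pairs with an acquire fence after a relaxed load that observes it.
  //
  // @code
  //   int __data;                            // assumed shared variables
  //   std::atomic<bool> __ready{false};
  //
  //   // writer thread
  //   __data = 42;
  //   std::atomic_thread_fence(std::memory_order_release);
  //   __ready.store(true, std::memory_order_relaxed);
  //
  //   // reader thread
  //   while (!__ready.load(std::memory_order_relaxed))
  //     { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   // __data is guaranteed to read 42 here
  // @endcode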

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }


  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>                   atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>            atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>          atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>                  atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>         atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                    atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>           atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>                   atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>          atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>              atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>     atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>                atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>               atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>               atomic_char32_t;

  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>           atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>          atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>          atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>         atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>          atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>         atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>          atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>         atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>            atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>           atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>           atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>          atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>           atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>          atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>           atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>          atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>               atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>              atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>                 atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>               atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>              atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>              atomic_ptrdiff_t;


#define ATOMIC_VAR_INIT(_VI) { _VI }
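
  // Illustrative use of the initialization macro (hypothetical user code,
  // shown only as a sketch):
  //
  // @code
  //   std::atomic<int> __counter = ATOMIC_VAR_INIT(0);
  // @endcode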

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

    /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    typedef bool __atomic_flag_data_type;
#else
    typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
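
  // Illustrative sketch: atomic_flag is the minimal, always lock-free
  // primitive, and a common use is a simple spin lock.  The names below are
  // hypothetical user code, not part of this header.
  //
  // @code
  //   std::atomic_flag __guard = ATOMIC_FLAG_INIT;
  //   int __shared_count = 0;
  //
  //   void __bump()
  //   {
  //     while (__guard.test_and_set(std::memory_order_acquire))
  //       { }                        // spin: the flag was already set
  //     ++__shared_count;            // exclusive access
  //     __guard.clear(std::memory_order_release);
  //   }
  // @endcode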


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
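
  // Illustrative sketch of the usual compare_exchange_weak retry loop on one
  // of the integral atomics above (hypothetical user code): on failure the
  // expected value is refreshed with the current contents, so the loop simply
  // retries until the update applies.
  //
  // @code
  //   std::atomic<int> __v{1};
  //
  //   int __expected = __v.load(std::memory_order_relaxed);
  //   while (!__v.compare_exchange_weak(__expected, __expected * 2,
  //                                     std::memory_order_acq_rel,
  //                                     std::memory_order_relaxed))
  //     { }                        // __expected now holds the fresh value
  // @endcode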


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
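
  // Illustrative sketch: arithmetic on the pointer specialization is scaled
  // by sizeof(_PTp) via _M_type_size, matching ordinary pointer arithmetic.
  // The buffer and names below are hypothetical user code.
  //
  // @code
  //   int __buf[4] = { 0, 1, 2, 3 };
  //   std::atomic<int*> __cursor{__buf};
  //
  //   int* __prev = __cursor.fetch_add(2);   // returns __buf, stores __buf + 2
  //   int* __now  = __cursor.load();         // &__buf[2]
  // @endcode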

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif