// -*- C++ -*- header.

// Copyright (C) 2008-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#endif

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool>	_M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
	     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
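
  // Usage sketch (illustrative comment only, not compiled as part of this
  // header; the names below are hypothetical).  atomic_bool offers
  // load/store/exchange/compare-exchange but, unlike the integral atomics,
  // no fetch-operations or arithmetic operators:
  //
  //   #include <atomic>
  //
  //   std::atomic_bool ready(false);
  //
  //   void publish()
  //   { ready.store(true, std::memory_order_release); }
  //
  //   bool consume_once()
  //   {
  //     bool expected = true;
  //     // Strong CAS: succeeds at most once per publication.
  //     return ready.compare_exchange_strong(expected, false);
  //   }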


  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
	__atomic_load(&_M_i, &tmp, _m);
	return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
	__atomic_load(&_M_i, &tmp, _m);
	return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
	__atomic_exchange(&_M_i, &__i, &tmp, _m);
	return tmp;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
	__atomic_exchange(&_M_i, &__i, &tmp, _m);
	return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
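
  // Usage sketch for the primary template (illustrative comment only; the
  // struct and function names are hypothetical).  Any trivially copyable
  // type works; note that 1/2/4/8/16-byte types are aligned to at least
  // their size (see _S_min_alignment above) so the compiler can use
  // lock-free instructions where the target supports them:
  //
  //   #include <atomic>
  //
  //   struct Position { int x; int y; };        // trivially copyable
  //
  //   std::atomic<Position> pos(Position{0, 0});
  //
  //   void move_right()
  //   {
  //     Position expected = pos.load();
  //     Position desired;
  //     do
  //       desired = Position{expected.x + 1, expected.y};
  //     while (!pos.compare_exchange_weak(expected, desired));
  //   }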


  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* 			__pointer_type;
      typedef __atomic_base<_Tp*>	__base_type;
      __base_type			_M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

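      // NB: the weak compare-exchange overloads below forward to the base
      // class's strong operation.  This is conforming: compare_exchange_weak
      // is permitted, but not required, to fail spuriously.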
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
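
  // Usage sketch for the pointer specialization (illustrative comment only;
  // names are hypothetical).  Arithmetic is in units of the pointed-to type,
  // so fetch_add(1) on an int* advances by sizeof(int) bytes and returns the
  // previous pointer, giving concurrent callers distinct slots:
  //
  //   #include <atomic>
  //
  //   int buffer[64];
  //   std::atomic<int*> cursor(buffer);
  //
  //   int* claim_slot()
  //   { return cursor.fetch_add(1, std::memory_order_relaxed); }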


  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool 			__integral_type;
      typedef atomic_bool 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char 			__integral_type;
      typedef atomic_char 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char 		__integral_type;
      typedef atomic_schar 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char 		__integral_type;
      typedef atomic_uchar 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short 			__integral_type;
      typedef atomic_short 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short 	      	__integral_type;
      typedef atomic_ushort 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : atomic_int
    {
      typedef int 			__integral_type;
      typedef atomic_int 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int		__integral_type;
      typedef atomic_uint 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long 			__integral_type;
      typedef atomic_long 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long 		__integral_type;
      typedef atomic_ulong 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long 		__integral_type;
      typedef atomic_llong 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long       	__integral_type;
      typedef atomic_ullong 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t 			__integral_type;
      typedef atomic_wchar_t 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t 			__integral_type;
      typedef atomic_char16_t 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t 			__integral_type;
      typedef atomic_char32_t 		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
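
  // The specializations above simply re-export the interface of the
  // corresponding base types (atomic_int, atomic_uint, ...), so e.g.
  // std::atomic<int> supports the full set of fetch-operations and
  // operators.  Illustrative comment only; names are hypothetical:
  //
  //   #include <atomic>
  //
  //   std::atomic<int> counter(0);
  //
  //   void worker()
  //   {
  //     ++counter;                                       // seq_cst RMW
  //     counter.fetch_add(2, std::memory_order_relaxed); // relaxed RMW
  //   }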


  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
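
  // These functions are the free-function spelling of
  // atomic_flag::test_and_set / clear.  A classic use is a minimal spin
  // lock (illustrative comment only; names are hypothetical):
  //
  //   #include <atomic>
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(
  //              &lock_flag, std::memory_order_acquire))
  //       { /* spin */ }
  //   }
  //
  //   void unlock()
  //   { std::atomic_flag_clear_explicit(&lock_flag,
  //                                     std::memory_order_release); }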


  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }
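
  // The *_explicit forms take explicit memory orders; the unsuffixed forms
  // above default to memory_order_seq_cst.  A typical compare-and-swap loop
  // written with the free functions (illustrative comment only; names are
  // hypothetical):
  //
  //   #include <atomic>
  //
  //   std::atomic<long> total(0);
  //
  //   void add_clamped(long v, long max)
  //   {
  //     long cur = std::atomic_load(&total);
  //     long next;
  //     do
  //       next = (cur + v > max) ? max : cur + v;
  //     while (!std::atomic_compare_exchange_weak(&total, &cur, next));
  //     // On failure, cur is updated to the currently stored value.
  //   }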

  // Function templates for atomic_integral operations only, using
  // __atomic_base. Template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
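
  // The bitwise fetch-operations are only provided for the integral atomics.
  // Setting a flag bit in a shared mask, for example (illustrative comment
  // only; names are hypothetical, and the call relies on the usual
  // derived-to-base pointer deduction to __atomic_base):
  //
  //   #include <atomic>
  //
  //   std::atomic<unsigned> flags(0u);
  //
  //   void set_flag(unsigned bit)
  //   { std::atomic_fetch_or(&flags, 1u << bit); }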


  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
			      ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif