// -*- C++ -*- header.

// Copyright (C) 2008-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#endif

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool>	_M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
	     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
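
  // Illustrative sketch (not part of this header): publishing a value from
  // one thread to another through an atomic_bool with release/acquire
  // ordering.  The names `payload' and `ready' and the use of <thread> and
  // <cassert> are assumptions made for the example only.
  //
  //   int payload = 0;
  //   std::atomic_bool ready(false);
  //
  //   std::thread producer([&] {
  //     payload = 42;
  //     ready.store(true, std::memory_order_release);
  //   });
  //   std::thread consumer([&] {
  //     while (!ready.load(std::memory_order_acquire))
  //       { }                        // spin until the store is visible
  //     assert(payload == 42);       // guaranteed by release/acquire
  //   });
  //   producer.join();
  //   consumer.join();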


  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
	__atomic_load(&_M_i, &tmp, _m);
	return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
	__atomic_load(&_M_i, &tmp, _m);
	return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
	__atomic_exchange(&_M_i, &__i, &tmp, _m);
	return tmp;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
	__atomic_exchange(&_M_i, &__i, &tmp, _m);
	return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
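
  // Illustrative sketch (not part of this header): the primary template
  // accepts any trivially copyable type; `Point' below is an assumed example
  // type.  Whether such an object is lock-free depends on its size and the
  // target, which is what is_lock_free() reports.
  //
  //   struct Point { int x; int y; };        // trivially copyable
  //   std::atomic<Point> p(Point{0, 0});
  //
  //   Point expected = p.load();
  //   Point desired  = { 1, 2 };
  //   while (!p.compare_exchange_weak(expected, desired))
  //     { }   // on failure, `expected' is reloaded with the current value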


  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*			__pointer_type;
      typedef __atomic_base<_Tp*>	__base_type;
      __base_type			_M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      // NB: the weak forms below are implemented with the strong primitive;
      // a strong compare-exchange never fails spuriously and therefore also
      // satisfies the weaker contract.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
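
  // Illustrative sketch (not part of this header): fetch_add on an
  // atomic<T*> steps the pointer in units of T, like ordinary pointer
  // arithmetic.  `buf', `cursor' and `slot' are assumed names.
  //
  //   int buf[8] = { };
  //   std::atomic<int*> cursor(buf);
  //
  //   int* slot = cursor.fetch_add(1);   // returns the old pointer and
  //   *slot = 7;                         // advances cursor by one int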


  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool			__integral_type;
      typedef atomic_bool		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char			__integral_type;
      typedef atomic_char		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char		__integral_type;
      typedef atomic_schar		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char		__integral_type;
      typedef atomic_uchar		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short			__integral_type;
      typedef atomic_short		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short		__integral_type;
      typedef atomic_ushort		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : public atomic_int
    {
      typedef int			__integral_type;
      typedef atomic_int		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int		__integral_type;
      typedef atomic_uint		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long			__integral_type;
      typedef atomic_long		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long		__integral_type;
      typedef atomic_ulong		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long			__integral_type;
      typedef atomic_llong		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long	__integral_type;
      typedef atomic_ullong		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t			__integral_type;
      typedef atomic_wchar_t		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t			__integral_type;
      typedef atomic_char16_t		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t			__integral_type;
      typedef atomic_char32_t		__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
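
  // Illustrative sketch (not part of this header): the integral
  // specializations inherit the operators and fetch-operations of their
  // __atomic_base counterparts.  `counter' is an assumed name.
  //
  //   std::atomic<int> counter(0);
  //   ++counter;                                        // seq_cst RMW
  //   counter.fetch_add(4, std::memory_order_relaxed);  // relaxed RMW
  //   int snapshot = counter.load();                    // reads 5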


  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
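
  // Illustrative sketch (not part of this header): the classic test-and-set
  // spin lock built from the atomic_flag free functions.  `lock' and
  // `critical_section' are assumed names for the example.
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //
  //   void critical_section()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(
  //              &lock, std::memory_order_acquire))
  //       { }                        // spin until the flag was clear
  //     // ... exclusive work ...
  //     std::atomic_flag_clear_explicit(&lock, std::memory_order_release);
  //   }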


  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }
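
  // Illustrative sketch (not part of this header): the C-compatible free
  // functions take pointers to both the atomic object and the expected
  // value.  `a' and `expected' are assumed names for the example.
  //
  //   std::atomic<int> a(1);
  //   int expected = 1;
  //   if (std::atomic_compare_exchange_strong(&a, &expected, 2))
  //     { }   // success: a is now 2
  //   else
  //     { }   // failure: `expected' now holds a's current value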

  // Function templates for atomic_integral operations only, using
  // __atomic_base.  The template argument should be restricted to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
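
  // Illustrative sketch (not part of this header): the _explicit forms take
  // a memory_order argument, the plain forms use memory_order_seq_cst.
  // `hits' is an assumed name for the example.
  //
  //   std::atomic_uint hits(0);
  //   std::atomic_fetch_add_explicit(&hits, 1u, std::memory_order_relaxed);
  //   unsigned before = std::atomic_fetch_add(&hits, 1u);   // returns 1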


  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
			      ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
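
  // Illustrative sketch (not part of this header): the pointer overloads
  // take a ptrdiff_t and scale it by the pointee size, matching the member
  // functions above.  `samples' and `tail' are assumed names.
  //
  //   double samples[16] = { };
  //   std::atomic<double*> tail(samples);
  //   std::atomic_fetch_add(&tail, 2);   // tail now points at samples + 2
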
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif