// -*- C++ -*- header.

// Copyright (C) 2008, 2009, 2010, 2011
// Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_0.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_0_H
#define _GLIBCXX_ATOMIC_0_H 1

#pragma GCC system_header

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

// 0 == __atomic0 == Never lock-free
namespace __atomic0
{
  _GLIBCXX_BEGIN_EXTERN_C
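
  // These three helpers implement the locking protocol the macros
  // below rely on: __atomic_flag_for_address() maps an object's
  // address to a shared __atomic_flag_base used as a spin lock,
  // __atomic_flag_wait_explicit() blocks until that lock is acquired,
  // and atomic_flag_clear_explicit() releases it.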
  void
  atomic_flag_clear_explicit(__atomic_flag_base*, memory_order)
  _GLIBCXX_NOTHROW;

  void
  __atomic_flag_wait_explicit(__atomic_flag_base*, memory_order)
  _GLIBCXX_NOTHROW;

  _GLIBCXX_CONST __atomic_flag_base*
  __atomic_flag_for_address(const volatile void* __z) _GLIBCXX_NOTHROW;

  _GLIBCXX_END_EXTERN_C

  // Implementation specific defines.  _ATOMIC_MEMBER_ names the data
  // member the locking macros below operate on.
#define _ATOMIC_MEMBER_ _M_i

  // Load: acquire the per-object lock, read the value, release the
  // lock, and yield the value read.
#define _ATOMIC_LOAD_(__a, __x)						\
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;			\
    __i_type* __p = &_ATOMIC_MEMBER_;					\
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);		\
    __atomic_flag_wait_explicit(__g, __x);				\
    __i_type __r = *__p;						\
    atomic_flag_clear_explicit(__g, __x);				\
    __r; })

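  // Store: acquire the per-object lock, write the new value, release
  // the lock.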
#define _ATOMIC_STORE_(__a, __n, __x)					\
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;			\
    __i_type* __p = &_ATOMIC_MEMBER_;					\
    __typeof__(__n) __w = (__n);					\
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);		\
    __atomic_flag_wait_explicit(__g, __x);				\
    *__p = __w;								\
    atomic_flag_clear_explicit(__g, __x);				\
    __w; })

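  // Read-modify-write: acquire the lock, save the old value, apply
  // operator __o (+=, -=, &=, ...) with operand __n, release the
  // lock, and yield the old value.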
#define _ATOMIC_MODIFY_(__a, __o, __n, __x)				\
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;			\
    __i_type* __p = &_ATOMIC_MEMBER_;					\
    __typeof__(__n) __w = (__n);					\
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);		\
    __atomic_flag_wait_explicit(__g, __x);				\
    __i_type __r = *__p;						\
    *__p __o __w;							\
    atomic_flag_clear_explicit(__g, __x);				\
    __r; })

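  // Compare-and-swap: acquire the lock; if the current value equals
  // *__e, store __n and yield true, otherwise copy the current value
  // back out through __e and yield false.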
#define _ATOMIC_CMPEXCHNG_(__a, __e, __n, __x)				\
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;			\
    __i_type* __p = &_ATOMIC_MEMBER_;					\
    __typeof__(__e) __q = (__e);					\
    __typeof__(__n) __w = (__n);					\
    bool __r;								\
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);		\
    __atomic_flag_wait_explicit(__g, __x);				\
    __i_type __t = *__p;						\
    if (*__q == __t)							\
      {									\
	*__p = (__i_type)__w;						\
	__r = true;							\
      }									\
    else { *__q = __t; __r = false; }					\
    atomic_flag_clear_explicit(__g, __x);				\
    __r; })

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() = default;
    ~atomic_flag() = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Construction from ATOMIC_FLAG_INIT.
    atomic_flag(bool __i): __atomic_flag_base({ __i }) { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst);

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile;

    void
    clear(memory_order __m = memory_order_seq_cst);

    void
    clear(memory_order __m = memory_order_seq_cst) volatile;
  };
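
  // Usage sketch (illustrative comment only, not part of this
  // header): the test_and_set/clear pair is enough to build a spin
  // lock, which mirrors how the locking macros above use the
  // per-address flags.
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //   while (lock.test_and_set(std::memory_order_acquire))
  //     { }                      // spin until the flag was clear
  //   // ... critical section ...
  //   lock.clear(std::memory_order_release);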

  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t

  // Base type.
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or 8 bytes,
  // since that is what GCC built-in functions for atomic memory access work on.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp	__int_type;

      __int_type	_M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i): _M_i (__i) { }

      operator __int_type() const
      { return load(); }

      operator __int_type() const volatile
      { return load(); }

      __int_type
      operator=(__int_type __i)
      {
	store(__i);
	return __i;
      }

      __int_type
      operator=(__int_type __i) volatile
      {
	store(__i);
	return __i;
      }

      __int_type
      operator++(int)
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile
      { return fetch_add(1); }

      __int_type
      operator--(int)
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile
      { return fetch_sub(1); }

      __int_type
      operator++()
      { return fetch_add(1) + 1; }

      __int_type
      operator++() volatile
      { return fetch_add(1) + 1; }

      __int_type
      operator--()
      { return fetch_sub(1) - 1; }

      __int_type
      operator--() volatile
      { return fetch_sub(1) - 1; }

      __int_type
      operator+=(__int_type __i)
      { return fetch_add(__i) + __i; }

      __int_type
      operator+=(__int_type __i) volatile
      { return fetch_add(__i) + __i; }

      __int_type
      operator-=(__int_type __i)
      { return fetch_sub(__i) - __i; }

      __int_type
      operator-=(__int_type __i) volatile
      { return fetch_sub(__i) - __i; }

      __int_type
      operator&=(__int_type __i)
      { return fetch_and(__i) & __i; }

      __int_type
      operator&=(__int_type __i) volatile
      { return fetch_and(__i) & __i; }

      __int_type
      operator|=(__int_type __i)
      { return fetch_or(__i) | __i; }

      __int_type
      operator|=(__int_type __i) volatile
      { return fetch_or(__i) | __i; }

      __int_type
      operator^=(__int_type __i)
      { return fetch_xor(__i) ^ __i; }

      __int_type
      operator^=(__int_type __i) volatile
      { return fetch_xor(__i) ^ __i; }

      bool
      is_lock_free() const
      { return false; }

      bool
      is_lock_free() const volatile
      { return false; }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	_ATOMIC_STORE_(this, __i, __m);
      }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	_ATOMIC_STORE_(this, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	return _ATOMIC_LOAD_(this, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	return _ATOMIC_LOAD_(this, __m);
      }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2)
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2) volatile
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst)
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst) volatile
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2)
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2) volatile
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst)
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst) volatile
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __calculate_memory_order(__m));
      }

      __int_type
      fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
    };
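
  // Usage sketch (illustrative comment only, not part of this
  // header): the atomic integral types listed above are built on this
  // template, so e.g.
  //
  //   __atomic0::__atomic_base<int> counter(0);
  //   counter.fetch_add(5);           // returns the old value, 0
  //   counter += 2;                   // yields the new value, 7
  //   int snapshot = counter.load(std::memory_order_acquire);
  //
  // Each operation serializes through the per-address spin lock,
  // which is why is_lock_free() always returns false in __atomic0.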

  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*	__return_pointer_type;
      typedef void*	__pointer_type;
      __pointer_type	_M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_i.
      constexpr __atomic_base(__return_pointer_type __p): _M_i (__p) { }

      operator __return_pointer_type() const
      { return reinterpret_cast<__return_pointer_type>(load()); }

      operator __return_pointer_type() const volatile
      { return reinterpret_cast<__return_pointer_type>(load()); }

      __return_pointer_type
      operator=(__pointer_type __p)
      {
	store(__p);
	return reinterpret_cast<__return_pointer_type>(__p);
      }

      __return_pointer_type
      operator=(__pointer_type __p) volatile
      {
	store(__p);
	return reinterpret_cast<__return_pointer_type>(__p);
      }

      __return_pointer_type
      operator++(int)
      { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }

      __return_pointer_type
      operator++(int) volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }

      __return_pointer_type
      operator--(int)
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }

      __return_pointer_type
      operator--(int) volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }

      __return_pointer_type
      operator++()
      { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }

      __return_pointer_type
      operator++() volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }

      __return_pointer_type
      operator--()
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }

      __return_pointer_type
      operator--() volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }

      __return_pointer_type
      operator+=(ptrdiff_t __d)
      { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }

      __return_pointer_type
      operator+=(ptrdiff_t __d) volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }

      __return_pointer_type
      operator-=(ptrdiff_t __d)
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }

      __return_pointer_type
      operator-=(ptrdiff_t __d) volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }

      bool
      is_lock_free() const
      { return true; }

      bool
      is_lock_free() const volatile
      { return true; }

      void
      store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	_ATOMIC_STORE_(this, __p, __m);
      }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	volatile __pointer_type* __p2 = &_M_i;
	__typeof__(__p) __w = (__p);
	__atomic_flag_base* __g = __atomic_flag_for_address(__p2);
	__atomic_flag_wait_explicit(__g, __m);
	*__p2 = reinterpret_cast<__pointer_type>(__w);
	atomic_flag_clear_explicit(__g, __m);
      }

      __return_pointer_type
      load(memory_order __m = memory_order_seq_cst) const
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	void* __v = _ATOMIC_LOAD_(this, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	void* __v = _ATOMIC_LOAD_(this, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
      {
	void* __v = _ATOMIC_MODIFY_(this, =, __p, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile
      {
	volatile __pointer_type* __p2 = &_M_i;
	__typeof__(__p) __w = (__p);
	__atomic_flag_base* __g = __atomic_flag_for_address(__p2);
	__atomic_flag_wait_explicit(__g, __m);
	__pointer_type __r = *__p2;
	*__p2 = __w;
	atomic_flag_clear_explicit(__g, __m);
	// Return the value observed while the lock was held, not a
	// fresh unsynchronized read of _M_i.
	return reinterpret_cast<__return_pointer_type>(__r);
      }

      bool
      compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2)
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	__pointer_type& __p1 = reinterpret_cast<void*&>(__rp1);
	return _ATOMIC_CMPEXCHNG_(this, &__p1, __p2, __m1);
      }

      bool
      compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) volatile
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	__pointer_type& __p1 = reinterpret_cast<void*&>(__rp1);
	return _ATOMIC_CMPEXCHNG_(this, &__p1, __p2, __m1);
      }

      __return_pointer_type
      fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      {
	void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile
      {
	void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      {
	void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile
      {
	void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }
    };
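
  // Note: _M_i is a plain void*, so the += / -= inside fetch_add and
  // fetch_sub rely on the GNU extension that permits byte-granular
  // void* arithmetic.  A ptrdiff_t operand is applied as a raw byte
  // offset at this layer, not scaled by sizeof(_PTp).  A sketch of
  // the consequence, assuming this base is used directly:
  //
  //   int buf[4] = { };
  //   __atomic0::__atomic_base<int*> p(buf);
  //   p.fetch_add(sizeof(int));   // steps one int element, in bytes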

#undef _ATOMIC_LOAD_
#undef _ATOMIC_STORE_
#undef _ATOMIC_MODIFY_
#undef _ATOMIC_CMPEXCHNG_
} // namespace __atomic0

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif