// Scraped viewer navigation (not part of the source): Home | History | Annotate | Download | only in include
      1 // -*- C++ -*-
      2 //===--------------------------- atomic -----------------------------------===//
      3 //
      4 //                     The LLVM Compiler Infrastructure
      5 //
      6 // This file is distributed under the University of Illinois Open Source
      7 // License. See LICENSE.TXT for details.
      8 //
      9 //===----------------------------------------------------------------------===//
     10 
     11 #ifndef _LIBCPP_ATOMIC
     12 #define _LIBCPP_ATOMIC
     13 
     14 /*
     15     atomic synopsis
     16 
     17 namespace std
     18 {
     19 
     20 // order and consistency
     21 
     22 typedef enum memory_order
     23 {
     24     memory_order_relaxed,
     25     memory_order_consume,  // load-consume
     26     memory_order_acquire,  // load-acquire
     27     memory_order_release,  // store-release
     28     memory_order_acq_rel,  // store-release load-acquire
     29     memory_order_seq_cst   // store-release load-acquire
     30 } memory_order;
     31 
     32 template <class T> T kill_dependency(T y) noexcept;
     33 
     34 // lock-free property
     35 
     36 #define ATOMIC_BOOL_LOCK_FREE unspecified
     37 #define ATOMIC_CHAR_LOCK_FREE unspecified
     38 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
     39 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
     40 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
     41 #define ATOMIC_SHORT_LOCK_FREE unspecified
     42 #define ATOMIC_INT_LOCK_FREE unspecified
     43 #define ATOMIC_LONG_LOCK_FREE unspecified
     44 #define ATOMIC_LLONG_LOCK_FREE unspecified
     45 #define ATOMIC_POINTER_LOCK_FREE unspecified
     46 
     47 // flag type and operations
     48 
     49 typedef struct atomic_flag
     50 {
     51     bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
     52     bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
     53     void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
     54     void clear(memory_order m = memory_order_seq_cst) noexcept;
     55     atomic_flag()  noexcept = default;
     56     atomic_flag(const atomic_flag&) = delete;
     57     atomic_flag& operator=(const atomic_flag&) = delete;
     58     atomic_flag& operator=(const atomic_flag&) volatile = delete;
     59 } atomic_flag;
     60 
     61 bool
     62     atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
     63 
     64 bool
     65     atomic_flag_test_and_set(atomic_flag* obj) noexcept;
     66 
     67 bool
     68     atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
     69                                       memory_order m) noexcept;
     70 
     71 bool
     72     atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
     73 
     74 void
     75     atomic_flag_clear(volatile atomic_flag* obj) noexcept;
     76 
     77 void
     78     atomic_flag_clear(atomic_flag* obj) noexcept;
     79 
     80 void
     81     atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
     82 
     83 void
     84     atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
     85 
     86 #define ATOMIC_FLAG_INIT see below
     87 #define ATOMIC_VAR_INIT(value) see below
     88 
     89 template <class T>
     90 struct atomic
     91 {
     92     bool is_lock_free() const volatile noexcept;
     93     bool is_lock_free() const noexcept;
     94     void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
     95     void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
     96     T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
     97     T load(memory_order m = memory_order_seq_cst) const noexcept;
     98     operator T() const volatile noexcept;
     99     operator T() const noexcept;
    100     T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    101     T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    102     bool compare_exchange_weak(T& expc, T desr,
    103                                memory_order s, memory_order f) volatile noexcept;
    104     bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    105     bool compare_exchange_strong(T& expc, T desr,
    106                                  memory_order s, memory_order f) volatile noexcept;
    107     bool compare_exchange_strong(T& expc, T desr,
    108                                  memory_order s, memory_order f) noexcept;
    109     bool compare_exchange_weak(T& expc, T desr,
    110                                memory_order m = memory_order_seq_cst) volatile noexcept;
    111     bool compare_exchange_weak(T& expc, T desr,
    112                                memory_order m = memory_order_seq_cst) noexcept;
    113     bool compare_exchange_strong(T& expc, T desr,
    114                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    115     bool compare_exchange_strong(T& expc, T desr,
    116                                  memory_order m = memory_order_seq_cst) noexcept;
    117 
    118     atomic() noexcept = default;
    119     constexpr atomic(T desr) noexcept;
    120     atomic(const atomic&) = delete;
    121     atomic& operator=(const atomic&) = delete;
    122     atomic& operator=(const atomic&) volatile = delete;
    123     T operator=(T) volatile noexcept;
    124     T operator=(T) noexcept;
    125 };
    126 
    127 template <>
    128 struct atomic<integral>
    129 {
    130     bool is_lock_free() const volatile noexcept;
    131     bool is_lock_free() const noexcept;
    132     void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    133     void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    134     integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    135     integral load(memory_order m = memory_order_seq_cst) const noexcept;
    136     operator integral() const volatile noexcept;
    137     operator integral() const noexcept;
    138     integral exchange(integral desr,
    139                       memory_order m = memory_order_seq_cst) volatile noexcept;
    140     integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    141     bool compare_exchange_weak(integral& expc, integral desr,
    142                                memory_order s, memory_order f) volatile noexcept;
    143     bool compare_exchange_weak(integral& expc, integral desr,
    144                                memory_order s, memory_order f) noexcept;
    145     bool compare_exchange_strong(integral& expc, integral desr,
    146                                  memory_order s, memory_order f) volatile noexcept;
    147     bool compare_exchange_strong(integral& expc, integral desr,
    148                                  memory_order s, memory_order f) noexcept;
    149     bool compare_exchange_weak(integral& expc, integral desr,
    150                                memory_order m = memory_order_seq_cst) volatile noexcept;
    151     bool compare_exchange_weak(integral& expc, integral desr,
    152                                memory_order m = memory_order_seq_cst) noexcept;
    153     bool compare_exchange_strong(integral& expc, integral desr,
    154                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    155     bool compare_exchange_strong(integral& expc, integral desr,
    156                                  memory_order m = memory_order_seq_cst) noexcept;
    157 
    158     integral
    159         fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    160     integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    161     integral
    162         fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    163     integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    164     integral
    165         fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    166     integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    167     integral
    168         fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    169     integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    170     integral
    171         fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    172     integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
    173 
    174     atomic() noexcept = default;
    175     constexpr atomic(integral desr) noexcept;
    176     atomic(const atomic&) = delete;
    177     atomic& operator=(const atomic&) = delete;
    178     atomic& operator=(const atomic&) volatile = delete;
    179     integral operator=(integral desr) volatile noexcept;
    180     integral operator=(integral desr) noexcept;
    181 
    182     integral operator++(int) volatile noexcept;
    183     integral operator++(int) noexcept;
    184     integral operator--(int) volatile noexcept;
    185     integral operator--(int) noexcept;
    186     integral operator++() volatile noexcept;
    187     integral operator++() noexcept;
    188     integral operator--() volatile noexcept;
    189     integral operator--() noexcept;
    190     integral operator+=(integral op) volatile noexcept;
    191     integral operator+=(integral op) noexcept;
    192     integral operator-=(integral op) volatile noexcept;
    193     integral operator-=(integral op) noexcept;
    194     integral operator&=(integral op) volatile noexcept;
    195     integral operator&=(integral op) noexcept;
    196     integral operator|=(integral op) volatile noexcept;
    197     integral operator|=(integral op) noexcept;
    198     integral operator^=(integral op) volatile noexcept;
    199     integral operator^=(integral op) noexcept;
    200 };
    201 
    202 template <class T>
    203 struct atomic<T*>
    204 {
    205     bool is_lock_free() const volatile noexcept;
    206     bool is_lock_free() const noexcept;
    207     void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    208     void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    209     T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    210     T* load(memory_order m = memory_order_seq_cst) const noexcept;
    211     operator T*() const volatile noexcept;
    212     operator T*() const noexcept;
    213     T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    214     T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    215     bool compare_exchange_weak(T*& expc, T* desr,
    216                                memory_order s, memory_order f) volatile noexcept;
    217     bool compare_exchange_weak(T*& expc, T* desr,
    218                                memory_order s, memory_order f) noexcept;
    219     bool compare_exchange_strong(T*& expc, T* desr,
    220                                  memory_order s, memory_order f) volatile noexcept;
    221     bool compare_exchange_strong(T*& expc, T* desr,
    222                                  memory_order s, memory_order f) noexcept;
    223     bool compare_exchange_weak(T*& expc, T* desr,
    224                                memory_order m = memory_order_seq_cst) volatile noexcept;
    225     bool compare_exchange_weak(T*& expc, T* desr,
    226                                memory_order m = memory_order_seq_cst) noexcept;
    227     bool compare_exchange_strong(T*& expc, T* desr,
    228                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    229     bool compare_exchange_strong(T*& expc, T* desr,
    230                                  memory_order m = memory_order_seq_cst) noexcept;
    231     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    232     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    233     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    234     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    235 
    236     atomic() noexcept = default;
    237     constexpr atomic(T* desr) noexcept;
    238     atomic(const atomic&) = delete;
    239     atomic& operator=(const atomic&) = delete;
    240     atomic& operator=(const atomic&) volatile = delete;
    241 
    242     T* operator=(T*) volatile noexcept;
    243     T* operator=(T*) noexcept;
    244     T* operator++(int) volatile noexcept;
    245     T* operator++(int) noexcept;
    246     T* operator--(int) volatile noexcept;
    247     T* operator--(int) noexcept;
    248     T* operator++() volatile noexcept;
    249     T* operator++() noexcept;
    250     T* operator--() volatile noexcept;
    251     T* operator--() noexcept;
    252     T* operator+=(ptrdiff_t op) volatile noexcept;
    253     T* operator+=(ptrdiff_t op) noexcept;
    254     T* operator-=(ptrdiff_t op) volatile noexcept;
    255     T* operator-=(ptrdiff_t op) noexcept;
    256 };
    257 
    258 
    259 template <class T>
    260     bool
    261     atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
    262 
    263 template <class T>
    264     bool
    265     atomic_is_lock_free(const atomic<T>* obj) noexcept;
    266 
    267 template <class T>
    268     void
    269     atomic_init(volatile atomic<T>* obj, T desr) noexcept;
    270 
    271 template <class T>
    272     void
    273     atomic_init(atomic<T>* obj, T desr) noexcept;
    274 
    275 template <class T>
    276     void
    277     atomic_store(volatile atomic<T>* obj, T desr) noexcept;
    278 
    279 template <class T>
    280     void
    281     atomic_store(atomic<T>* obj, T desr) noexcept;
    282 
    283 template <class T>
    284     void
    285     atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    286 
    287 template <class T>
    288     void
    289     atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    290 
    291 template <class T>
    292     T
    293     atomic_load(const volatile atomic<T>* obj) noexcept;
    294 
    295 template <class T>
    296     T
    297     atomic_load(const atomic<T>* obj) noexcept;
    298 
    299 template <class T>
    300     T
    301     atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
    302 
    303 template <class T>
    304     T
    305     atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
    306 
    307 template <class T>
    308     T
    309     atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
    310 
    311 template <class T>
    312     T
    313     atomic_exchange(atomic<T>* obj, T desr) noexcept;
    314 
    315 template <class T>
    316     T
    317     atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    318 
    319 template <class T>
    320     T
    321     atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    322 
    323 template <class T>
    324     bool
    325     atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    326 
    327 template <class T>
    328     bool
    329     atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
    330 
    331 template <class T>
    332     bool
    333     atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    334 
    335 template <class T>
    336     bool
    337     atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
    338 
    339 template <class T>
    340     bool
    341     atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
    342                                           T desr,
    343                                           memory_order s, memory_order f) noexcept;
    344 
    345 template <class T>
    346     bool
    347     atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
    348                                           memory_order s, memory_order f) noexcept;
    349 
    350 template <class T>
    351     bool
    352     atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
    353                                             T* expc, T desr,
    354                                             memory_order s, memory_order f) noexcept;
    355 
    356 template <class T>
    357     bool
    358     atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
    359                                             T desr,
    360                                             memory_order s, memory_order f) noexcept;
    361 
    362 template <class Integral>
    363     Integral
    364     atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
    365 
    366 template <class Integral>
    367     Integral
    368     atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
    369 
    370 template <class Integral>
    371     Integral
    372     atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
    373                               memory_order m) noexcept;
    374 template <class Integral>
    375     Integral
    376     atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
    377                               memory_order m) noexcept;
    378 template <class Integral>
    379     Integral
    380     atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
    381 
    382 template <class Integral>
    383     Integral
    384     atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
    385 
    386 template <class Integral>
    387     Integral
    388     atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
    389                               memory_order m) noexcept;
    390 template <class Integral>
    391     Integral
    392     atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
    393                               memory_order m) noexcept;
    394 template <class Integral>
    395     Integral
    396     atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
    397 
    398 template <class Integral>
    399     Integral
    400     atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
    401 
    402 template <class Integral>
    403     Integral
    404     atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
    405                               memory_order m) noexcept;
    406 template <class Integral>
    407     Integral
    408     atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
    409                               memory_order m) noexcept;
    410 template <class Integral>
    411     Integral
    412     atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
    413 
    414 template <class Integral>
    415     Integral
    416     atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
    417 
    418 template <class Integral>
    419     Integral
    420     atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
    421                              memory_order m) noexcept;
    422 template <class Integral>
    423     Integral
    424     atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
    425                              memory_order m) noexcept;
    426 template <class Integral>
    427     Integral
    428     atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
    429 
    430 template <class Integral>
    431     Integral
    432     atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
    433 
    434 template <class Integral>
    435     Integral
    436     atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
    437                               memory_order m) noexcept;
    438 template <class Integral>
    439     Integral
    440     atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
    441                               memory_order m) noexcept;
    442 
    443 template <class T>
    444     T*
    445     atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    446 
    447 template <class T>
    448     T*
    449     atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
    450 
    451 template <class T>
    452     T*
    453     atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    454                               memory_order m) noexcept;
    455 template <class T>
    456     T*
    457     atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    458 
    459 template <class T>
    460     T*
    461     atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    462 
    463 template <class T>
    464     T*
    465     atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
    466 
    467 template <class T>
    468     T*
    469     atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    470                               memory_order m) noexcept;
    471 template <class T>
    472     T*
    473     atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    474 
    475 // Atomics for standard typedef types
    476 
    477 typedef atomic<bool>               atomic_bool;
    478 typedef atomic<char>               atomic_char;
    479 typedef atomic<signed char>        atomic_schar;
    480 typedef atomic<unsigned char>      atomic_uchar;
    481 typedef atomic<short>              atomic_short;
    482 typedef atomic<unsigned short>     atomic_ushort;
    483 typedef atomic<int>                atomic_int;
    484 typedef atomic<unsigned int>       atomic_uint;
    485 typedef atomic<long>               atomic_long;
    486 typedef atomic<unsigned long>      atomic_ulong;
    487 typedef atomic<long long>          atomic_llong;
    488 typedef atomic<unsigned long long> atomic_ullong;
    489 typedef atomic<char16_t>           atomic_char16_t;
    490 typedef atomic<char32_t>           atomic_char32_t;
    491 typedef atomic<wchar_t>            atomic_wchar_t;
    492 
    493 typedef atomic<int_least8_t>   atomic_int_least8_t;
    494 typedef atomic<uint_least8_t>  atomic_uint_least8_t;
    495 typedef atomic<int_least16_t>  atomic_int_least16_t;
    496 typedef atomic<uint_least16_t> atomic_uint_least16_t;
    497 typedef atomic<int_least32_t>  atomic_int_least32_t;
    498 typedef atomic<uint_least32_t> atomic_uint_least32_t;
    499 typedef atomic<int_least64_t>  atomic_int_least64_t;
    500 typedef atomic<uint_least64_t> atomic_uint_least64_t;
    501 
    502 typedef atomic<int_fast8_t>   atomic_int_fast8_t;
    503 typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
    504 typedef atomic<int_fast16_t>  atomic_int_fast16_t;
    505 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
    506 typedef atomic<int_fast32_t>  atomic_int_fast32_t;
    507 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
    508 typedef atomic<int_fast64_t>  atomic_int_fast64_t;
    509 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
    510 
    511 typedef atomic<intptr_t>  atomic_intptr_t;
    512 typedef atomic<uintptr_t> atomic_uintptr_t;
    513 typedef atomic<size_t>    atomic_size_t;
    514 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
    515 typedef atomic<intmax_t>  atomic_intmax_t;
    516 typedef atomic<uintmax_t> atomic_uintmax_t;
    517 
    518 // fences
    519 
    520 void atomic_thread_fence(memory_order m) noexcept;
    521 void atomic_signal_fence(memory_order m) noexcept;
    522 
    523 }  // std
    524 
    525 */
    526 
    527 #include <__config>
    528 #include <cstddef>
    529 #include <cstdint>
    530 #include <type_traits>
    531 
    532 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
    533 #pragma GCC system_header
    534 #endif
    535 
    536 #ifdef _LIBCPP_HAS_NO_THREADS
    537 #error <atomic> is not supported on this single threaded system
    538 #else // !_LIBCPP_HAS_NO_THREADS
    539 
    540 _LIBCPP_BEGIN_NAMESPACE_STD
    541 
    542 #if !__has_feature(cxx_atomic) && _GNUC_VER < 407
    543 #error <atomic> is not implemented
    544 #else
    545 
    546 typedef enum memory_order
    547 {
    548     memory_order_relaxed, memory_order_consume, memory_order_acquire,
    549     memory_order_release, memory_order_acq_rel, memory_order_seq_cst
    550 } memory_order;
    551 
    552 #if _GNUC_VER >= 407
namespace __gcc_atomic {
// Storage wrapper that plays the role of C11's _Atomic(T) when the
// compiler (GCC >= 4.7) provides only the __atomic_* builtins rather
// than the _Atomic qualifier.
template <typename _Tp>
struct __gcc_atomic_t {
  __gcc_atomic_t() _NOEXCEPT {}
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;  // the wrapped value; the builtins operate on its address
};
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>

// Declared but never defined: only used inside unevaluated sizeof()
// expressions below (same idea as std::declval).
template <typename _Tp> _Tp __create();

// SFINAE probe: this overload is viable only when a _Td can be assigned
// to the __a_value member reached through a _Tp pointer.
// NOTE(review): _Tp() value-initializes a (null) pointer here; the
// expression is never evaluated, only type-checked inside sizeof.
template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

// __can_assign<AtomicPtr, T>::value is true iff "__a->__a_value = t"
// would compile; used below to pick a volatile-safe initialization path.
template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};

// Map std::memory_order onto GCC's __ATOMIC_* constants.
static inline constexpr int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

// Same mapping, weakened to the strongest order allowed on the failure
// side of compare_exchange: release maps to relaxed and acq_rel to
// acquire, since a failed CAS performs no store.
static inline constexpr int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}

} // namespace __gcc_atomic
    598 
    599 template <typename _Tp>
    600 static inline
    601 typename enable_if<
    602     __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
    603 __c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
    604   __a->__a_value = __val;
    605 }
    606 
    607 template <typename _Tp>
    608 static inline
    609 typename enable_if<
    610     !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
    611      __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
    612 __c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
    613   // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
    614   // the default operator= in an object is not volatile, a byte-by-byte copy
    615   // is required.
    616   volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
    617   volatile char* end = to + sizeof(_Tp);
    618   char* from = reinterpret_cast<char*>(&__val);
    619   while (to != end) {
    620     *to++ = *from++;
    621   }
    622 }
    623 
    624 template <typename _Tp>
    625 static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
    626   __a->__a_value = __val;
    627 }
    628 
    629 static inline void __c11_atomic_thread_fence(memory_order __order) {
    630   __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
    631 }
    632 
    633 static inline void __c11_atomic_signal_fence(memory_order __order) {
    634   __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
    635 }
    636 
    637 template <typename _Tp>
    638 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
    639                                       memory_order __order) {
    640   return __atomic_store(&__a->__a_value, &__val,
    641                         __gcc_atomic::__to_gcc_order(__order));
    642 }
    643 
    644 template <typename _Tp>
    645 static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
    646                                       memory_order __order) {
    647   __atomic_store(&__a->__a_value, &__val,
    648                  __gcc_atomic::__to_gcc_order(__order));
    649 }
    650 
    651 template <typename _Tp>
    652 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
    653                                     memory_order __order) {
    654   _Tp __ret;
    655   __atomic_load(&__a->__a_value, &__ret,
    656                 __gcc_atomic::__to_gcc_order(__order));
    657   return __ret;
    658 }
    659 
    660 template <typename _Tp>
    661 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
    662   _Tp __ret;
    663   __atomic_load(&__a->__a_value, &__ret,
    664                 __gcc_atomic::__to_gcc_order(__order));
    665   return __ret;
    666 }
    667 
    668 template <typename _Tp>
    669 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
    670                                         _Tp __value, memory_order __order) {
    671   _Tp __ret;
    672   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    673                     __gcc_atomic::__to_gcc_order(__order));
    674   return __ret;
    675 }
    676 
    677 template <typename _Tp>
    678 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
    679                                         memory_order __order) {
    680   _Tp __ret;
    681   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    682                     __gcc_atomic::__to_gcc_order(__order));
    683   return __ret;
    684 }
    685 
    686 template <typename _Tp>
    687 static inline bool __c11_atomic_compare_exchange_strong(
    688     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    689     memory_order __success, memory_order __failure) {
    690   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    691                                    false,
    692                                    __gcc_atomic::__to_gcc_order(__success),
    693                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    694 }
    695 
    696 template <typename _Tp>
    697 static inline bool __c11_atomic_compare_exchange_strong(
    698     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    699     memory_order __failure) {
    700   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    701                                    false,
    702                                    __gcc_atomic::__to_gcc_order(__success),
    703                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    704 }
    705 
    706 template <typename _Tp>
        // Weak compare-and-exchange, volatile overload.  The literal `true`
        // selects the weak variant of GCC's __atomic_compare_exchange builtin
        // (permitted to fail spuriously).  On mismatch the observed value is
        // written back into *__expected; failure ordering is mapped through
        // __to_gcc_failure_order.
    707 static inline bool __c11_atomic_compare_exchange_weak(
    708     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    709     memory_order __success, memory_order __failure) {
    710   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    711                                    true,
    712                                    __gcc_atomic::__to_gcc_order(__success),
    713                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    714 }
    715 
        // Non-volatile overload; identical mapping to the volatile one above.
    716 template <typename _Tp>
    717 static inline bool __c11_atomic_compare_exchange_weak(
    718     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    719     memory_order __failure) {
    720   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    721                                    true,
    722                                    __gcc_atomic::__to_gcc_order(__success),
    723                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    724 }
    725 
    726 template <typename _Tp>
        // Scale factor applied to the delta of __c11_atomic_fetch_add/sub:
        // 1 for non-pointer types (arithmetic is in units of _Tp) ...
    727 struct __skip_amt { enum {value = 1}; };
    728 
    729 template <typename _Tp>
        // ... and sizeof(_Tp) for pointers, so pointer fetch_add/sub advance
        // by whole objects (GCC's __atomic_fetch_add works in bytes here).
    730 struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
    731 
    732 // FIXME: Haven't figured out what the spec says about using arrays with
    733 // atomic_fetch_add. Force a failure rather than creating bad behavior.
        // These specializations deliberately define no `value` member, so any
        // use with an array type fails to compile.
    734 template <typename _Tp>
    735 struct __skip_amt<_Tp[]> { };
    736 template <typename _Tp, int n>
    737 struct __skip_amt<_Tp[n]> { };
    738 
    739 template <typename _Tp, typename _Td>
        // Atomic fetch-and-add (volatile overload): returns the prior value.
        // The delta is scaled by __skip_amt so that pointer atomics advance by
        // sizeof(_Tp) per unit, matching C++ pointer-arithmetic semantics.
    740 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
    741                                          _Td __delta, memory_order __order) {
    742   return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    743                             __gcc_atomic::__to_gcc_order(__order));
    744 }
    745 
        // Non-volatile overload; same scaling and ordering mapping as above.
    746 template <typename _Tp, typename _Td>
    747 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
    748                                          memory_order __order) {
    749   return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    750                             __gcc_atomic::__to_gcc_order(__order));
    751 }
    752 
    753 template <typename _Tp, typename _Td>
        // Atomic fetch-and-subtract (volatile overload): returns the prior
        // value.  Delta is scaled by __skip_amt, mirroring fetch_add, so
        // pointer atomics step back by whole objects.
    754 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
    755                                          _Td __delta, memory_order __order) {
    756   return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    757                             __gcc_atomic::__to_gcc_order(__order));
    758 }
    759 
        // Non-volatile overload; same scaling and ordering mapping as above.
    760 template <typename _Tp, typename _Td>
    761 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
    762                                          memory_order __order) {
    763   return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    764                             __gcc_atomic::__to_gcc_order(__order));
    765 }
    766 
    767 template <typename _Tp>
        // Atomic fetch-and-AND (volatile overload): ANDs __pattern into the
        // stored value and returns the prior value.  No __skip_amt scaling —
        // bitwise ops are only meaningful for integral types.
    768 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
    769                                          _Tp __pattern, memory_order __order) {
    770   return __atomic_fetch_and(&__a->__a_value, __pattern,
    771                             __gcc_atomic::__to_gcc_order(__order));
    772 }
    773 
        // Non-volatile overload of the same operation.
    774 template <typename _Tp>
    775 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
    776                                          _Tp __pattern, memory_order __order) {
    777   return __atomic_fetch_and(&__a->__a_value, __pattern,
    778                             __gcc_atomic::__to_gcc_order(__order));
    779 }
    780 
    781 template <typename _Tp>
        // Atomic fetch-and-OR (volatile overload): ORs __pattern into the
        // stored value and returns the prior value.
    782 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
    783                                         _Tp __pattern, memory_order __order) {
    784   return __atomic_fetch_or(&__a->__a_value, __pattern,
    785                            __gcc_atomic::__to_gcc_order(__order));
    786 }
    787 
        // Non-volatile overload of the same operation.
    788 template <typename _Tp>
    789 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
    790                                         memory_order __order) {
    791   return __atomic_fetch_or(&__a->__a_value, __pattern,
    792                            __gcc_atomic::__to_gcc_order(__order));
    793 }
    794 
    795 template <typename _Tp>
        // Atomic fetch-and-XOR (volatile overload): XORs __pattern into the
        // stored value and returns the prior value.
    796 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
    797                                          _Tp __pattern, memory_order __order) {
    798   return __atomic_fetch_xor(&__a->__a_value, __pattern,
    799                             __gcc_atomic::__to_gcc_order(__order));
    800 }
    801 
        // Non-volatile overload of the same operation.
    802 template <typename _Tp>
    803 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
    804                                          memory_order __order) {
    805   return __atomic_fetch_xor(&__a->__a_value, __pattern,
    806                             __gcc_atomic::__to_gcc_order(__order));
    807 }
    808 #endif // _GNUC_VER >= 407
    809 
    810 template <class _Tp>
        // std::kill_dependency [atomics.order]: returns its argument unchanged.
        // Its purpose is purely semantic — the returned value no longer carries
        // a memory_order_consume dependency chain.
    811 inline _LIBCPP_INLINE_VISIBILITY
    812 _Tp
    813 kill_dependency(_Tp __y) _NOEXCEPT
    814 {
    815     return __y;
    816 }
    817 
    818 // general atomic<T>
    819 
        // Primary template: the common operations shared by every atomic<T>.
        // The bool parameter defaults to "is a non-bool integral type"; this
        // (false) specialization provides load/store/exchange/CAS only, while
        // the `true` partial specialization layers arithmetic on top.
    820 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
    821 struct __atomic_base  // false
    822 {
        // The actual storage.  `mutable` so const-qualified loads can still
        // pass a non-const pointer to the __c11_atomic_* shims.
    823     mutable _Atomic(_Tp) __a_;
    824 
    825     _LIBCPP_INLINE_VISIBILITY
    826     bool is_lock_free() const volatile _NOEXCEPT
    827     {
        // Clang provides __c11_atomic_is_lock_free; otherwise fall back to
        // GCC's __atomic_is_lock_free (0 = unknown alignment/object).
    828 #if __has_feature(cxx_atomic)
    829     return __c11_atomic_is_lock_free(sizeof(_Tp));
    830 #else
    831     return __atomic_is_lock_free(sizeof(_Tp), 0);
    832 #endif
    833     }
    834     _LIBCPP_INLINE_VISIBILITY
    835     bool is_lock_free() const _NOEXCEPT
    836         {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
        // store/load/exchange: each in volatile and non-volatile flavors, all
        // defaulting to memory_order_seq_cst and forwarding to the
        // __c11_atomic_* shims defined above.
    837     _LIBCPP_INLINE_VISIBILITY
    838     void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    839         {__c11_atomic_store(&__a_, __d, __m);}
    840     _LIBCPP_INLINE_VISIBILITY
    841     void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    842         {__c11_atomic_store(&__a_, __d, __m);}
    843     _LIBCPP_INLINE_VISIBILITY
    844     _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    845         {return __c11_atomic_load(&__a_, __m);}
    846     _LIBCPP_INLINE_VISIBILITY
    847     _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    848         {return __c11_atomic_load(&__a_, __m);}
        // Implicit conversion performs a seq_cst load.
    849     _LIBCPP_INLINE_VISIBILITY
    850     operator _Tp() const volatile _NOEXCEPT {return load();}
    851     _LIBCPP_INLINE_VISIBILITY
    852     operator _Tp() const _NOEXCEPT          {return load();}
    853     _LIBCPP_INLINE_VISIBILITY
    854     _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    855         {return __c11_atomic_exchange(&__a_, __d, __m);}
    856     _LIBCPP_INLINE_VISIBILITY
    857     _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    858         {return __c11_atomic_exchange(&__a_, __d, __m);}
        // Two-order CAS overloads: caller supplies success (__s) and failure
        // (__f) orderings explicitly.
    859     _LIBCPP_INLINE_VISIBILITY
    860     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    861                                memory_order __s, memory_order __f) volatile _NOEXCEPT
    862         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    863     _LIBCPP_INLINE_VISIBILITY
    864     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    865                                memory_order __s, memory_order __f) _NOEXCEPT
    866         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    867     _LIBCPP_INLINE_VISIBILITY
    868     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    869                                  memory_order __s, memory_order __f) volatile _NOEXCEPT
    870         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    871     _LIBCPP_INLINE_VISIBILITY
    872     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    873                                  memory_order __s, memory_order __f) _NOEXCEPT
    874         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
        // Single-order CAS overloads: __m is passed for both success and
        // failure; the underlying shims/builtins are responsible for deriving
        // a valid failure ordering from it.
    875     _LIBCPP_INLINE_VISIBILITY
    876     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    877                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    878         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    879     _LIBCPP_INLINE_VISIBILITY
    880     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    881                                memory_order __m = memory_order_seq_cst) _NOEXCEPT
    882         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    883     _LIBCPP_INLINE_VISIBILITY
    884     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    885                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    886         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    887     _LIBCPP_INLINE_VISIBILITY
    888     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    889                                  memory_order __m = memory_order_seq_cst) _NOEXCEPT
    890         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    891 
        // Default ctor: defaulted when the compiler supports it, otherwise a
        // hand-written equivalent that value-initializes the storage.
    892     _LIBCPP_INLINE_VISIBILITY
    893 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    894     __atomic_base() _NOEXCEPT = default;
    895 #else
    896     __atomic_base() _NOEXCEPT : __a_() {}
    897 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    898 
    899     _LIBCPP_INLINE_VISIBILITY
    900     _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
        // Atomics are non-copyable and non-copy-assignable per the standard;
        // emulate `= delete` with private declarations on old compilers.
    901 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    902     __atomic_base(const __atomic_base&) = delete;
    903     __atomic_base& operator=(const __atomic_base&) = delete;
    904     __atomic_base& operator=(const __atomic_base&) volatile = delete;
    905 #else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    906 private:
    907     __atomic_base(const __atomic_base&);
    908     __atomic_base& operator=(const __atomic_base&);
    909     __atomic_base& operator=(const __atomic_base&) volatile;
    910 #endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    911 };
    912 
    913 // atomic<Integral>
    914 
        // Partial specialization for non-bool integral types: inherits the
        // core load/store/exchange/CAS operations and adds the arithmetic and
        // bitwise fetch operations plus the operator sugar built on them.
    915 template <class _Tp>
    916 struct __atomic_base<_Tp, true>
    917     : public __atomic_base<_Tp, false>
    918 {
    919     typedef __atomic_base<_Tp, false> __base;
    920     _LIBCPP_INLINE_VISIBILITY
    921     __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    922     _LIBCPP_INLINE_VISIBILITY
    923     _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
    924 
        // fetch_* family: each returns the value held *before* the operation;
        // volatile and non-volatile overloads forward to the __c11_atomic_*
        // shims on the inherited storage this->__a_.
    925     _LIBCPP_INLINE_VISIBILITY
    926     _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    927         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    928     _LIBCPP_INLINE_VISIBILITY
    929     _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    930         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    931     _LIBCPP_INLINE_VISIBILITY
    932     _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    933         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    934     _LIBCPP_INLINE_VISIBILITY
    935     _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    936         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    937     _LIBCPP_INLINE_VISIBILITY
    938     _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    939         {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    940     _LIBCPP_INLINE_VISIBILITY
    941     _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    942         {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    943     _LIBCPP_INLINE_VISIBILITY
    944     _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    945         {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    946     _LIBCPP_INLINE_VISIBILITY
    947     _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    948         {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    949     _LIBCPP_INLINE_VISIBILITY
    950     _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    951         {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    952     _LIBCPP_INLINE_VISIBILITY
    953     _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    954         {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    955 
        // Operator forms (all seq_cst): post-inc/dec return the old value;
        // pre-inc/dec and the compound assignments re-apply the operation to
        // the fetched old value to yield the new value, per [atomics.types].
    956     _LIBCPP_INLINE_VISIBILITY
    957     _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    958     _LIBCPP_INLINE_VISIBILITY
    959     _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    960     _LIBCPP_INLINE_VISIBILITY
    961     _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    962     _LIBCPP_INLINE_VISIBILITY
    963     _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    964     _LIBCPP_INLINE_VISIBILITY
    965     _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    966     _LIBCPP_INLINE_VISIBILITY
    967     _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    968     _LIBCPP_INLINE_VISIBILITY
    969     _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    970     _LIBCPP_INLINE_VISIBILITY
    971     _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    972     _LIBCPP_INLINE_VISIBILITY
    973     _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    974     _LIBCPP_INLINE_VISIBILITY
    975     _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    976     _LIBCPP_INLINE_VISIBILITY
    977     _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    978     _LIBCPP_INLINE_VISIBILITY
    979     _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    980     _LIBCPP_INLINE_VISIBILITY
    981     _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    982     _LIBCPP_INLINE_VISIBILITY
    983     _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    984     _LIBCPP_INLINE_VISIBILITY
    985     _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    986     _LIBCPP_INLINE_VISIBILITY
    987     _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    988     _LIBCPP_INLINE_VISIBILITY
    989     _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    990     _LIBCPP_INLINE_VISIBILITY
    991     _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
    992 };
    993 
    994 // atomic<T>
    995 
        // Primary std::atomic<T> template: inherits everything from
        // __atomic_base (which selects the integral-extended base when _Tp is
        // a non-bool integral type) and adds only assignment from _Tp, which
        // performs a seq_cst store and returns the stored value.
    996 template <class _Tp>
    997 struct atomic
    998     : public __atomic_base<_Tp>
    999 {
   1000     typedef __atomic_base<_Tp> __base;
   1001     _LIBCPP_INLINE_VISIBILITY
   1002     atomic() _NOEXCEPT _LIBCPP_DEFAULT
   1003     _LIBCPP_INLINE_VISIBILITY
   1004     _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
   1005 
   1006     _LIBCPP_INLINE_VISIBILITY
   1007     _Tp operator=(_Tp __d) volatile _NOEXCEPT
   1008         {__base::store(__d); return __d;}
   1009     _LIBCPP_INLINE_VISIBILITY
   1010     _Tp operator=(_Tp __d) _NOEXCEPT
   1011         {__base::store(__d); return __d;}
   1012 };
   1013 
   1014 // atomic<T*>
   1015 
        // Partial specialization for pointers: adds pointer arithmetic —
        // fetch_add/fetch_sub take a ptrdiff_t element count (the byte
        // scaling by sizeof(_Tp) happens in __c11_atomic_fetch_add/sub via
        // __skip_amt) plus the usual increment/decrement/compound operators.
   1016 template <class _Tp>
   1017 struct atomic<_Tp*>
   1018     : public __atomic_base<_Tp*>
   1019 {
   1020     typedef __atomic_base<_Tp*> __base;
   1021     _LIBCPP_INLINE_VISIBILITY
   1022     atomic() _NOEXCEPT _LIBCPP_DEFAULT
   1023     _LIBCPP_INLINE_VISIBILITY
   1024     _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
   1025 
        // Assignment performs a seq_cst store and returns the stored pointer.
   1026     _LIBCPP_INLINE_VISIBILITY
   1027     _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
   1028         {__base::store(__d); return __d;}
   1029     _LIBCPP_INLINE_VISIBILITY
   1030     _Tp* operator=(_Tp* __d) _NOEXCEPT
   1031         {__base::store(__d); return __d;}
   1032 
        // fetch_add/fetch_sub return the pointer value held *before* the
        // operation.
   1033     _LIBCPP_INLINE_VISIBILITY
   1034     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
   1035                                                                         volatile _NOEXCEPT
   1036         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
   1037     _LIBCPP_INLINE_VISIBILITY
   1038     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
   1039         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
   1040     _LIBCPP_INLINE_VISIBILITY
   1041     _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
   1042                                                                         volatile _NOEXCEPT
   1043         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
   1044     _LIBCPP_INLINE_VISIBILITY
   1045     _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
   1046         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
   1047 
        // Operator sugar (all seq_cst): post-forms return the old pointer,
        // pre-forms and compound assignments return the new one.
   1048     _LIBCPP_INLINE_VISIBILITY
   1049     _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
   1050     _LIBCPP_INLINE_VISIBILITY
   1051     _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
   1052     _LIBCPP_INLINE_VISIBILITY
   1053     _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
   1054     _LIBCPP_INLINE_VISIBILITY
   1055     _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
   1056     _LIBCPP_INLINE_VISIBILITY
   1057     _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
   1058     _LIBCPP_INLINE_VISIBILITY
   1059     _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
   1060     _LIBCPP_INLINE_VISIBILITY
   1061     _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
   1062     _LIBCPP_INLINE_VISIBILITY
   1063     _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
   1064     _LIBCPP_INLINE_VISIBILITY
   1065     _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
   1066     _LIBCPP_INLINE_VISIBILITY
   1067     _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
   1068     _LIBCPP_INLINE_VISIBILITY
   1069     _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
   1070     _LIBCPP_INLINE_VISIBILITY
   1071     _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
   1072 };
   1073 
   1074 // atomic_is_lock_free
   1075 
   1076 template <class _Tp>
        // C-compatible free function [atomics.lockfree]: forwards to the
        // member is_lock_free().  Volatile and non-volatile overloads.
   1077 inline _LIBCPP_INLINE_VISIBILITY
   1078 bool
   1079 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
   1080 {
   1081     return __o->is_lock_free();
   1082 }
   1083 
   1084 template <class _Tp>
   1085 inline _LIBCPP_INLINE_VISIBILITY
   1086 bool
   1087 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
   1088 {
   1089     return __o->is_lock_free();
   1090 }
   1091 
   1092 // atomic_init
   1093 
   1094 template <class _Tp>
        // Non-atomic initialization of an atomic object: writes directly via
        // __c11_atomic_init rather than an atomic store.  Must not race with
        // concurrent access.  Volatile and non-volatile overloads.
   1095 inline _LIBCPP_INLINE_VISIBILITY
   1096 void
   1097 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1098 {
   1099     __c11_atomic_init(&__o->__a_, __d);
   1100 }
   1101 
   1102 template <class _Tp>
   1103 inline _LIBCPP_INLINE_VISIBILITY
   1104 void
   1105 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1106 {
   1107     __c11_atomic_init(&__o->__a_, __d);
   1108 }
   1109 
   1110 // atomic_store
   1111 
   1112 template <class _Tp>
        // Free-function store with the default (seq_cst) ordering; forwards to
        // the member store().  Volatile and non-volatile overloads.
   1113 inline _LIBCPP_INLINE_VISIBILITY
   1114 void
   1115 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1116 {
   1117     __o->store(__d);
   1118 }
   1119 
   1120 template <class _Tp>
   1121 inline _LIBCPP_INLINE_VISIBILITY
   1122 void
   1123 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1124 {
   1125     __o->store(__d);
   1126 }
   1127 
   1128 // atomic_store_explicit
   1129 
   1130 template <class _Tp>
        // Free-function store with a caller-chosen memory ordering; forwards
        // to the member store().  Volatile and non-volatile overloads.
   1131 inline _LIBCPP_INLINE_VISIBILITY
   1132 void
   1133 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1134 {
   1135     __o->store(__d, __m);
   1136 }
   1137 
   1138 template <class _Tp>
   1139 inline _LIBCPP_INLINE_VISIBILITY
   1140 void
   1141 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1142 {
   1143     __o->store(__d, __m);
   1144 }
   1145 
   1146 // atomic_load
   1147 
   1148 template <class _Tp>
        // Free-function load with the default (seq_cst) ordering; forwards to
        // the member load().  Volatile and non-volatile overloads.
   1149 inline _LIBCPP_INLINE_VISIBILITY
   1150 _Tp
   1151 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
   1152 {
   1153     return __o->load();
   1154 }
   1155 
   1156 template <class _Tp>
   1157 inline _LIBCPP_INLINE_VISIBILITY
   1158 _Tp
   1159 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
   1160 {
   1161     return __o->load();
   1162 }
   1163 
   1164 // atomic_load_explicit
   1165 
   1166 template <class _Tp>
        // Free-function load with a caller-chosen memory ordering; forwards to
        // the member load().  Volatile and non-volatile overloads.
   1167 inline _LIBCPP_INLINE_VISIBILITY
   1168 _Tp
   1169 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
   1170 {
   1171     return __o->load(__m);
   1172 }
   1173 
   1174 template <class _Tp>
   1175 inline _LIBCPP_INLINE_VISIBILITY
   1176 _Tp
   1177 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
   1178 {
   1179     return __o->load(__m);
   1180 }
   1181 
   1182 // atomic_exchange
   1183 
   1184 template <class _Tp>
        // Free-function exchange (seq_cst): stores __d and returns the prior
        // value, via the member exchange().  Volatile and non-volatile
        // overloads.
   1185 inline _LIBCPP_INLINE_VISIBILITY
   1186 _Tp
   1187 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1188 {
   1189     return __o->exchange(__d);
   1190 }
   1191 
   1192 template <class _Tp>
   1193 inline _LIBCPP_INLINE_VISIBILITY
   1194 _Tp
   1195 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1196 {
   1197     return __o->exchange(__d);
   1198 }
   1199 
   1200 // atomic_exchange_explicit
   1201 
   1202 template <class _Tp>
        // Free-function exchange with a caller-chosen memory ordering;
        // forwards to the member exchange().  Volatile and non-volatile
        // overloads.
   1203 inline _LIBCPP_INLINE_VISIBILITY
   1204 _Tp
   1205 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1206 {
   1207     return __o->exchange(__d, __m);
   1208 }
   1209 
   1210 template <class _Tp>
   1211 inline _LIBCPP_INLINE_VISIBILITY
   1212 _Tp
   1213 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1214 {
   1215     return __o->exchange(__d, __m);
   1216 }
   1217 
   1218 // atomic_compare_exchange_weak
   1219 
   1220 template <class _Tp>
        // Free-function weak CAS (seq_cst): note the C-style interface takes
        // the expected value by pointer, which is dereferenced into the
        // member's reference parameter.  Volatile and non-volatile overloads.
   1221 inline _LIBCPP_INLINE_VISIBILITY
   1222 bool
   1223 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1224 {
   1225     return __o->compare_exchange_weak(*__e, __d);
   1226 }
   1227 
   1228 template <class _Tp>
   1229 inline _LIBCPP_INLINE_VISIBILITY
   1230 bool
   1231 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1232 {
   1233     return __o->compare_exchange_weak(*__e, __d);
   1234 }
   1235 
   1236 // atomic_compare_exchange_strong
   1237 
   1238 template <class _Tp>
        // Free-function strong CAS (seq_cst): the expected value is passed by
        // pointer and dereferenced into the member's reference parameter.
        // Volatile and non-volatile overloads.
   1239 inline _LIBCPP_INLINE_VISIBILITY
   1240 bool
   1241 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1242 {
   1243     return __o->compare_exchange_strong(*__e, __d);
   1244 }
   1245 
   1246 template <class _Tp>
   1247 inline _LIBCPP_INLINE_VISIBILITY
   1248 bool
   1249 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1250 {
   1251     return __o->compare_exchange_strong(*__e, __d);
   1252 }
   1253 
   1254 // atomic_compare_exchange_weak_explicit
   1255 
   1256 template <class _Tp>
        // Free-function weak CAS with explicit success (__s) and failure (__f)
        // orderings; forwards to the two-order member overload.  Volatile and
        // non-volatile overloads.
   1257 inline _LIBCPP_INLINE_VISIBILITY
   1258 bool
   1259 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
   1260                                       _Tp __d,
   1261                                       memory_order __s, memory_order __f) _NOEXCEPT
   1262 {
   1263     return __o->compare_exchange_weak(*__e, __d, __s, __f);
   1264 }
   1265 
   1266 template <class _Tp>
   1267 inline _LIBCPP_INLINE_VISIBILITY
   1268 bool
   1269 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
   1270                                       memory_order __s, memory_order __f) _NOEXCEPT
   1271 {
   1272     return __o->compare_exchange_weak(*__e, __d, __s, __f);
   1273 }
   1274 
   1275 // atomic_compare_exchange_strong_explicit
   1276 
   1277 template <class _Tp>
        // Free-function strong CAS with explicit success (__s) and failure
        // (__f) orderings; forwards to the two-order member overload.
        // Volatile and non-volatile overloads.
   1278 inline _LIBCPP_INLINE_VISIBILITY
   1279 bool
   1280 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
   1281                                         _Tp* __e, _Tp __d,
   1282                                         memory_order __s, memory_order __f) _NOEXCEPT
   1283 {
   1284     return __o->compare_exchange_strong(*__e, __d, __s, __f);
   1285 }
   1286 
   1287 template <class _Tp>
   1288 inline _LIBCPP_INLINE_VISIBILITY
   1289 bool
   1290 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
   1291                                         _Tp __d,
   1292                                         memory_order __s, memory_order __f) _NOEXCEPT
   1293 {
   1294     return __o->compare_exchange_strong(*__e, __d, __s, __f);
   1295 }
   1296 
   1297 // atomic_fetch_add
   1298 
   1299 template <class _Tp>
        // Free-function fetch_add (seq_cst), returning the prior value.  The
        // integral overloads are SFINAE-restricted via enable_if to non-bool
        // integral types (matching which atomics have fetch_add members); the
        // pointer overloads take a ptrdiff_t element count.
   1300 inline _LIBCPP_INLINE_VISIBILITY
   1301 typename enable_if
   1302 <
   1303     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1304     _Tp
   1305 >::type
   1306 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1307 {
   1308     return __o->fetch_add(__op);
   1309 }
   1310 
   1311 template <class _Tp>
   1312 inline _LIBCPP_INLINE_VISIBILITY
   1313 typename enable_if
   1314 <
   1315     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1316     _Tp
   1317 >::type
   1318 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1319 {
   1320     return __o->fetch_add(__op);
   1321 }
   1322 
   1323 template <class _Tp>
   1324 inline _LIBCPP_INLINE_VISIBILITY
   1325 _Tp*
   1326 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1327 {
   1328     return __o->fetch_add(__op);
   1329 }
   1330 
   1331 template <class _Tp>
   1332 inline _LIBCPP_INLINE_VISIBILITY
   1333 _Tp*
   1334 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1335 {
   1336     return __o->fetch_add(__op);
   1337 }
   1338 
   1339 // atomic_fetch_add_explicit
   1340 
   1341 template <class _Tp>
        // Free-function fetch_add with a caller-chosen memory ordering,
        // returning the prior value.  Same overload set as atomic_fetch_add:
        // enable_if-restricted integral overloads plus ptrdiff_t pointer
        // overloads.
   1342 inline _LIBCPP_INLINE_VISIBILITY
   1343 typename enable_if
   1344 <
   1345     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1346     _Tp
   1347 >::type
   1348 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1349 {
   1350     return __o->fetch_add(__op, __m);
   1351 }
   1352 
   1353 template <class _Tp>
   1354 inline _LIBCPP_INLINE_VISIBILITY
   1355 typename enable_if
   1356 <
   1357     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1358     _Tp
   1359 >::type
   1360 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1361 {
   1362     return __o->fetch_add(__op, __m);
   1363 }
   1364 
   1365 template <class _Tp>
   1366 inline _LIBCPP_INLINE_VISIBILITY
   1367 _Tp*
   1368 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
   1369                           memory_order __m) _NOEXCEPT
   1370 {
   1371     return __o->fetch_add(__op, __m);
   1372 }
   1373 
   1374 template <class _Tp>
   1375 inline _LIBCPP_INLINE_VISIBILITY
   1376 _Tp*
   1377 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
   1378 {
   1379     return __o->fetch_add(__op, __m);
   1380 }
   1381 
   1382 // atomic_fetch_sub
   1383 
   1384 template <class _Tp>
        // Free-function fetch_sub (seq_cst), returning the prior value.
        // Mirrors atomic_fetch_add: enable_if-restricted integral overloads
        // plus ptrdiff_t pointer overloads.
   1385 inline _LIBCPP_INLINE_VISIBILITY
   1386 typename enable_if
   1387 <
   1388     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1389     _Tp
   1390 >::type
   1391 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1392 {
   1393     return __o->fetch_sub(__op);
   1394 }
   1395 
   1396 template <class _Tp>
   1397 inline _LIBCPP_INLINE_VISIBILITY
   1398 typename enable_if
   1399 <
   1400     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1401     _Tp
   1402 >::type
   1403 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1404 {
   1405     return __o->fetch_sub(__op);
   1406 }
   1407 
   1408 template <class _Tp>
   1409 inline _LIBCPP_INLINE_VISIBILITY
   1410 _Tp*
   1411 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1412 {
   1413     return __o->fetch_sub(__op);
   1414 }
   1415 
   1416 template <class _Tp>
   1417 inline _LIBCPP_INLINE_VISIBILITY
   1418 _Tp*
   1419 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1420 {
   1421     return __o->fetch_sub(__op);
   1422 }
   1423 
   1424 // atomic_fetch_sub_explicit
   1425 
   1426 template <class _Tp>
        // Free-function fetch_sub with a caller-chosen memory ordering,
        // returning the prior value.  Same overload set as atomic_fetch_sub.
   1427 inline _LIBCPP_INLINE_VISIBILITY
   1428 typename enable_if
   1429 <
   1430     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1431     _Tp
   1432 >::type
   1433 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1434 {
   1435     return __o->fetch_sub(__op, __m);
   1436 }
   1437 
   1438 template <class _Tp>
   1439 inline _LIBCPP_INLINE_VISIBILITY
   1440 typename enable_if
   1441 <
   1442     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1443     _Tp
   1444 >::type
   1445 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1446 {
   1447     return __o->fetch_sub(__op, __m);
   1448 }
   1449 
   1450 template <class _Tp>
   1451 inline _LIBCPP_INLINE_VISIBILITY
   1452 _Tp*
   1453 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
   1454                           memory_order __m) _NOEXCEPT
   1455 {
   1456     return __o->fetch_sub(__op, __m);
   1457 }
   1458 
   1459 template <class _Tp>
   1460 inline _LIBCPP_INLINE_VISIBILITY
   1461 _Tp*
   1462 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
   1463 {
   1464     return __o->fetch_sub(__op, __m);
   1465 }
   1466 
   1467 // atomic_fetch_and
   1468 
   1469 template <class _Tp>
        // Free-function fetch_and (seq_cst), returning the prior value.
        // Integral (non-bool) types only — no pointer overloads for bitwise
        // operations.  Volatile and non-volatile overloads.
   1470 inline _LIBCPP_INLINE_VISIBILITY
   1471 typename enable_if
   1472 <
   1473     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1474     _Tp
   1475 >::type
   1476 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1477 {
   1478     return __o->fetch_and(__op);
   1479 }
   1480 
   1481 template <class _Tp>
   1482 inline _LIBCPP_INLINE_VISIBILITY
   1483 typename enable_if
   1484 <
   1485     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1486     _Tp
   1487 >::type
   1488 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1489 {
   1490     return __o->fetch_and(__op);
   1491 }
   1492 
   1493 // atomic_fetch_and_explicit
   1494 
   1495 template <class _Tp>
        // Free-function fetch_and with a caller-chosen memory ordering,
        // returning the prior value.  Integral (non-bool) types only.
   1496 inline _LIBCPP_INLINE_VISIBILITY
   1497 typename enable_if
   1498 <
   1499     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1500     _Tp
   1501 >::type
   1502 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1503 {
   1504     return __o->fetch_and(__op, __m);
   1505 }
   1506 
   1507 template <class _Tp>
   1508 inline _LIBCPP_INLINE_VISIBILITY
   1509 typename enable_if
   1510 <
   1511     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1512     _Tp
   1513 >::type
   1514 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1515 {
   1516     return __o->fetch_and(__op, __m);
   1517 }
   1518 
   1519 // atomic_fetch_or
   1520 
   1521 template <class _Tp>
   1522 inline _LIBCPP_INLINE_VISIBILITY
   1523 typename enable_if
   1524 <
   1525     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1526     _Tp
   1527 >::type
   1528 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1529 {
   1530     return __o->fetch_or(__op);
   1531 }
   1532 
   1533 template <class _Tp>
   1534 inline _LIBCPP_INLINE_VISIBILITY
   1535 typename enable_if
   1536 <
   1537     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1538     _Tp
   1539 >::type
   1540 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1541 {
   1542     return __o->fetch_or(__op);
   1543 }
   1544 
   1545 // atomic_fetch_or_explicit
   1546 
   1547 template <class _Tp>
   1548 inline _LIBCPP_INLINE_VISIBILITY
   1549 typename enable_if
   1550 <
   1551     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1552     _Tp
   1553 >::type
   1554 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1555 {
   1556     return __o->fetch_or(__op, __m);
   1557 }
   1558 
   1559 template <class _Tp>
   1560 inline _LIBCPP_INLINE_VISIBILITY
   1561 typename enable_if
   1562 <
   1563     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1564     _Tp
   1565 >::type
   1566 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1567 {
   1568     return __o->fetch_or(__op, __m);
   1569 }
   1570 
   1571 // atomic_fetch_xor
   1572 
   1573 template <class _Tp>
   1574 inline _LIBCPP_INLINE_VISIBILITY
   1575 typename enable_if
   1576 <
   1577     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1578     _Tp
   1579 >::type
   1580 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1581 {
   1582     return __o->fetch_xor(__op);
   1583 }
   1584 
   1585 template <class _Tp>
   1586 inline _LIBCPP_INLINE_VISIBILITY
   1587 typename enable_if
   1588 <
   1589     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1590     _Tp
   1591 >::type
   1592 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1593 {
   1594     return __o->fetch_xor(__op);
   1595 }
   1596 
   1597 // atomic_fetch_xor_explicit
   1598 
   1599 template <class _Tp>
   1600 inline _LIBCPP_INLINE_VISIBILITY
   1601 typename enable_if
   1602 <
   1603     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1604     _Tp
   1605 >::type
   1606 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1607 {
   1608     return __o->fetch_xor(__op, __m);
   1609 }
   1610 
   1611 template <class _Tp>
   1612 inline _LIBCPP_INLINE_VISIBILITY
   1613 typename enable_if
   1614 <
   1615     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1616     _Tp
   1617 >::type
   1618 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1619 {
   1620     return __o->fetch_xor(__op, __m);
   1621 }
   1622 
   1623 // flag type and operations
   1624 
// atomic_flag: the minimal atomic boolean flag. Implemented directly on the
// compiler's C11 atomic builtins (__c11_atomic_exchange / __c11_atomic_store).
typedef struct atomic_flag
{
    _Atomic(bool) __a_;  // underlying C11 atomic boolean state

    // Atomically sets the flag to true and returns its previous value.
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    // Atomically resets the flag to false.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    // Default constructor: defaulted when the compiler supports it,
    // otherwise value-initializes __a_.  Use ATOMIC_FLAG_INIT for a
    // portably initialized (clear) flag.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    // bool constructor: enables ATOMIC_FLAG_INIT, defined below as {false}.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

    // atomic_flag is neither copyable nor assignable; on compilers without
    // deleted functions the same effect is achieved via private declarations.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;
   1663 
   1664 inline _LIBCPP_INLINE_VISIBILITY
   1665 bool
   1666 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
   1667 {
   1668     return __o->test_and_set();
   1669 }
   1670 
   1671 inline _LIBCPP_INLINE_VISIBILITY
   1672 bool
   1673 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
   1674 {
   1675     return __o->test_and_set();
   1676 }
   1677 
   1678 inline _LIBCPP_INLINE_VISIBILITY
   1679 bool
   1680 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1681 {
   1682     return __o->test_and_set(__m);
   1683 }
   1684 
   1685 inline _LIBCPP_INLINE_VISIBILITY
   1686 bool
   1687 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1688 {
   1689     return __o->test_and_set(__m);
   1690 }
   1691 
   1692 inline _LIBCPP_INLINE_VISIBILITY
   1693 void
   1694 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
   1695 {
   1696     __o->clear();
   1697 }
   1698 
   1699 inline _LIBCPP_INLINE_VISIBILITY
   1700 void
   1701 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
   1702 {
   1703     __o->clear();
   1704 }
   1705 
   1706 inline _LIBCPP_INLINE_VISIBILITY
   1707 void
   1708 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1709 {
   1710     __o->clear(__m);
   1711 }
   1712 
   1713 inline _LIBCPP_INLINE_VISIBILITY
   1714 void
   1715 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1716 {
   1717     __o->clear(__m);
   1718 }
   1719 
   1720 // fences
   1721 
   1722 inline _LIBCPP_INLINE_VISIBILITY
   1723 void
   1724 atomic_thread_fence(memory_order __m) _NOEXCEPT
   1725 {
   1726     __c11_atomic_thread_fence(__m);
   1727 }
   1728 
   1729 inline _LIBCPP_INLINE_VISIBILITY
   1730 void
   1731 atomic_signal_fence(memory_order __m) _NOEXCEPT
   1732 {
   1733     __c11_atomic_signal_fence(__m);
   1734 }
   1735 
// Atomics for standard typedef types

// Convenience aliases required by [atomics.types.generic]:
// one atomic typedef per builtin character/integer type.
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// Aliases for the <cstdint> least-width integer typedefs.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Aliases for the <cstdint> fastest-width integer typedefs.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Aliases for pointer-sized and max-width integer typedefs.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// ATOMIC_FLAG_INIT relies on atomic_flag's bool constructor above;
// ATOMIC_VAR_INIT wraps the initializer for aggregate initialization.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

// lock-free property

// Lock-free query macros forwarded to the compiler's predefined
// __GCC_ATOMIC_*_LOCK_FREE values (0 = never, 1 = sometimes, 2 = always).
#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
   1794 
   1795 #endif  //  !__has_feature(cxx_atomic)
   1796 
   1797 _LIBCPP_END_NAMESPACE_STD
   1798 
   1799 #endif  // !_LIBCPP_HAS_NO_THREADS
   1800 
   1801 #endif  // _LIBCPP_ATOMIC
   1802