      1 // -*- C++ -*-
      2 //===--------------------------- atomic -----------------------------------===//
      3 //
      4 //                     The LLVM Compiler Infrastructure
      5 //
      6 // This file is distributed under the University of Illinois Open Source
      7 // License. See LICENSE.TXT for details.
      8 //
      9 //===----------------------------------------------------------------------===//
     10 
     11 #ifndef _LIBCPP_ATOMIC
     12 #define _LIBCPP_ATOMIC
     13 
     14 /*
     15     atomic synopsis
     16 
     17 namespace std
     18 {
     19 
     20 // order and consistency
     21 
     22 typedef enum memory_order
     23 {
     24     memory_order_relaxed,
     25     memory_order_consume,  // load-consume
     26     memory_order_acquire,  // load-acquire
     27     memory_order_release,  // store-release
     28     memory_order_acq_rel,  // store-release load-acquire
     29     memory_order_seq_cst   // store-release load-acquire
     30 } memory_order;
     31 
     32 template <class T> T kill_dependency(T y) noexcept;
     33 
     34 // lock-free property
     35 
     36 #define ATOMIC_BOOL_LOCK_FREE unspecified
     37 #define ATOMIC_CHAR_LOCK_FREE unspecified
     38 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
     39 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
     40 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
     41 #define ATOMIC_SHORT_LOCK_FREE unspecified
     42 #define ATOMIC_INT_LOCK_FREE unspecified
     43 #define ATOMIC_LONG_LOCK_FREE unspecified
     44 #define ATOMIC_LLONG_LOCK_FREE unspecified
     45 #define ATOMIC_POINTER_LOCK_FREE unspecified
     46 
     47 // flag type and operations
     48 
     49 typedef struct atomic_flag
     50 {
     51     bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
     52     bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
     53     void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
     54     void clear(memory_order m = memory_order_seq_cst) noexcept;
     55     atomic_flag()  noexcept = default;
     56     atomic_flag(const atomic_flag&) = delete;
     57     atomic_flag& operator=(const atomic_flag&) = delete;
     58     atomic_flag& operator=(const atomic_flag&) volatile = delete;
     59 } atomic_flag;
     60 
     61 bool
     62     atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
     63 
     64 bool
     65     atomic_flag_test_and_set(atomic_flag* obj) noexcept;
     66 
     67 bool
     68     atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
     69                                       memory_order m) noexcept;
     70 
     71 bool
     72     atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
     73 
     74 void
     75     atomic_flag_clear(volatile atomic_flag* obj) noexcept;
     76 
     77 void
     78     atomic_flag_clear(atomic_flag* obj) noexcept;
     79 
     80 void
     81     atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
     82 
     83 void
     84     atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
     85 
     86 #define ATOMIC_FLAG_INIT see below
     87 #define ATOMIC_VAR_INIT(value) see below
     88 
     89 template <class T>
     90 struct atomic
     91 {
     92     bool is_lock_free() const volatile noexcept;
     93     bool is_lock_free() const noexcept;
     94     void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
     95     void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
     96     T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
     97     T load(memory_order m = memory_order_seq_cst) const noexcept;
     98     operator T() const volatile noexcept;
     99     operator T() const noexcept;
    100     T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    101     T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    102     bool compare_exchange_weak(T& expc, T desr,
    103                                memory_order s, memory_order f) volatile noexcept;
    104     bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    105     bool compare_exchange_strong(T& expc, T desr,
    106                                  memory_order s, memory_order f) volatile noexcept;
    107     bool compare_exchange_strong(T& expc, T desr,
    108                                  memory_order s, memory_order f) noexcept;
    109     bool compare_exchange_weak(T& expc, T desr,
    110                                memory_order m = memory_order_seq_cst) volatile noexcept;
    111     bool compare_exchange_weak(T& expc, T desr,
    112                                memory_order m = memory_order_seq_cst) noexcept;
    113     bool compare_exchange_strong(T& expc, T desr,
    114                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    115     bool compare_exchange_strong(T& expc, T desr,
    116                                  memory_order m = memory_order_seq_cst) noexcept;
    117 
    118     atomic() noexcept = default;
    119     constexpr atomic(T desr) noexcept;
    120     atomic(const atomic&) = delete;
    121     atomic& operator=(const atomic&) = delete;
    122     atomic& operator=(const atomic&) volatile = delete;
    123     T operator=(T) volatile noexcept;
    124     T operator=(T) noexcept;
    125 };
    126 
    127 template <>
    128 struct atomic<integral>
    129 {
    130     bool is_lock_free() const volatile noexcept;
    131     bool is_lock_free() const noexcept;
    132     void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    133     void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    134     integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    135     integral load(memory_order m = memory_order_seq_cst) const noexcept;
    136     operator integral() const volatile noexcept;
    137     operator integral() const noexcept;
    138     integral exchange(integral desr,
    139                       memory_order m = memory_order_seq_cst) volatile noexcept;
    140     integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    141     bool compare_exchange_weak(integral& expc, integral desr,
    142                                memory_order s, memory_order f) volatile noexcept;
    143     bool compare_exchange_weak(integral& expc, integral desr,
    144                                memory_order s, memory_order f) noexcept;
    145     bool compare_exchange_strong(integral& expc, integral desr,
    146                                  memory_order s, memory_order f) volatile noexcept;
    147     bool compare_exchange_strong(integral& expc, integral desr,
    148                                  memory_order s, memory_order f) noexcept;
    149     bool compare_exchange_weak(integral& expc, integral desr,
    150                                memory_order m = memory_order_seq_cst) volatile noexcept;
    151     bool compare_exchange_weak(integral& expc, integral desr,
    152                                memory_order m = memory_order_seq_cst) noexcept;
    153     bool compare_exchange_strong(integral& expc, integral desr,
    154                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    155     bool compare_exchange_strong(integral& expc, integral desr,
    156                                  memory_order m = memory_order_seq_cst) noexcept;
    157 
    158     integral
    159         fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    160     integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    161     integral
    162         fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    163     integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    164     integral
    165         fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    166     integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    167     integral
    168         fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    169     integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    170     integral
    171         fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    172     integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
    173 
    174     atomic() noexcept = default;
    175     constexpr atomic(integral desr) noexcept;
    176     atomic(const atomic&) = delete;
    177     atomic& operator=(const atomic&) = delete;
    178     atomic& operator=(const atomic&) volatile = delete;
    179     integral operator=(integral desr) volatile noexcept;
    180     integral operator=(integral desr) noexcept;
    181 
    182     integral operator++(int) volatile noexcept;
    183     integral operator++(int) noexcept;
    184     integral operator--(int) volatile noexcept;
    185     integral operator--(int) noexcept;
    186     integral operator++() volatile noexcept;
    187     integral operator++() noexcept;
    188     integral operator--() volatile noexcept;
    189     integral operator--() noexcept;
    190     integral operator+=(integral op) volatile noexcept;
    191     integral operator+=(integral op) noexcept;
    192     integral operator-=(integral op) volatile noexcept;
    193     integral operator-=(integral op) noexcept;
    194     integral operator&=(integral op) volatile noexcept;
    195     integral operator&=(integral op) noexcept;
    196     integral operator|=(integral op) volatile noexcept;
    197     integral operator|=(integral op) noexcept;
    198     integral operator^=(integral op) volatile noexcept;
    199     integral operator^=(integral op) noexcept;
    200 };
    201 
    202 template <class T>
    203 struct atomic<T*>
    204 {
    205     bool is_lock_free() const volatile noexcept;
    206     bool is_lock_free() const noexcept;
    207     void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    208     void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    209     T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    210     T* load(memory_order m = memory_order_seq_cst) const noexcept;
    211     operator T*() const volatile noexcept;
    212     operator T*() const noexcept;
    213     T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    214     T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    215     bool compare_exchange_weak(T*& expc, T* desr,
    216                                memory_order s, memory_order f) volatile noexcept;
    217     bool compare_exchange_weak(T*& expc, T* desr,
    218                                memory_order s, memory_order f) noexcept;
    219     bool compare_exchange_strong(T*& expc, T* desr,
    220                                  memory_order s, memory_order f) volatile noexcept;
    221     bool compare_exchange_strong(T*& expc, T* desr,
    222                                  memory_order s, memory_order f) noexcept;
    223     bool compare_exchange_weak(T*& expc, T* desr,
    224                                memory_order m = memory_order_seq_cst) volatile noexcept;
    225     bool compare_exchange_weak(T*& expc, T* desr,
    226                                memory_order m = memory_order_seq_cst) noexcept;
    227     bool compare_exchange_strong(T*& expc, T* desr,
    228                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    229     bool compare_exchange_strong(T*& expc, T* desr,
    230                                  memory_order m = memory_order_seq_cst) noexcept;
    231     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    232     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    233     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    234     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    235 
    236     atomic() noexcept = default;
    237     constexpr atomic(T* desr) noexcept;
    238     atomic(const atomic&) = delete;
    239     atomic& operator=(const atomic&) = delete;
    240     atomic& operator=(const atomic&) volatile = delete;
    241 
    242     T* operator=(T*) volatile noexcept;
    243     T* operator=(T*) noexcept;
    244     T* operator++(int) volatile noexcept;
    245     T* operator++(int) noexcept;
    246     T* operator--(int) volatile noexcept;
    247     T* operator--(int) noexcept;
    248     T* operator++() volatile noexcept;
    249     T* operator++() noexcept;
    250     T* operator--() volatile noexcept;
    251     T* operator--() noexcept;
    252     T* operator+=(ptrdiff_t op) volatile noexcept;
    253     T* operator+=(ptrdiff_t op) noexcept;
    254     T* operator-=(ptrdiff_t op) volatile noexcept;
    255     T* operator-=(ptrdiff_t op) noexcept;
    256 };
    257 
    258 
    259 template <class T>
    260     bool
    261     atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
    262 
    263 template <class T>
    264     bool
    265     atomic_is_lock_free(const atomic<T>* obj) noexcept;
    266 
    267 template <class T>
    268     void
    269     atomic_init(volatile atomic<T>* obj, T desr) noexcept;
    270 
    271 template <class T>
    272     void
    273     atomic_init(atomic<T>* obj, T desr) noexcept;
    274 
    275 template <class T>
    276     void
    277     atomic_store(volatile atomic<T>* obj, T desr) noexcept;
    278 
    279 template <class T>
    280     void
    281     atomic_store(atomic<T>* obj, T desr) noexcept;
    282 
    283 template <class T>
    284     void
    285     atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    286 
    287 template <class T>
    288     void
    289     atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    290 
    291 template <class T>
    292     T
    293     atomic_load(const volatile atomic<T>* obj) noexcept;
    294 
    295 template <class T>
    296     T
    297     atomic_load(const atomic<T>* obj) noexcept;
    298 
    299 template <class T>
    300     T
    301     atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
    302 
    303 template <class T>
    304     T
    305     atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
    306 
    307 template <class T>
    308     T
    309     atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
    310 
    311 template <class T>
    312     T
    313     atomic_exchange(atomic<T>* obj, T desr) noexcept;
    314 
    315 template <class T>
    316     T
    317     atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    318 
    319 template <class T>
    320     T
    321     atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    322 
    323 template <class T>
    324     bool
    325     atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    326 
    327 template <class T>
    328     bool
    329     atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
    330 
    331 template <class T>
    332     bool
    333     atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    334 
    335 template <class T>
    336     bool
    337     atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
    338 
    339 template <class T>
    340     bool
    341     atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
    342                                           T desr,
    343                                           memory_order s, memory_order f) noexcept;
    344 
    345 template <class T>
    346     bool
    347     atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
    348                                           memory_order s, memory_order f) noexcept;
    349 
    350 template <class T>
    351     bool
    352     atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
    353                                             T* expc, T desr,
    354                                             memory_order s, memory_order f) noexcept;
    355 
    356 template <class T>
    357     bool
    358     atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
    359                                             T desr,
    360                                             memory_order s, memory_order f) noexcept;
    361 
    362 template <class Integral>
    363     Integral
    364     atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
    365 
    366 template <class Integral>
    367     Integral
    368     atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
    369 
    370 template <class Integral>
    371     Integral
    372     atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
    373                               memory_order m) noexcept;
    374 template <class Integral>
    375     Integral
    376     atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
    377                               memory_order m) noexcept;
    378 template <class Integral>
    379     Integral
    380     atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
    381 
    382 template <class Integral>
    383     Integral
    384     atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
    385 
    386 template <class Integral>
    387     Integral
    388     atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
    389                               memory_order m) noexcept;
    390 template <class Integral>
    391     Integral
    392     atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
    393                               memory_order m) noexcept;
    394 template <class Integral>
    395     Integral
    396     atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
    397 
    398 template <class Integral>
    399     Integral
    400     atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
    401 
    402 template <class Integral>
    403     Integral
    404     atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
    405                               memory_order m) noexcept;
    406 template <class Integral>
    407     Integral
    408     atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
    409                               memory_order m) noexcept;
    410 template <class Integral>
    411     Integral
    412     atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
    413 
    414 template <class Integral>
    415     Integral
    416     atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
    417 
    418 template <class Integral>
    419     Integral
    420     atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
    421                              memory_order m) noexcept;
    422 template <class Integral>
    423     Integral
    424     atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
    425                              memory_order m) noexcept;
    426 template <class Integral>
    427     Integral
    428     atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
    429 
    430 template <class Integral>
    431     Integral
    432     atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
    433 
    434 template <class Integral>
    435     Integral
    436     atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
    437                               memory_order m) noexcept;
    438 template <class Integral>
    439     Integral
    440     atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
    441                               memory_order m) noexcept;
    442 
    443 template <class T>
    444     T*
    445     atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    446 
    447 template <class T>
    448     T*
    449     atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
    450 
    451 template <class T>
    452     T*
    453     atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    454                               memory_order m) noexcept;
    455 template <class T>
    456     T*
    457     atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    458 
    459 template <class T>
    460     T*
    461     atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    462 
    463 template <class T>
    464     T*
    465     atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
    466 
    467 template <class T>
    468     T*
    469     atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    470                               memory_order m) noexcept;
    471 template <class T>
    472     T*
    473     atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    474 
    475 // Atomics for standard typedef types
    476 
    477 typedef atomic<bool>               atomic_bool;
    478 typedef atomic<char>               atomic_char;
    479 typedef atomic<signed char>        atomic_schar;
    480 typedef atomic<unsigned char>      atomic_uchar;
    481 typedef atomic<short>              atomic_short;
    482 typedef atomic<unsigned short>     atomic_ushort;
    483 typedef atomic<int>                atomic_int;
    484 typedef atomic<unsigned int>       atomic_uint;
    485 typedef atomic<long>               atomic_long;
    486 typedef atomic<unsigned long>      atomic_ulong;
    487 typedef atomic<long long>          atomic_llong;
    488 typedef atomic<unsigned long long> atomic_ullong;
    489 typedef atomic<char16_t>           atomic_char16_t;
    490 typedef atomic<char32_t>           atomic_char32_t;
    491 typedef atomic<wchar_t>            atomic_wchar_t;
    492 
    493 typedef atomic<int_least8_t>   atomic_int_least8_t;
    494 typedef atomic<uint_least8_t>  atomic_uint_least8_t;
    495 typedef atomic<int_least16_t>  atomic_int_least16_t;
    496 typedef atomic<uint_least16_t> atomic_uint_least16_t;
    497 typedef atomic<int_least32_t>  atomic_int_least32_t;
    498 typedef atomic<uint_least32_t> atomic_uint_least32_t;
    499 typedef atomic<int_least64_t>  atomic_int_least64_t;
    500 typedef atomic<uint_least64_t> atomic_uint_least64_t;
    501 
    502 typedef atomic<int_fast8_t>   atomic_int_fast8_t;
    503 typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
    504 typedef atomic<int_fast16_t>  atomic_int_fast16_t;
    505 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
    506 typedef atomic<int_fast32_t>  atomic_int_fast32_t;
    507 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
    508 typedef atomic<int_fast64_t>  atomic_int_fast64_t;
    509 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
    510 
    511 typedef atomic<intptr_t>  atomic_intptr_t;
    512 typedef atomic<uintptr_t> atomic_uintptr_t;
    513 typedef atomic<size_t>    atomic_size_t;
    514 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
    515 typedef atomic<intmax_t>  atomic_intmax_t;
    516 typedef atomic<uintmax_t> atomic_uintmax_t;
    517 
    518 // fences
    519 
    520 void atomic_thread_fence(memory_order m) noexcept;
    521 void atomic_signal_fence(memory_order m) noexcept;
    522 
    523 }  // std
    524 
    525 */
    526 
    527 #include <__config>
    528 #include <cstddef>
    529 #include <cstdint>
    530 #include <type_traits>
    531 
    532 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
    533 #pragma GCC system_header
    534 #endif
    535 
    536 #ifdef _LIBCPP_HAS_NO_THREADS
    537 #error <atomic> is not supported on this single threaded system
    538 #else // !_LIBCPP_HAS_NO_THREADS
    539 
    540 _LIBCPP_BEGIN_NAMESPACE_STD
    541 
    542 #if !__has_feature(cxx_atomic) && _GNUC_VER < 407
    543 #error <atomic> is not implemented
    544 #else
    545 
// Enumeration of the standard memory ordering constraints; values are
// translated to the compiler's __ATOMIC_* constants by
// __gcc_atomic::__to_gcc_order / __to_gcc_failure_order below.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;
    551 
    552 #if _GNUC_VER >= 407
namespace __gcc_atomic {
// Trivial wrapper standing in for C11's _Atomic(T) when compiling with
// GCC (>= 4.7) rather than Clang: it just holds the value so that the
// __atomic_* builtins can operate on its address.
template <typename _Tp>
struct __gcc_atomic_t {
  // Default construction intentionally leaves __a_value uninitialized,
  // matching std::atomic's defaulted constructor.
  __gcc_atomic_t() _NOEXCEPT {}
  explicit __gcc_atomic_t(_Tp value) _NOEXCEPT : __a_value(value) {}
  _Tp __a_value;
};
// Emulate the C11 _Atomic keyword in terms of the wrapper above.
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
    561 
// Declared-only helper used in unevaluated contexts to produce a _Tp value.
template <typename _Tp> _Tp __create();

// SFINAE probe: the first overload is viable only when the expression
// `ptr->__a_value = value` (ptr of type _Tp, value of type _Td) is well
// formed; otherwise overload resolution falls through to the ellipsis.
template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

// True when a _Td can be assigned directly to the __a_value member
// reached through a (possibly volatile-qualified) _Tp pointer.
template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};
    575 
// Map a std::memory_order to the matching __ATOMIC_* constant expected
// by the GCC/Clang __atomic_* builtins.
static inline constexpr int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  // (C++11 constexpr functions are limited to a single return statement.)
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}
    585 
// Same mapping, but for the *failure* ordering of a compare-exchange:
// the builtins require the failure order to carry no release semantics,
// so release-flavored orders are weakened (release -> relaxed,
// acq_rel -> acquire).
static inline constexpr int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}

} // namespace __gcc_atomic
    597 
// Non-atomic initialization of a *volatile* atomic object, selected
// when the contained type can be assigned through a volatile lvalue.
template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
    605 
    606 template <typename _Tp>
    607 static inline
    608 typename enable_if<
    609     !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
    610      __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
    611 __c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
    612   // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
    613   // the default operator= in an object is not volatile, a byte-by-byte copy
    614   // is required.
    615   volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
    616   volatile char* end = to + sizeof(_Tp);
    617   char* from = reinterpret_cast<char*>(&__val);
    618   while (to != end) {
    619     *to++ = *from++;
    620   }
    621 }
    622 
// Non-atomic initialization of a (non-volatile) atomic object.
template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
    627 
    628 static inline void __c11_atomic_thread_fence(memory_order __order) {
    629   __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
    630 }
    631 
    632 static inline void __c11_atomic_signal_fence(memory_order __order) {
    633   __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
    634 }
    635 
    636 static inline bool __c11_atomic_is_lock_free(size_t __size) {
    637   return __atomic_is_lock_free(__size, 0);
    638 }
    639 
    640 template <typename _Tp>
    641 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
    642                                       memory_order __order) {
    643   return __atomic_store(&__a->__a_value, &__val,
    644                         __gcc_atomic::__to_gcc_order(__order));
    645 }
    646 
    647 template <typename _Tp>
    648 static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
    649                                       memory_order __order) {
    650   __atomic_store(&__a->__a_value, &__val,
    651                  __gcc_atomic::__to_gcc_order(__order));
    652 }
    653 
    654 template <typename _Tp>
    655 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
    656                                     memory_order __order) {
    657   _Tp __ret;
    658   __atomic_load(&__a->__a_value, &__ret,
    659                 __gcc_atomic::__to_gcc_order(__order));
    660   return __ret;
    661 }
    662 
    663 template <typename _Tp>
    664 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
    665   _Tp __ret;
    666   __atomic_load(&__a->__a_value, &__ret,
    667                 __gcc_atomic::__to_gcc_order(__order));
    668   return __ret;
    669 }
    670 
    671 template <typename _Tp>
    672 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
    673                                         _Tp __value, memory_order __order) {
    674   _Tp __ret;
    675   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    676                     __gcc_atomic::__to_gcc_order(__order));
    677   return __ret;
    678 }
    679 
    680 template <typename _Tp>
    681 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
    682                                         memory_order __order) {
    683   _Tp __ret;
    684   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    685                     __gcc_atomic::__to_gcc_order(__order));
    686   return __ret;
    687 }
    688 
    689 template <typename _Tp>
    690 static inline bool __c11_atomic_compare_exchange_strong(
    691     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    692     memory_order __success, memory_order __failure) {
    693   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    694                                    false,
    695                                    __gcc_atomic::__to_gcc_order(__success),
    696                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    697 }
    698 
    699 template <typename _Tp>
    700 static inline bool __c11_atomic_compare_exchange_strong(
    701     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    702     memory_order __failure) {
    703   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    704                                    false,
    705                                    __gcc_atomic::__to_gcc_order(__success),
    706                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    707 }
    708 
// Weak compare-exchange shims.  The weak flag is true: per the GCC
// builtin documentation the operation may fail spuriously even when the
// expected value matches, in exchange for better performance on some
// targets.  On failure *__expected receives the observed value.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
    728 
// Scale factor applied to fetch_add/fetch_sub deltas below: 1 for
// ordinary types, sizeof(_Tp) for pointer types so that atomic pointer
// arithmetic advances by whole objects.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (These specializations deliberately omit ::value, so any use of an
// array type fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
    741 
// fetch_add shims: the delta is pre-scaled by __skip_amt (sizeof(_Tp)
// when _Tp is a pointer type) before being handed to the GCC builtin.
// Returns the value held before the addition.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
    755 
// fetch_sub shims: mirror of __c11_atomic_fetch_add above, with the same
// __skip_amt pre-scaling for pointer arithmetic.  Returns the value held
// before the subtraction.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
    769 
// Bitwise-AND fetch shims; returns the value held before the operation.
// No __skip_amt scaling: bitwise ops are only meaningful for integral
// types, so the pattern is applied as-is.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
    783 
// Bitwise-OR fetch shims; returns the value held before the operation.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}
    797 
// Bitwise-XOR fetch shims; returns the value held before the operation.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload; identical semantics to the volatile form above.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
    811 #endif // _GNUC_VER >= 407
    812 
// std::kill_dependency [atomics.order]: terminates a dependency chain
// started by a memory_order_consume load.  The argument is returned
// unchanged; the effect is purely on the compiler's dependency tracking.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
    820 
    821 // general atomic<T>
    822 
// Common base of atomic<T>: the operations every specialization shares
// (load/store/exchange/compare-exchange).  The bool parameter defaults to
// true exactly for integral types other than bool, selecting the
// arithmetic specialization defined below; this primary template is the
// "false" (non-integral) case.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // mutable: the const-qualified members (load, is_lock_free, operator
    // _Tp) pass &__a_ to the __c11_atomic_* primitives, which take
    // non-const pointers.
    mutable _Atomic(_Tp) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    // Implicit conversion reads with the default (seq_cst) ordering.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    // Two-order compare-exchange overloads: separate success (__s) and
    // failure (__f) orderings.  On failure __e is updated with the value
    // actually observed.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-order overloads: the one ordering __m is used for both the
    // success and failure cases.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // Default constructor: trivially defaulted when the compiler supports
    // it (leaves __a_ uninitialized, as the standard requires); otherwise
    // emulated with value-initialization.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    // Atomics are neither copyable nor copy-assignable; use deleted
    // functions when available, private undefined declarations otherwise.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
};
    909 
    910 // atomic<Integral>
    911 
// Specialization for integral types (other than bool): layers the
// arithmetic/bitwise fetch operations and the operator forms on top of
// the common base.  Each operator returns the NEW value, which is
// reconstructed by re-applying the operation to the old value returned
// by the corresponding fetch_* call.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // fetch_* members: atomically apply the operation and return the
    // PREVIOUS value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-increment/decrement return the OLD value (the fetch result);
    // pre-increment/decrement and the compound assignments return the NEW
    // value (old value with the operation re-applied).  All use the
    // default seq_cst ordering.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
    990 
    991 // atomic<T>
    992 
// Primary template std::atomic<T>: everything comes from __atomic_base
// (which dispatches on integral-ness); only assignment from _Tp is added
// here.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Assignment performs a seq_cst store and, per the standard, returns
    // the assigned value rather than a reference to *this.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
   1010 
   1011 // atomic<T*>
   1012 
// Partial specialization std::atomic<T*>: adds pointer arithmetic.
// fetch_add/fetch_sub take a ptrdiff_t element count; the element-size
// scaling happens inside __c11_atomic_fetch_add/_sub via __skip_amt.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    // Assignment stores with seq_cst ordering and returns the stored
    // pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // fetch_add/fetch_sub return the pointer value held BEFORE the
    // operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Post-inc/dec return the OLD pointer; pre-inc/dec and the compound
    // assignments return the NEW pointer (old value with the offset
    // re-applied).
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
   1070 
   1071 // atomic_is_lock_free
   1072 
// C-compatible free function: reports whether operations on *__o are
// lock-free.  Forwards to the member function.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}
   1088 
   1089 // atomic_init
   1090 
// Non-atomically initializes *__o to __d via the __c11_atomic_init
// primitive; reaches directly into the __a_ member rather than going
// through store().
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}
   1106 
   1107 // atomic_store
   1108 
// C-compatible free function: seq_cst store (forwards to the member's
// default ordering).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}
   1124 
   1125 // atomic_store_explicit
   1126 
// C-compatible free function: store with a caller-supplied memory order.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}
   1142 
   1143 // atomic_load
   1144 
// C-compatible free function: seq_cst load.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}
   1160 
   1161 // atomic_load_explicit
   1162 
// C-compatible free function: load with a caller-supplied memory order.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}
   1178 
   1179 // atomic_exchange
   1180 
// C-compatible free function: seq_cst exchange; returns the previous
// value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}
   1196 
   1197 // atomic_exchange_explicit
   1198 
// C-compatible free function: exchange with a caller-supplied memory
// order; returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}
   1214 
   1215 // atomic_compare_exchange_weak
   1216 
// C-compatible weak CAS: the expected value is passed by pointer (C
// interface) and dereferenced for the member call; both success and
// failure use the member's default seq_cst ordering.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}
   1232 
   1233 // atomic_compare_exchange_strong
   1234 
// C-compatible strong CAS with the default seq_cst ordering; expected
// value passed by pointer, updated on failure.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}
   1250 
   1251 // atomic_compare_exchange_weak_explicit
   1252 
// C-compatible weak CAS with explicit success (__s) and failure (__f)
// orderings.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}
   1271 
   1272 // atomic_compare_exchange_strong_explicit
   1273 
// C-compatible strong CAS with explicit success (__s) and failure (__f)
// orderings.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
   1293 
   1294 // atomic_fetch_add
   1295 
// C-compatible fetch_add, seq_cst ordering.  The integral overloads are
// SFINAE-restricted to integral types other than bool (matching the
// __atomic_base dispatch); separate overloads handle atomic<T*> with a
// ptrdiff_t element count.  All return the value held before the add.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// Non-volatile integral overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// Pointer overloads: __op counts elements, not bytes.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
   1335 
   1336 // atomic_fetch_add_explicit
   1337 
// C-compatible fetch_add with a caller-supplied memory order; same
// overload set as atomic_fetch_add (integral non-bool via enable_if,
// plus pointer overloads taking a ptrdiff_t element count).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// Non-volatile integral overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// Pointer overloads.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
   1378 
   1379 // atomic_fetch_sub
   1380 
// C-compatible fetch_sub, seq_cst ordering; mirrors atomic_fetch_add's
// overload set (integral non-bool plus pointer/ptrdiff_t).  Returns the
// value held before the subtraction.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// Non-volatile integral overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// Pointer overloads: __op counts elements, not bytes.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
   1420 
   1421 // atomic_fetch_sub_explicit
   1422 
// C-compatible fetch_sub with a caller-supplied memory order; same
// overload set as atomic_fetch_sub.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

// Non-volatile integral overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

// Pointer overloads.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
   1463 
   1464 // atomic_fetch_and
   1465 
// C-compatible fetch_and, seq_cst ordering.  Integral non-bool types
// only (no pointer overloads for bitwise operations); returns the value
// held before the AND.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}
   1489 
   1490 // atomic_fetch_and_explicit
   1491 
// C-compatible fetch_and with a caller-supplied memory order; integral
// non-bool types only.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// Non-volatile overload.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
   1515 
   1516 // atomic_fetch_or
   1517 
   1518 template <class _Tp>
   1519 inline _LIBCPP_INLINE_VISIBILITY
   1520 typename enable_if
   1521 <
   1522     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1523     _Tp
   1524 >::type
   1525 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1526 {
   1527     return __o->fetch_or(__op);
   1528 }
   1529 
   1530 template <class _Tp>
   1531 inline _LIBCPP_INLINE_VISIBILITY
   1532 typename enable_if
   1533 <
   1534     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1535     _Tp
   1536 >::type
   1537 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1538 {
   1539     return __o->fetch_or(__op);
   1540 }
   1541 
   1542 // atomic_fetch_or_explicit
   1543 
   1544 template <class _Tp>
   1545 inline _LIBCPP_INLINE_VISIBILITY
   1546 typename enable_if
   1547 <
   1548     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1549     _Tp
   1550 >::type
   1551 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1552 {
   1553     return __o->fetch_or(__op, __m);
   1554 }
   1555 
   1556 template <class _Tp>
   1557 inline _LIBCPP_INLINE_VISIBILITY
   1558 typename enable_if
   1559 <
   1560     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1561     _Tp
   1562 >::type
   1563 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1564 {
   1565     return __o->fetch_or(__op, __m);
   1566 }
   1567 
   1568 // atomic_fetch_xor
   1569 
   1570 template <class _Tp>
   1571 inline _LIBCPP_INLINE_VISIBILITY
   1572 typename enable_if
   1573 <
   1574     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1575     _Tp
   1576 >::type
   1577 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1578 {
   1579     return __o->fetch_xor(__op);
   1580 }
   1581 
   1582 template <class _Tp>
   1583 inline _LIBCPP_INLINE_VISIBILITY
   1584 typename enable_if
   1585 <
   1586     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1587     _Tp
   1588 >::type
   1589 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1590 {
   1591     return __o->fetch_xor(__op);
   1592 }
   1593 
   1594 // atomic_fetch_xor_explicit
   1595 
   1596 template <class _Tp>
   1597 inline _LIBCPP_INLINE_VISIBILITY
   1598 typename enable_if
   1599 <
   1600     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1601     _Tp
   1602 >::type
   1603 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1604 {
   1605     return __o->fetch_xor(__op, __m);
   1606 }
   1607 
   1608 template <class _Tp>
   1609 inline _LIBCPP_INLINE_VISIBILITY
   1610 typename enable_if
   1611 <
   1612     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1613     _Tp
   1614 >::type
   1615 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1616 {
   1617     return __o->fetch_xor(__op, __m);
   1618 }
   1619 
   1620 // flag type and operations
   1621 
// Lock-free boolean flag implemented directly on the C11 atomic builtins.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;  // underlying atomic state; true == "set"

    // Atomically sets the flag to true and returns the value it held
    // before the exchange, using memory ordering __m.
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    // Atomically stores false into the flag with memory ordering __m.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    // Default constructor: defaulted where the compiler supports
    // "= default", otherwise value-initializes __a_ explicitly.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    // Implicit bool constructor; supports brace initialization such as
    // the ATOMIC_FLAG_INIT expansion ({false}).
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

    // atomic_flag is neither copyable nor copy-assignable; emulated with
    // private undefined members when deleted functions are unavailable.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;
   1660 
   1661 inline _LIBCPP_INLINE_VISIBILITY
   1662 bool
   1663 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
   1664 {
   1665     return __o->test_and_set();
   1666 }
   1667 
   1668 inline _LIBCPP_INLINE_VISIBILITY
   1669 bool
   1670 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
   1671 {
   1672     return __o->test_and_set();
   1673 }
   1674 
   1675 inline _LIBCPP_INLINE_VISIBILITY
   1676 bool
   1677 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1678 {
   1679     return __o->test_and_set(__m);
   1680 }
   1681 
   1682 inline _LIBCPP_INLINE_VISIBILITY
   1683 bool
   1684 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1685 {
   1686     return __o->test_and_set(__m);
   1687 }
   1688 
   1689 inline _LIBCPP_INLINE_VISIBILITY
   1690 void
   1691 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
   1692 {
   1693     __o->clear();
   1694 }
   1695 
   1696 inline _LIBCPP_INLINE_VISIBILITY
   1697 void
   1698 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
   1699 {
   1700     __o->clear();
   1701 }
   1702 
   1703 inline _LIBCPP_INLINE_VISIBILITY
   1704 void
   1705 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1706 {
   1707     __o->clear(__m);
   1708 }
   1709 
   1710 inline _LIBCPP_INLINE_VISIBILITY
   1711 void
   1712 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1713 {
   1714     __o->clear(__m);
   1715 }
   1716 
   1717 // fences
   1718 
   1719 inline _LIBCPP_INLINE_VISIBILITY
   1720 void
   1721 atomic_thread_fence(memory_order __m) _NOEXCEPT
   1722 {
   1723     __c11_atomic_thread_fence(__m);
   1724 }
   1725 
   1726 inline _LIBCPP_INLINE_VISIBILITY
   1727 void
   1728 atomic_signal_fence(memory_order __m) _NOEXCEPT
   1729 {
   1730     __c11_atomic_signal_fence(__m);
   1731 }
   1732 
   1733 // Atomics for standard typedef types
   1734 
// Specializations for the built-in character and integer types.
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// Specializations for the <cstdint> least-width integer typedefs.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Specializations for the <cstdint> fastest-width integer typedefs.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Specializations for pointer-sized and maximum-width integer typedefs.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
   1775 
// ATOMIC_FLAG_INIT initializes an atomic_flag to the clear state
// (expands to {false}, matching atomic_flag's bool constructor).
#define ATOMIC_FLAG_INIT {false}
// ATOMIC_VAR_INIT(v) brace-initializes an atomic object with value v.
#define ATOMIC_VAR_INIT(__v) {__v}

// lock-free property

// The standard *_LOCK_FREE query macros forward to the corresponding
// compiler-predefined __GCC_ATOMIC_* values for this target.
#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
   1791 
   1792 #endif  //  !__has_feature(cxx_atomic)
   1793 
   1794 _LIBCPP_END_NAMESPACE_STD
   1795 
   1796 #endif  // !_LIBCPP_HAS_NO_THREADS
   1797 
   1798 #endif  // _LIBCPP_ATOMIC
   1799