      1 // -*- C++ -*-
      2 //===--------------------------- atomic -----------------------------------===//
      3 //
      4 //                     The LLVM Compiler Infrastructure
      5 //
      6 // This file is distributed under the University of Illinois Open Source
      7 // License. See LICENSE.TXT for details.
      8 //
      9 //===----------------------------------------------------------------------===//
     10 
     11 #ifndef _LIBCPP_ATOMIC
     12 #define _LIBCPP_ATOMIC
     13 
     14 /*
     15     atomic synopsis
     16 
     17 namespace std
     18 {
     19 
     20 // order and consistency
     21 
     22 typedef enum memory_order
     23 {
     24     memory_order_relaxed,
     25     memory_order_consume,  // load-consume
     26     memory_order_acquire,  // load-acquire
     27     memory_order_release,  // store-release
     28     memory_order_acq_rel,  // store-release load-acquire
      29     memory_order_seq_cst   // store-release load-acquire, plus a single total order of all seq_cst operations
     30 } memory_order;
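
Example (user code, not part of this synopsis): a minimal sketch of the usual
release/acquire pairing. The acquire load that observes the release store makes
the producer's earlier writes visible; the names payload, ready, producer and
consumer are illustrative only.

    #include <atomic>

    std::atomic<int>  payload(0);
    std::atomic<bool> ready(false);

    void producer() {
        payload.store(42, std::memory_order_relaxed);
        ready.store(true, std::memory_order_release);    // publish payload
    }

    void consumer() {
        while (!ready.load(std::memory_order_acquire))   // synchronizes with the release store
            ;
        int v = payload.load(std::memory_order_relaxed); // guaranteed to read 42
        (void)v;
    }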
     31 
     32 template <class T> T kill_dependency(T y) noexcept;
     33 
     34 // lock-free property
     35 
     36 #define ATOMIC_BOOL_LOCK_FREE unspecified
     37 #define ATOMIC_CHAR_LOCK_FREE unspecified
     38 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
     39 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
     40 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
     41 #define ATOMIC_SHORT_LOCK_FREE unspecified
     42 #define ATOMIC_INT_LOCK_FREE unspecified
     43 #define ATOMIC_LONG_LOCK_FREE unspecified
     44 #define ATOMIC_LLONG_LOCK_FREE unspecified
     45 #define ATOMIC_POINTER_LOCK_FREE unspecified
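
Each of these macros expands to 0 (the type is never lock-free), 1 (sometimes
lock-free) or 2 (always lock-free); which value applies is implementation-defined.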
     46 
     47 // flag type and operations
     48 
     49 typedef struct atomic_flag
     50 {
     51     bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
     52     bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
     53     void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
     54     void clear(memory_order m = memory_order_seq_cst) noexcept;
     55     atomic_flag()  noexcept = default;
     56     atomic_flag(const atomic_flag&) = delete;
     57     atomic_flag& operator=(const atomic_flag&) = delete;
     58     atomic_flag& operator=(const atomic_flag&) volatile = delete;
     59 } atomic_flag;
     60 
     61 bool
     62     atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
     63 
     64 bool
     65     atomic_flag_test_and_set(atomic_flag* obj) noexcept;
     66 
     67 bool
     68     atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
     69                                       memory_order m) noexcept;
     70 
     71 bool
     72     atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
     73 
     74 void
     75     atomic_flag_clear(volatile atomic_flag* obj) noexcept;
     76 
     77 void
     78     atomic_flag_clear(atomic_flag* obj) noexcept;
     79 
     80 void
     81     atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
     82 
     83 void
     84     atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
     85 
     86 #define ATOMIC_FLAG_INIT see below
     87 #define ATOMIC_VAR_INIT(value) see below
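
Example (user code, not part of this synopsis): atomic_flag is the only type
whose operations are guaranteed to be lock-free, and it is commonly used to
build a tiny spinlock. A non-normative sketch; the names lock_flag, lock and
unlock are illustrative.

    #include <atomic>

    std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;

    void lock()
    {
        while (lock_flag.test_and_set(std::memory_order_acquire))
            ;   // spin until the current holder calls clear()
    }

    void unlock()
    {
        lock_flag.clear(std::memory_order_release);
    }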
     88 
     89 template <class T>
     90 struct atomic
     91 {
     92     bool is_lock_free() const volatile noexcept;
     93     bool is_lock_free() const noexcept;
     94     void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
     95     void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
     96     T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
     97     T load(memory_order m = memory_order_seq_cst) const noexcept;
     98     operator T() const volatile noexcept;
     99     operator T() const noexcept;
    100     T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    101     T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    102     bool compare_exchange_weak(T& expc, T desr,
    103                                memory_order s, memory_order f) volatile noexcept;
    104     bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    105     bool compare_exchange_strong(T& expc, T desr,
    106                                  memory_order s, memory_order f) volatile noexcept;
    107     bool compare_exchange_strong(T& expc, T desr,
    108                                  memory_order s, memory_order f) noexcept;
    109     bool compare_exchange_weak(T& expc, T desr,
    110                                memory_order m = memory_order_seq_cst) volatile noexcept;
    111     bool compare_exchange_weak(T& expc, T desr,
    112                                memory_order m = memory_order_seq_cst) noexcept;
    113     bool compare_exchange_strong(T& expc, T desr,
     114                                  memory_order m = memory_order_seq_cst) volatile noexcept;
    115     bool compare_exchange_strong(T& expc, T desr,
    116                                  memory_order m = memory_order_seq_cst) noexcept;
    117 
    118     atomic() noexcept = default;
    119     constexpr atomic(T desr) noexcept;
    120     atomic(const atomic&) = delete;
    121     atomic& operator=(const atomic&) = delete;
    122     atomic& operator=(const atomic&) volatile = delete;
    123     T operator=(T) volatile noexcept;
    124     T operator=(T) noexcept;
    125 };
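
Example (user code, not part of this synopsis): compare_exchange_weak may fail
spuriously, so it is normally used in a retry loop; on failure it reloads the
expected value. A non-normative sketch; atomic_max is an illustrative name.

    #include <atomic>

    // Atomically raise `a` to at least `v`.
    void atomic_max(std::atomic<int>& a, int v)
    {
        int expected = a.load(std::memory_order_relaxed);
        while (expected < v &&
               !a.compare_exchange_weak(expected, v,
                                        std::memory_order_release,
                                        std::memory_order_relaxed))
        {
            // On failure `expected` now holds the current value; loop and retry.
        }
    }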
    126 
    127 template <>
    128 struct atomic<integral>
    129 {
    130     bool is_lock_free() const volatile noexcept;
    131     bool is_lock_free() const noexcept;
    132     void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    133     void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    134     integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    135     integral load(memory_order m = memory_order_seq_cst) const noexcept;
    136     operator integral() const volatile noexcept;
    137     operator integral() const noexcept;
    138     integral exchange(integral desr,
    139                       memory_order m = memory_order_seq_cst) volatile noexcept;
    140     integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    141     bool compare_exchange_weak(integral& expc, integral desr,
    142                                memory_order s, memory_order f) volatile noexcept;
    143     bool compare_exchange_weak(integral& expc, integral desr,
    144                                memory_order s, memory_order f) noexcept;
    145     bool compare_exchange_strong(integral& expc, integral desr,
    146                                  memory_order s, memory_order f) volatile noexcept;
    147     bool compare_exchange_strong(integral& expc, integral desr,
    148                                  memory_order s, memory_order f) noexcept;
    149     bool compare_exchange_weak(integral& expc, integral desr,
    150                                memory_order m = memory_order_seq_cst) volatile noexcept;
    151     bool compare_exchange_weak(integral& expc, integral desr,
    152                                memory_order m = memory_order_seq_cst) noexcept;
    153     bool compare_exchange_strong(integral& expc, integral desr,
     154                                  memory_order m = memory_order_seq_cst) volatile noexcept;
    155     bool compare_exchange_strong(integral& expc, integral desr,
    156                                  memory_order m = memory_order_seq_cst) noexcept;
    157 
    158     integral
    159         fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    160     integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    161     integral
    162         fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    163     integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    164     integral
    165         fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    166     integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    167     integral
    168         fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    169     integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    170     integral
    171         fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    172     integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
    173 
    174     atomic() noexcept = default;
    175     constexpr atomic(integral desr) noexcept;
    176     atomic(const atomic&) = delete;
    177     atomic& operator=(const atomic&) = delete;
    178     atomic& operator=(const atomic&) volatile = delete;
    179     integral operator=(integral desr) volatile noexcept;
    180     integral operator=(integral desr) noexcept;
    181 
    182     integral operator++(int) volatile noexcept;
    183     integral operator++(int) noexcept;
    184     integral operator--(int) volatile noexcept;
    185     integral operator--(int) noexcept;
    186     integral operator++() volatile noexcept;
    187     integral operator++() noexcept;
    188     integral operator--() volatile noexcept;
    189     integral operator--() noexcept;
    190     integral operator+=(integral op) volatile noexcept;
    191     integral operator+=(integral op) noexcept;
    192     integral operator-=(integral op) volatile noexcept;
    193     integral operator-=(integral op) noexcept;
    194     integral operator&=(integral op) volatile noexcept;
    195     integral operator&=(integral op) noexcept;
    196     integral operator|=(integral op) volatile noexcept;
    197     integral operator|=(integral op) noexcept;
    198     integral operator^=(integral op) volatile noexcept;
    199     integral operator^=(integral op) noexcept;
    200 };
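
Example (user code, not part of this synopsis): the integral specialization adds
the fetch-and-modify operations and the corresponding operators. A non-normative
sketch of a simple event counter; hit_count and record_hit are illustrative names.

    #include <atomic>

    std::atomic<unsigned long> hit_count(0);

    void record_hit()
    {
        // The counter orders no other memory accesses, so relaxed is sufficient.
        hit_count.fetch_add(1, std::memory_order_relaxed);
        // The operator forms (e.g. ++hit_count) always use memory_order_seq_cst.
    }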
    201 
    202 template <class T>
    203 struct atomic<T*>
    204 {
    205     bool is_lock_free() const volatile noexcept;
    206     bool is_lock_free() const noexcept;
    207     void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    208     void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    209     T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    210     T* load(memory_order m = memory_order_seq_cst) const noexcept;
    211     operator T*() const volatile noexcept;
    212     operator T*() const noexcept;
    213     T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    214     T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    215     bool compare_exchange_weak(T*& expc, T* desr,
    216                                memory_order s, memory_order f) volatile noexcept;
    217     bool compare_exchange_weak(T*& expc, T* desr,
    218                                memory_order s, memory_order f) noexcept;
    219     bool compare_exchange_strong(T*& expc, T* desr,
    220                                  memory_order s, memory_order f) volatile noexcept;
    221     bool compare_exchange_strong(T*& expc, T* desr,
    222                                  memory_order s, memory_order f) noexcept;
    223     bool compare_exchange_weak(T*& expc, T* desr,
    224                                memory_order m = memory_order_seq_cst) volatile noexcept;
    225     bool compare_exchange_weak(T*& expc, T* desr,
    226                                memory_order m = memory_order_seq_cst) noexcept;
    227     bool compare_exchange_strong(T*& expc, T* desr,
     228                                  memory_order m = memory_order_seq_cst) volatile noexcept;
    229     bool compare_exchange_strong(T*& expc, T* desr,
    230                                  memory_order m = memory_order_seq_cst) noexcept;
    231     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    232     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    233     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    234     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    235 
    236     atomic() noexcept = default;
    237     constexpr atomic(T* desr) noexcept;
    238     atomic(const atomic&) = delete;
    239     atomic& operator=(const atomic&) = delete;
    240     atomic& operator=(const atomic&) volatile = delete;
    241 
    242     T* operator=(T*) volatile noexcept;
    243     T* operator=(T*) noexcept;
    244     T* operator++(int) volatile noexcept;
    245     T* operator++(int) noexcept;
    246     T* operator--(int) volatile noexcept;
    247     T* operator--(int) noexcept;
    248     T* operator++() volatile noexcept;
    249     T* operator++() noexcept;
    250     T* operator--() volatile noexcept;
    251     T* operator--() noexcept;
    252     T* operator+=(ptrdiff_t op) volatile noexcept;
    253     T* operator+=(ptrdiff_t op) noexcept;
    254     T* operator-=(ptrdiff_t op) volatile noexcept;
    255     T* operator-=(ptrdiff_t op) noexcept;
    256 };
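
Example (user code, not part of this synopsis): for atomic<T*>, fetch_add and
fetch_sub take a ptrdiff_t measured in elements, exactly like ordinary pointer
arithmetic. A non-normative sketch; buffer, cursor and claim_slot are
illustrative names.

    #include <atomic>

    int buffer[64];
    std::atomic<int*> cursor(buffer);

    int* claim_slot()
    {
        // Returns the previous pointer and advances the shared cursor by one int.
        return cursor.fetch_add(1);
    }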
    257 
    258 
    259 template <class T>
    260     bool
    261     atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
    262 
    263 template <class T>
    264     bool
    265     atomic_is_lock_free(const atomic<T>* obj) noexcept;
    266 
    267 template <class T>
    268     void
    269     atomic_init(volatile atomic<T>* obj, T desr) noexcept;
    270 
    271 template <class T>
    272     void
    273     atomic_init(atomic<T>* obj, T desr) noexcept;
    274 
    275 template <class T>
    276     void
    277     atomic_store(volatile atomic<T>* obj, T desr) noexcept;
    278 
    279 template <class T>
    280     void
    281     atomic_store(atomic<T>* obj, T desr) noexcept;
    282 
    283 template <class T>
    284     void
    285     atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    286 
    287 template <class T>
    288     void
    289     atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    290 
    291 template <class T>
    292     T
    293     atomic_load(const volatile atomic<T>* obj) noexcept;
    294 
    295 template <class T>
    296     T
    297     atomic_load(const atomic<T>* obj) noexcept;
    298 
    299 template <class T>
    300     T
    301     atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
    302 
    303 template <class T>
    304     T
    305     atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
    306 
    307 template <class T>
    308     T
    309     atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
    310 
    311 template <class T>
    312     T
    313     atomic_exchange(atomic<T>* obj, T desr) noexcept;
    314 
    315 template <class T>
    316     T
    317     atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    318 
    319 template <class T>
    320     T
    321     atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    322 
    323 template <class T>
    324     bool
    325     atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    326 
    327 template <class T>
    328     bool
    329     atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
    330 
    331 template <class T>
    332     bool
    333     atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    334 
    335 template <class T>
    336     bool
    337     atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
    338 
    339 template <class T>
    340     bool
    341     atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
    342                                           T desr,
    343                                           memory_order s, memory_order f) noexcept;
    344 
    345 template <class T>
    346     bool
    347     atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
    348                                           memory_order s, memory_order f) noexcept;
    349 
    350 template <class T>
    351     bool
    352     atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
    353                                             T* expc, T desr,
    354                                             memory_order s, memory_order f) noexcept;
    355 
    356 template <class T>
    357     bool
    358     atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
    359                                             T desr,
    360                                             memory_order s, memory_order f) noexcept;
    361 
    362 template <class Integral>
    363     Integral
    364     atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
    365 
    366 template <class Integral>
    367     Integral
    368     atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
    369 
    370 template <class Integral>
    371     Integral
    372     atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
    373                               memory_order m) noexcept;
    374 template <class Integral>
    375     Integral
    376     atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
    377                               memory_order m) noexcept;
    378 template <class Integral>
    379     Integral
    380     atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
    381 
    382 template <class Integral>
    383     Integral
    384     atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
    385 
    386 template <class Integral>
    387     Integral
    388     atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
    389                               memory_order m) noexcept;
    390 template <class Integral>
    391     Integral
    392     atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
    393                               memory_order m) noexcept;
    394 template <class Integral>
    395     Integral
    396     atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
    397 
    398 template <class Integral>
    399     Integral
    400     atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
    401 
    402 template <class Integral>
    403     Integral
    404     atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
    405                               memory_order m) noexcept;
    406 template <class Integral>
    407     Integral
    408     atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
    409                               memory_order m) noexcept;
    410 template <class Integral>
    411     Integral
    412     atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
    413 
    414 template <class Integral>
    415     Integral
    416     atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
    417 
    418 template <class Integral>
    419     Integral
    420     atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
    421                              memory_order m) noexcept;
    422 template <class Integral>
    423     Integral
    424     atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
    425                              memory_order m) noexcept;
    426 template <class Integral>
    427     Integral
    428     atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
    429 
    430 template <class Integral>
    431     Integral
    432     atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
    433 
    434 template <class Integral>
    435     Integral
    436     atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
    437                               memory_order m) noexcept;
    438 template <class Integral>
    439     Integral
    440     atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
    441                               memory_order m) noexcept;
    442 
    443 template <class T>
    444     T*
    445     atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    446 
    447 template <class T>
    448     T*
    449     atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
    450 
    451 template <class T>
    452     T*
    453     atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    454                               memory_order m) noexcept;
    455 template <class T>
    456     T*
    457     atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    458 
    459 template <class T>
    460     T*
    461     atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    462 
    463 template <class T>
    464     T*
    465     atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
    466 
    467 template <class T>
    468     T*
    469     atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    470                               memory_order m) noexcept;
    471 template <class T>
    472     T*
    473     atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
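
Example (user code, not part of this synopsis): the free functions mirror the
member functions, which is convenient for code that must also compile against
the C11 <stdatomic.h> interface. A non-normative sketch; counter and setup are
illustrative names.

    #include <atomic>

    std::atomic<int> counter;   // default-constructed: holds an indeterminate value

    void setup()
    {
        std::atomic_init(&counter, 0);   // non-atomic initialization
        std::atomic_store_explicit(&counter, 1, std::memory_order_release);
        int v = std::atomic_load_explicit(&counter, std::memory_order_acquire);
        (void)v;
    }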
    474 
    475 // Atomics for standard typedef types
    476 
    477 typedef atomic<bool>               atomic_bool;
    478 typedef atomic<char>               atomic_char;
    479 typedef atomic<signed char>        atomic_schar;
    480 typedef atomic<unsigned char>      atomic_uchar;
    481 typedef atomic<short>              atomic_short;
    482 typedef atomic<unsigned short>     atomic_ushort;
    483 typedef atomic<int>                atomic_int;
    484 typedef atomic<unsigned int>       atomic_uint;
    485 typedef atomic<long>               atomic_long;
    486 typedef atomic<unsigned long>      atomic_ulong;
    487 typedef atomic<long long>          atomic_llong;
    488 typedef atomic<unsigned long long> atomic_ullong;
    489 typedef atomic<char16_t>           atomic_char16_t;
    490 typedef atomic<char32_t>           atomic_char32_t;
    491 typedef atomic<wchar_t>            atomic_wchar_t;
    492 
    493 typedef atomic<int_least8_t>   atomic_int_least8_t;
    494 typedef atomic<uint_least8_t>  atomic_uint_least8_t;
    495 typedef atomic<int_least16_t>  atomic_int_least16_t;
    496 typedef atomic<uint_least16_t> atomic_uint_least16_t;
    497 typedef atomic<int_least32_t>  atomic_int_least32_t;
    498 typedef atomic<uint_least32_t> atomic_uint_least32_t;
    499 typedef atomic<int_least64_t>  atomic_int_least64_t;
    500 typedef atomic<uint_least64_t> atomic_uint_least64_t;
    501 
    502 typedef atomic<int_fast8_t>   atomic_int_fast8_t;
    503 typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
    504 typedef atomic<int_fast16_t>  atomic_int_fast16_t;
    505 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
    506 typedef atomic<int_fast32_t>  atomic_int_fast32_t;
    507 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
    508 typedef atomic<int_fast64_t>  atomic_int_fast64_t;
    509 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
    510 
    511 typedef atomic<intptr_t>  atomic_intptr_t;
    512 typedef atomic<uintptr_t> atomic_uintptr_t;
    513 typedef atomic<size_t>    atomic_size_t;
    514 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
    515 typedef atomic<intmax_t>  atomic_intmax_t;
    516 typedef atomic<uintmax_t> atomic_uintmax_t;
    517 
    518 // fences
    519 
    520 void atomic_thread_fence(memory_order m) noexcept;
    521 void atomic_signal_fence(memory_order m) noexcept;
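
Example (user code, not part of this synopsis): a release fence before a relaxed
store pairs with an acquire fence after a relaxed load of the same variable,
giving the same visibility guarantee as release/acquire operations. A
non-normative sketch; value, published, publisher and observer are illustrative.

    #include <atomic>

    std::atomic<int>  value(0);
    std::atomic<bool> published(false);

    void publisher()
    {
        value.store(7, std::memory_order_relaxed);
        std::atomic_thread_fence(std::memory_order_release);
        published.store(true, std::memory_order_relaxed);
    }

    void observer()
    {
        if (published.load(std::memory_order_relaxed))
        {
            std::atomic_thread_fence(std::memory_order_acquire);
            int v = value.load(std::memory_order_relaxed);   // reads 7
            (void)v;
        }
    }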
    522 
    523 }  // std
    524 
    525 */
    526 
    527 #include <__config>
    528 #include <cstddef>
    529 #include <cstdint>
    530 #include <type_traits>
    531 
    532 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
    533 #pragma GCC system_header
    534 #endif
    535 
    536 _LIBCPP_BEGIN_NAMESPACE_STD
    537 
    538 #if !__has_feature(cxx_atomic)
    539 #error <atomic> is not implemented
    540 #else
    541 
    542 typedef enum memory_order
    543 {
    544     memory_order_relaxed, memory_order_consume, memory_order_acquire,
    545     memory_order_release, memory_order_acq_rel, memory_order_seq_cst
    546 } memory_order;
    547 
    548 #if !defined(__clang__)
    549 
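// When this header is compiled by GCC rather than Clang, emulate Clang's
// _Atomic(T) type and the __c11_atomic_* operations used below in terms of
// GCC's __atomic_* builtins.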
    550 namespace __gcc_atomic {
    551 template <typename T>
    552 struct __gcc_atomic_t {
    553   __gcc_atomic_t() _NOEXCEPT {}
    554   explicit __gcc_atomic_t(T value) _NOEXCEPT : __a_value(value) {}
    555   T __a_value;
    556 };
    557 #define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
    558 
    559 template <typename T> T __create();
    560 
    561 template <typename __Tp, typename __Td>
    562 typename enable_if<sizeof(__Tp()->__a_value = __create<__Td>()), char>::type
    563     __test_atomic_assignable(int);
    564 template <typename T, typename U>
    565 __two __test_atomic_assignable(...);
    566 
    567 template <typename __Tp, typename __Td>
    568 struct __can_assign {
    569   static const bool value =
    570       sizeof(__test_atomic_assignable<__Tp, __Td>(1)) == sizeof(char);
    571 };
    572 
    573 static inline int __to_gcc_order(memory_order __order) {
    574   switch (__order) {
    575     case memory_order_relaxed:
    576       return __ATOMIC_RELAXED;
    577     case memory_order_consume:
    578       return __ATOMIC_CONSUME;
    579     case memory_order_acquire:
    580       return __ATOMIC_ACQUIRE;
    581     case memory_order_release:
    582       return __ATOMIC_RELEASE;
    583     case memory_order_acq_rel:
    584       return __ATOMIC_ACQ_REL;
    585     case memory_order_seq_cst:
    586       return __ATOMIC_SEQ_CST;
    587   }
    588 }
    589 
    590 } // namespace __gcc_atomic
    591 
    592 template <typename _Tp>
    593 static inline
    594 typename enable_if<
    595     __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
    596 __c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
    597   __a->__a_value = __val;
    598 }
    599 
    600 template <typename _Tp>
    601 static inline
    602 typename enable_if<
    603     !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
    604      __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
    605 __c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
     606   // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
     607   // the implicitly-defined copy assignment operator is not volatile-qualified,
     608   // a byte-by-byte copy is required.
    609   volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
    610   volatile char* end = to + sizeof(_Tp);
    611   char* from = reinterpret_cast<char*>(&__val);
    612   while (to != end) {
    613     *to++ = *from++;
    614   }
    615 }
    616 
    617 template <typename _Tp>
    618 static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
    619   __a->__a_value = __val;
    620 }
    621 
    622 static inline void __c11_atomic_thread_fence(memory_order __order) {
    623   __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
    624 }
    625 
    626 static inline void __c11_atomic_signal_fence(memory_order __order) {
    627   __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
    628 }
    629 
    630 static inline bool __c11_atomic_is_lock_free(size_t __size) {
    631   return __atomic_is_lock_free(__size, 0);
    632 }
    633 
    634 template <typename _Tp>
    635 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
    636                                       memory_order __order) {
    637   return __atomic_store(&__a->__a_value, &__val,
    638                         __gcc_atomic::__to_gcc_order(__order));
    639 }
    640 
    641 template <typename _Tp>
    642 static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
    643                                       memory_order __order) {
    644   return __atomic_store(&__a->__a_value, &__val,
    645                         __gcc_atomic::__to_gcc_order(__order));
    646 }
    647 
    648 template <typename _Tp>
    649 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
    650                                     memory_order __order) {
    651   _Tp __ret;
    652   __atomic_load(&__a->__a_value, &__ret,
    653                 __gcc_atomic::__to_gcc_order(__order));
    654   return __ret;
    655 }
    656 
    657 template <typename _Tp>
    658 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
    659   _Tp __ret;
    660   __atomic_load(&__a->__a_value, &__ret,
    661                 __gcc_atomic::__to_gcc_order(__order));
    662   return __ret;
    663 }
    664 
    665 template <typename _Tp>
    666 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
    667                                         _Tp __value, memory_order __order) {
    668   _Tp __ret;
    669   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    670                     __gcc_atomic::__to_gcc_order(__order));
    671   return __ret;
    672 }
    673 
    674 template <typename _Tp>
    675 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
    676                                         memory_order __order) {
    677   _Tp __ret;
    678   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    679                     __gcc_atomic::__to_gcc_order(__order));
    680   return __ret;
    681 }
    682 
    683 template <typename _Tp>
    684 static inline bool __c11_atomic_compare_exchange_strong(
    685     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    686     memory_order __success, memory_order __failure) {
    687   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    688                                    false,
    689                                    __gcc_atomic::__to_gcc_order(__success),
    690                                    __gcc_atomic::__to_gcc_order(__failure));
    691 }
    692 
    693 template <typename _Tp>
    694 static inline bool __c11_atomic_compare_exchange_strong(
    695     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    696     memory_order __failure) {
    697   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    698                                    false,
    699                                    __gcc_atomic::__to_gcc_order(__success),
    700                                    __gcc_atomic::__to_gcc_order(__failure));
    701 }
    702 
    703 template <typename _Tp>
    704 static inline bool __c11_atomic_compare_exchange_weak(
    705     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    706     memory_order __success, memory_order __failure) {
    707   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    708                                    true,
    709                                    __gcc_atomic::__to_gcc_order(__success),
    710                                    __gcc_atomic::__to_gcc_order(__failure));
    711 }
    712 
    713 template <typename _Tp>
    714 static inline bool __c11_atomic_compare_exchange_weak(
    715     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    716     memory_order __failure) {
    717   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    718                                    true,
    719                                    __gcc_atomic::__to_gcc_order(__success),
    720                                    __gcc_atomic::__to_gcc_order(__failure));
    721 }
    722 
    723 template <typename _Tp>
    724 struct __skip_amt { enum {value = 1}; };
    725 
    726 template <typename _Tp>
    727 struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
    728 
    729 // FIXME: Haven't figured out what the spec says about using arrays with
     730 // atomic_fetch_add. Leave these specializations empty to force a compile failure rather than bad behavior.
    731 template <typename _Tp>
    732 struct __skip_amt<_Tp[]> { };
    733 template <typename _Tp, int n>
    734 struct __skip_amt<_Tp[n]> { };
    735 
    736 template <typename _Tp, typename _Td>
    737 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
    738                                          _Td __delta, memory_order __order) {
    739   return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    740                             __gcc_atomic::__to_gcc_order(__order));
    741 }
    742 
    743 template <typename _Tp, typename _Td>
    744 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
    745                                          memory_order __order) {
    746   return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    747                             __gcc_atomic::__to_gcc_order(__order));
    748 }
    749 
    750 template <typename _Tp, typename _Td>
    751 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
    752                                          _Td __delta, memory_order __order) {
    753   return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    754                             __gcc_atomic::__to_gcc_order(__order));
    755 }
    756 
    757 template <typename _Tp, typename _Td>
    758 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
    759                                          memory_order __order) {
    760   return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    761                             __gcc_atomic::__to_gcc_order(__order));
    762 }
    763 
    764 template <typename _Tp>
    765 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
    766                                          _Tp __pattern, memory_order __order) {
    767   return __atomic_fetch_and(&__a->__a_value, __pattern,
    768                             __gcc_atomic::__to_gcc_order(__order));
    769 }
    770 
    771 template <typename _Tp>
    772 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
    773                                          _Tp __pattern, memory_order __order) {
    774   return __atomic_fetch_and(&__a->__a_value, __pattern,
    775                             __gcc_atomic::__to_gcc_order(__order));
    776 }
    777 
    778 template <typename _Tp>
    779 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
    780                                         _Tp __pattern, memory_order __order) {
    781   return __atomic_fetch_or(&__a->__a_value, __pattern,
    782                            __gcc_atomic::__to_gcc_order(__order));
    783 }
    784 
    785 template <typename _Tp>
    786 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
    787                                         memory_order __order) {
    788   return __atomic_fetch_or(&__a->__a_value, __pattern,
    789                            __gcc_atomic::__to_gcc_order(__order));
    790 }
    791 
    792 template <typename _Tp>
    793 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
    794                                          _Tp __pattern, memory_order __order) {
    795   return __atomic_fetch_xor(&__a->__a_value, __pattern,
    796                             __gcc_atomic::__to_gcc_order(__order));
    797 }
    798 
    799 template <typename _Tp>
    800 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
    801                                          memory_order __order) {
    802   return __atomic_fetch_xor(&__a->__a_value, __pattern,
    803                             __gcc_atomic::__to_gcc_order(__order));
    804 }
    805 
    806 #endif // !__clang__
    807 
    808 template <class _Tp>
    809 inline _LIBCPP_INLINE_VISIBILITY
    810 _Tp
    811 kill_dependency(_Tp __y) _NOEXCEPT
    812 {
    813     return __y;
    814 }
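
// Illustrative note (not part of the implementation): kill_dependency ends a
// memory_order_consume dependency chain.  For example, after
//     int* __p = __ptr.load(memory_order_consume);
//     int  __i = kill_dependency(*__p);
// later uses of __i are not dependency-ordered after the store that produced *__p.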
    815 
    816 // general atomic<T>
    817 
    818 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
    819 struct __atomic_base  // false
    820 {
    821     mutable _Atomic(_Tp) __a_;
    822 
    823     _LIBCPP_INLINE_VISIBILITY
    824     bool is_lock_free() const volatile _NOEXCEPT
    825         {return __c11_atomic_is_lock_free(sizeof(_Tp));}
    826     _LIBCPP_INLINE_VISIBILITY
    827     bool is_lock_free() const _NOEXCEPT
    828         {return __c11_atomic_is_lock_free(sizeof(_Tp));}
    829     _LIBCPP_INLINE_VISIBILITY
    830     void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    831         {__c11_atomic_store(&__a_, __d, __m);}
    832     _LIBCPP_INLINE_VISIBILITY
    833     void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    834         {__c11_atomic_store(&__a_, __d, __m);}
    835     _LIBCPP_INLINE_VISIBILITY
    836     _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    837         {return __c11_atomic_load(&__a_, __m);}
    838     _LIBCPP_INLINE_VISIBILITY
    839     _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    840         {return __c11_atomic_load(&__a_, __m);}
    841     _LIBCPP_INLINE_VISIBILITY
    842     operator _Tp() const volatile _NOEXCEPT {return load();}
    843     _LIBCPP_INLINE_VISIBILITY
    844     operator _Tp() const _NOEXCEPT          {return load();}
    845     _LIBCPP_INLINE_VISIBILITY
    846     _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    847         {return __c11_atomic_exchange(&__a_, __d, __m);}
    848     _LIBCPP_INLINE_VISIBILITY
    849     _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    850         {return __c11_atomic_exchange(&__a_, __d, __m);}
    851     _LIBCPP_INLINE_VISIBILITY
    852     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    853                                memory_order __s, memory_order __f) volatile _NOEXCEPT
    854         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    855     _LIBCPP_INLINE_VISIBILITY
    856     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    857                                memory_order __s, memory_order __f) _NOEXCEPT
    858         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    859     _LIBCPP_INLINE_VISIBILITY
    860     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    861                                  memory_order __s, memory_order __f) volatile _NOEXCEPT
    862         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    863     _LIBCPP_INLINE_VISIBILITY
    864     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    865                                  memory_order __s, memory_order __f) _NOEXCEPT
    866         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    867     _LIBCPP_INLINE_VISIBILITY
    868     bool compare_exchange_weak(_Tp& __e, _Tp __d,
     869                                memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    870         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    871     _LIBCPP_INLINE_VISIBILITY
    872     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    873                                memory_order __m = memory_order_seq_cst) _NOEXCEPT
    874         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    875     _LIBCPP_INLINE_VISIBILITY
    876     bool compare_exchange_strong(_Tp& __e, _Tp __d,
     877                                  memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    878         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    879     _LIBCPP_INLINE_VISIBILITY
    880     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    881                                  memory_order __m = memory_order_seq_cst) _NOEXCEPT
    882         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    883 
    884     _LIBCPP_INLINE_VISIBILITY
    885 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    886     __atomic_base() _NOEXCEPT = default;
    887 #else
    888     __atomic_base() _NOEXCEPT : __a_() {}
    889 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    890 
    891     _LIBCPP_INLINE_VISIBILITY
    892     _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    893 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    894     __atomic_base(const __atomic_base&) = delete;
    895     __atomic_base& operator=(const __atomic_base&) = delete;
    896     __atomic_base& operator=(const __atomic_base&) volatile = delete;
    897 #else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    898 private:
    899     __atomic_base(const __atomic_base&);
    900     __atomic_base& operator=(const __atomic_base&);
    901     __atomic_base& operator=(const __atomic_base&) volatile;
    902 #endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    903 };
    904 
    905 // atomic<Integral>
    906 
    907 template <class _Tp>
    908 struct __atomic_base<_Tp, true>
    909     : public __atomic_base<_Tp, false>
    910 {
    911     typedef __atomic_base<_Tp, false> __base;
    912     _LIBCPP_INLINE_VISIBILITY
    913     __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    914     _LIBCPP_INLINE_VISIBILITY
    915     _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
    916 
    917     _LIBCPP_INLINE_VISIBILITY
    918     _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    919         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    920     _LIBCPP_INLINE_VISIBILITY
    921     _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    922         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    923     _LIBCPP_INLINE_VISIBILITY
    924     _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    925         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    926     _LIBCPP_INLINE_VISIBILITY
    927     _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    928         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    929     _LIBCPP_INLINE_VISIBILITY
    930     _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    931         {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    932     _LIBCPP_INLINE_VISIBILITY
    933     _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    934         {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    935     _LIBCPP_INLINE_VISIBILITY
    936     _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    937         {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    938     _LIBCPP_INLINE_VISIBILITY
    939     _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    940         {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    941     _LIBCPP_INLINE_VISIBILITY
    942     _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    943         {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    944     _LIBCPP_INLINE_VISIBILITY
    945     _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    946         {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    947 
    948     _LIBCPP_INLINE_VISIBILITY
    949     _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    950     _LIBCPP_INLINE_VISIBILITY
    951     _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    952     _LIBCPP_INLINE_VISIBILITY
    953     _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    954     _LIBCPP_INLINE_VISIBILITY
    955     _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    956     _LIBCPP_INLINE_VISIBILITY
    957     _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    958     _LIBCPP_INLINE_VISIBILITY
    959     _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    960     _LIBCPP_INLINE_VISIBILITY
    961     _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    962     _LIBCPP_INLINE_VISIBILITY
    963     _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    964     _LIBCPP_INLINE_VISIBILITY
    965     _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    966     _LIBCPP_INLINE_VISIBILITY
    967     _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    968     _LIBCPP_INLINE_VISIBILITY
    969     _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    970     _LIBCPP_INLINE_VISIBILITY
    971     _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    972     _LIBCPP_INLINE_VISIBILITY
    973     _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    974     _LIBCPP_INLINE_VISIBILITY
    975     _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    976     _LIBCPP_INLINE_VISIBILITY
    977     _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    978     _LIBCPP_INLINE_VISIBILITY
    979     _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    980     _LIBCPP_INLINE_VISIBILITY
    981     _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    982     _LIBCPP_INLINE_VISIBILITY
    983     _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
    984 };
    985 
    986 // atomic<T>
    987 
    988 template <class _Tp>
    989 struct atomic
    990     : public __atomic_base<_Tp>
    991 {
    992     typedef __atomic_base<_Tp> __base;
    993     _LIBCPP_INLINE_VISIBILITY
    994     atomic() _NOEXCEPT _LIBCPP_DEFAULT
    995     _LIBCPP_INLINE_VISIBILITY
    996     _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
    997 
    998     _LIBCPP_INLINE_VISIBILITY
    999     _Tp operator=(_Tp __d) volatile _NOEXCEPT
   1000         {__base::store(__d); return __d;}
   1001     _LIBCPP_INLINE_VISIBILITY
   1002     _Tp operator=(_Tp __d) _NOEXCEPT
   1003         {__base::store(__d); return __d;}
   1004 };
   1005 
   1006 // atomic<T*>
   1007 
   1008 template <class _Tp>
   1009 struct atomic<_Tp*>
   1010     : public __atomic_base<_Tp*>
   1011 {
   1012     typedef __atomic_base<_Tp*> __base;
   1013     _LIBCPP_INLINE_VISIBILITY
   1014     atomic() _NOEXCEPT _LIBCPP_DEFAULT
   1015     _LIBCPP_INLINE_VISIBILITY
   1016     _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
   1017 
   1018     _LIBCPP_INLINE_VISIBILITY
   1019     _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
   1020         {__base::store(__d); return __d;}
   1021     _LIBCPP_INLINE_VISIBILITY
   1022     _Tp* operator=(_Tp* __d) _NOEXCEPT
   1023         {__base::store(__d); return __d;}
   1024 
   1025     _LIBCPP_INLINE_VISIBILITY
   1026     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
   1027                                                                         volatile _NOEXCEPT
   1028         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
   1029     _LIBCPP_INLINE_VISIBILITY
   1030     _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
   1031         {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
   1032     _LIBCPP_INLINE_VISIBILITY
   1033     _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
   1034                                                                         volatile _NOEXCEPT
   1035         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
   1036     _LIBCPP_INLINE_VISIBILITY
   1037     _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
   1038         {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
   1039 
   1040     _LIBCPP_INLINE_VISIBILITY
   1041     _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
   1042     _LIBCPP_INLINE_VISIBILITY
   1043     _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
   1044     _LIBCPP_INLINE_VISIBILITY
   1045     _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
   1046     _LIBCPP_INLINE_VISIBILITY
   1047     _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
   1048     _LIBCPP_INLINE_VISIBILITY
   1049     _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
   1050     _LIBCPP_INLINE_VISIBILITY
   1051     _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
   1052     _LIBCPP_INLINE_VISIBILITY
   1053     _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
   1054     _LIBCPP_INLINE_VISIBILITY
   1055     _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
   1056     _LIBCPP_INLINE_VISIBILITY
   1057     _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
   1058     _LIBCPP_INLINE_VISIBILITY
   1059     _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
   1060     _LIBCPP_INLINE_VISIBILITY
   1061     _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
   1062     _LIBCPP_INLINE_VISIBILITY
   1063     _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
   1064 };
   1065 
   1066 // atomic_is_lock_free
   1067 
   1068 template <class _Tp>
   1069 inline _LIBCPP_INLINE_VISIBILITY
   1070 bool
   1071 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
   1072 {
   1073     return __o->is_lock_free();
   1074 }
   1075 
   1076 template <class _Tp>
   1077 inline _LIBCPP_INLINE_VISIBILITY
   1078 bool
   1079 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
   1080 {
   1081     return __o->is_lock_free();
   1082 }
   1083 
   1084 // atomic_init
   1085 
   1086 template <class _Tp>
   1087 inline _LIBCPP_INLINE_VISIBILITY
   1088 void
   1089 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1090 {
   1091     __c11_atomic_init(&__o->__a_, __d);
   1092 }
   1093 
   1094 template <class _Tp>
   1095 inline _LIBCPP_INLINE_VISIBILITY
   1096 void
   1097 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1098 {
   1099     __c11_atomic_init(&__o->__a_, __d);
   1100 }
   1101 
   1102 // atomic_store
   1103 
   1104 template <class _Tp>
   1105 inline _LIBCPP_INLINE_VISIBILITY
   1106 void
   1107 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1108 {
   1109     __o->store(__d);
   1110 }
   1111 
   1112 template <class _Tp>
   1113 inline _LIBCPP_INLINE_VISIBILITY
   1114 void
   1115 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1116 {
   1117     __o->store(__d);
   1118 }
   1119 
   1120 // atomic_store_explicit
   1121 
   1122 template <class _Tp>
   1123 inline _LIBCPP_INLINE_VISIBILITY
   1124 void
   1125 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1126 {
   1127     __o->store(__d, __m);
   1128 }
   1129 
   1130 template <class _Tp>
   1131 inline _LIBCPP_INLINE_VISIBILITY
   1132 void
   1133 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1134 {
   1135     __o->store(__d, __m);
   1136 }
   1137 
   1138 // atomic_load
   1139 
   1140 template <class _Tp>
   1141 inline _LIBCPP_INLINE_VISIBILITY
   1142 _Tp
   1143 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
   1144 {
   1145     return __o->load();
   1146 }
   1147 
   1148 template <class _Tp>
   1149 inline _LIBCPP_INLINE_VISIBILITY
   1150 _Tp
   1151 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
   1152 {
   1153     return __o->load();
   1154 }
   1155 
   1156 // atomic_load_explicit
   1157 
   1158 template <class _Tp>
   1159 inline _LIBCPP_INLINE_VISIBILITY
   1160 _Tp
   1161 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
   1162 {
   1163     return __o->load(__m);
   1164 }
   1165 
   1166 template <class _Tp>
   1167 inline _LIBCPP_INLINE_VISIBILITY
   1168 _Tp
   1169 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
   1170 {
   1171     return __o->load(__m);
   1172 }
   1173 
   1174 // atomic_exchange
   1175 
   1176 template <class _Tp>
   1177 inline _LIBCPP_INLINE_VISIBILITY
   1178 _Tp
   1179 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1180 {
   1181     return __o->exchange(__d);
   1182 }
   1183 
   1184 template <class _Tp>
   1185 inline _LIBCPP_INLINE_VISIBILITY
   1186 _Tp
   1187 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1188 {
   1189     return __o->exchange(__d);
   1190 }
   1191 
   1192 // atomic_exchange_explicit
   1193 
   1194 template <class _Tp>
   1195 inline _LIBCPP_INLINE_VISIBILITY
   1196 _Tp
   1197 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1198 {
   1199     return __o->exchange(__d, __m);
   1200 }
   1201 
   1202 template <class _Tp>
   1203 inline _LIBCPP_INLINE_VISIBILITY
   1204 _Tp
   1205 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1206 {
   1207     return __o->exchange(__d, __m);
   1208 }
   1209 
   1210 // atomic_compare_exchange_weak
   1211 
   1212 template <class _Tp>
   1213 inline _LIBCPP_INLINE_VISIBILITY
   1214 bool
   1215 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1216 {
   1217     return __o->compare_exchange_weak(*__e, __d);
   1218 }
   1219 
   1220 template <class _Tp>
   1221 inline _LIBCPP_INLINE_VISIBILITY
   1222 bool
   1223 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1224 {
   1225     return __o->compare_exchange_weak(*__e, __d);
   1226 }
   1227 
   1228 // atomic_compare_exchange_strong
   1229 
   1230 template <class _Tp>
   1231 inline _LIBCPP_INLINE_VISIBILITY
   1232 bool
   1233 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1234 {
   1235     return __o->compare_exchange_strong(*__e, __d);
   1236 }
   1237 
   1238 template <class _Tp>
   1239 inline _LIBCPP_INLINE_VISIBILITY
   1240 bool
   1241 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1242 {
   1243     return __o->compare_exchange_strong(*__e, __d);
   1244 }
   1245 
   1246 // atomic_compare_exchange_weak_explicit
   1247 
   1248 template <class _Tp>
   1249 inline _LIBCPP_INLINE_VISIBILITY
   1250 bool
   1251 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
   1252                                       _Tp __d,
   1253                                       memory_order __s, memory_order __f) _NOEXCEPT
   1254 {
   1255     return __o->compare_exchange_weak(*__e, __d, __s, __f);
   1256 }
   1257 
   1258 template <class _Tp>
   1259 inline _LIBCPP_INLINE_VISIBILITY
   1260 bool
   1261 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
   1262                                       memory_order __s, memory_order __f) _NOEXCEPT
   1263 {
   1264     return __o->compare_exchange_weak(*__e, __d, __s, __f);
   1265 }
   1266 
   1267 // atomic_compare_exchange_strong_explicit
   1268 
   1269 template <class _Tp>
   1270 inline _LIBCPP_INLINE_VISIBILITY
   1271 bool
   1272 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
   1273                                         _Tp* __e, _Tp __d,
   1274                                         memory_order __s, memory_order __f) _NOEXCEPT
   1275 {
   1276     return __o->compare_exchange_strong(*__e, __d, __s, __f);
   1277 }
   1278 
   1279 template <class _Tp>
   1280 inline _LIBCPP_INLINE_VISIBILITY
   1281 bool
   1282 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
   1283                                         _Tp __d,
   1284                                         memory_order __s, memory_order __f) _NOEXCEPT
   1285 {
   1286     return __o->compare_exchange_strong(*__e, __d, __s, __f);
   1287 }
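
/*
    Usage sketch (illustrative only, not part of this header).  The _explicit
    forms take two orderings: __s applies when the exchange succeeds, __f when
    it fails.  In C++11 the failure order may not be memory_order_release or
    memory_order_acq_rel and may not be stronger than the success order.  The
    names below are invented for the example:

        #include <atomic>

        std::atomic<int> head_version(0);

        bool try_publish(int expected, int desired)
        {
            // Release on success so earlier writes become visible to readers
            // that acquire; relaxed on failure because nothing was published.
            return std::atomic_compare_exchange_strong_explicit(
                &head_version, &expected, desired,
                std::memory_order_release, std::memory_order_relaxed);
        }
*/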
   1288 
   1289 // atomic_fetch_add
   1290 
   1291 template <class _Tp>
   1292 inline _LIBCPP_INLINE_VISIBILITY
   1293 typename enable_if
   1294 <
   1295     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1296     _Tp
   1297 >::type
   1298 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1299 {
   1300     return __o->fetch_add(__op);
   1301 }
   1302 
   1303 template <class _Tp>
   1304 inline _LIBCPP_INLINE_VISIBILITY
   1305 typename enable_if
   1306 <
   1307     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1308     _Tp
   1309 >::type
   1310 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1311 {
   1312     return __o->fetch_add(__op);
   1313 }
   1314 
   1315 template <class _Tp>
   1316 inline _LIBCPP_INLINE_VISIBILITY
   1317 _Tp*
   1318 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1319 {
   1320     return __o->fetch_add(__op);
   1321 }
   1322 
   1323 template <class _Tp>
   1324 inline _LIBCPP_INLINE_VISIBILITY
   1325 _Tp*
   1326 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1327 {
   1328     return __o->fetch_add(__op);
   1329 }
   1330 
   1331 // atomic_fetch_add_explicit
   1332 
   1333 template <class _Tp>
   1334 inline _LIBCPP_INLINE_VISIBILITY
   1335 typename enable_if
   1336 <
   1337     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1338     _Tp
   1339 >::type
   1340 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1341 {
   1342     return __o->fetch_add(__op, __m);
   1343 }
   1344 
   1345 template <class _Tp>
   1346 inline _LIBCPP_INLINE_VISIBILITY
   1347 typename enable_if
   1348 <
   1349     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1350     _Tp
   1351 >::type
   1352 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1353 {
   1354     return __o->fetch_add(__op, __m);
   1355 }
   1356 
   1357 template <class _Tp>
   1358 inline _LIBCPP_INLINE_VISIBILITY
   1359 _Tp*
   1360 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
   1361                           memory_order __m) _NOEXCEPT
   1362 {
   1363     return __o->fetch_add(__op, __m);
   1364 }
   1365 
   1366 template <class _Tp>
   1367 inline _LIBCPP_INLINE_VISIBILITY
   1368 _Tp*
   1369 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
   1370 {
   1371     return __o->fetch_add(__op, __m);
   1372 }
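
/*
    Usage sketch (illustrative only, not part of this header).  The enable_if
    constraint restricts the first overload set to integral, non-bool types;
    the atomic<T*> overloads take a ptrdiff_t measured in whole elements.  The
    names below are invented for the example:

        #include <atomic>

        std::atomic<unsigned> hits(0);
        int                   buffer[64];
        std::atomic<int*>     cursor(buffer);

        void record()
        {
            // Relaxed is enough for a statistics-only counter; the previous
            // value is returned but ignored here.
            std::atomic_fetch_add_explicit(&hits, 1u,
                                           std::memory_order_relaxed);

            // Pointer arithmetic is in elements: this advances by one int.
            int* slot = std::atomic_fetch_add(&cursor, 1);
            *slot = 42;
        }
*/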
   1373 
   1374 // atomic_fetch_sub
   1375 
   1376 template <class _Tp>
   1377 inline _LIBCPP_INLINE_VISIBILITY
   1378 typename enable_if
   1379 <
   1380     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1381     _Tp
   1382 >::type
   1383 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1384 {
   1385     return __o->fetch_sub(__op);
   1386 }
   1387 
   1388 template <class _Tp>
   1389 inline _LIBCPP_INLINE_VISIBILITY
   1390 typename enable_if
   1391 <
   1392     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1393     _Tp
   1394 >::type
   1395 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1396 {
   1397     return __o->fetch_sub(__op);
   1398 }
   1399 
   1400 template <class _Tp>
   1401 inline _LIBCPP_INLINE_VISIBILITY
   1402 _Tp*
   1403 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1404 {
   1405     return __o->fetch_sub(__op);
   1406 }
   1407 
   1408 template <class _Tp>
   1409 inline _LIBCPP_INLINE_VISIBILITY
   1410 _Tp*
   1411 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1412 {
   1413     return __o->fetch_sub(__op);
   1414 }
   1415 
   1416 // atomic_fetch_sub_explicit
   1417 
   1418 template <class _Tp>
   1419 inline _LIBCPP_INLINE_VISIBILITY
   1420 typename enable_if
   1421 <
   1422     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1423     _Tp
   1424 >::type
   1425 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1426 {
   1427     return __o->fetch_sub(__op, __m);
   1428 }
   1429 
   1430 template <class _Tp>
   1431 inline _LIBCPP_INLINE_VISIBILITY
   1432 typename enable_if
   1433 <
   1434     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1435     _Tp
   1436 >::type
   1437 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1438 {
   1439     return __o->fetch_sub(__op, __m);
   1440 }
   1441 
   1442 template <class _Tp>
   1443 inline _LIBCPP_INLINE_VISIBILITY
   1444 _Tp*
   1445 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
   1446                           memory_order __m) _NOEXCEPT
   1447 {
   1448     return __o->fetch_sub(__op, __m);
   1449 }
   1450 
   1451 template <class _Tp>
   1452 inline _LIBCPP_INLINE_VISIBILITY
   1453 _Tp*
   1454 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
   1455 {
   1456     return __o->fetch_sub(__op, __m);
   1457 }
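
/*
    Usage sketch (illustrative only, not part of this header).  fetch_sub
    returns the value held before the subtraction, which is what makes a
    reference-count drop detectable.  The names below are invented for the
    example:

        #include <atomic>

        std::atomic<long> refs(1);

        void release_reference()
        {
            // The thread that takes the count from 1 to 0 sees 1 returned
            // and is the one responsible for cleanup.
            if (std::atomic_fetch_sub_explicit(&refs, 1L,
                                               std::memory_order_acq_rel) == 1)
            {
                // last owner: destroy the shared object here
            }
        }
*/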
   1458 
   1459 // atomic_fetch_and
   1460 
   1461 template <class _Tp>
   1462 inline _LIBCPP_INLINE_VISIBILITY
   1463 typename enable_if
   1464 <
   1465     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1466     _Tp
   1467 >::type
   1468 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1469 {
   1470     return __o->fetch_and(__op);
   1471 }
   1472 
   1473 template <class _Tp>
   1474 inline _LIBCPP_INLINE_VISIBILITY
   1475 typename enable_if
   1476 <
   1477     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1478     _Tp
   1479 >::type
   1480 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1481 {
   1482     return __o->fetch_and(__op);
   1483 }
   1484 
   1485 // atomic_fetch_and_explicit
   1486 
   1487 template <class _Tp>
   1488 inline _LIBCPP_INLINE_VISIBILITY
   1489 typename enable_if
   1490 <
   1491     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1492     _Tp
   1493 >::type
   1494 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1495 {
   1496     return __o->fetch_and(__op, __m);
   1497 }
   1498 
   1499 template <class _Tp>
   1500 inline _LIBCPP_INLINE_VISIBILITY
   1501 typename enable_if
   1502 <
   1503     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1504     _Tp
   1505 >::type
   1506 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1507 {
   1508     return __o->fetch_and(__op, __m);
   1509 }
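
/*
    Usage sketch (illustrative only, not part of this header).  Like the other
    integral fetch operations, these are constrained to non-bool integral
    types and return the previous value.  The names below are invented:

        #include <atomic>

        std::atomic<unsigned> flags(0xFFu);

        unsigned clear_low_nibble()
        {
            // Atomically AND with the mask; the pre-AND value is returned.
            return std::atomic_fetch_and(&flags, ~0x0Fu);
        }
*/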
   1510 
   1511 // atomic_fetch_or
   1512 
   1513 template <class _Tp>
   1514 inline _LIBCPP_INLINE_VISIBILITY
   1515 typename enable_if
   1516 <
   1517     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1518     _Tp
   1519 >::type
   1520 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1521 {
   1522     return __o->fetch_or(__op);
   1523 }
   1524 
   1525 template <class _Tp>
   1526 inline _LIBCPP_INLINE_VISIBILITY
   1527 typename enable_if
   1528 <
   1529     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1530     _Tp
   1531 >::type
   1532 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1533 {
   1534     return __o->fetch_or(__op);
   1535 }
   1536 
   1537 // atomic_fetch_or_explicit
   1538 
   1539 template <class _Tp>
   1540 inline _LIBCPP_INLINE_VISIBILITY
   1541 typename enable_if
   1542 <
   1543     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1544     _Tp
   1545 >::type
   1546 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1547 {
   1548     return __o->fetch_or(__op, __m);
   1549 }
   1550 
   1551 template <class _Tp>
   1552 inline _LIBCPP_INLINE_VISIBILITY
   1553 typename enable_if
   1554 <
   1555     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1556     _Tp
   1557 >::type
   1558 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1559 {
   1560     return __o->fetch_or(__op, __m);
   1561 }
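
/*
    Usage sketch (illustrative only, not part of this header).  A typical use
    is setting a status bit and publishing it with release ordering.  The
    names below are invented:

        #include <atomic>

        std::atomic<unsigned> status(0);

        void mark_ready()
        {
            // Set bit 0; a reader that loads `status` with acquire ordering
            // and sees the bit will also see writes made before this call.
            std::atomic_fetch_or_explicit(&status, 1u,
                                          std::memory_order_release);
        }
*/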
   1562 
   1563 // atomic_fetch_xor
   1564 
   1565 template <class _Tp>
   1566 inline _LIBCPP_INLINE_VISIBILITY
   1567 typename enable_if
   1568 <
   1569     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1570     _Tp
   1571 >::type
   1572 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1573 {
   1574     return __o->fetch_xor(__op);
   1575 }
   1576 
   1577 template <class _Tp>
   1578 inline _LIBCPP_INLINE_VISIBILITY
   1579 typename enable_if
   1580 <
   1581     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1582     _Tp
   1583 >::type
   1584 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1585 {
   1586     return __o->fetch_xor(__op);
   1587 }
   1588 
   1589 // atomic_fetch_xor_explicit
   1590 
   1591 template <class _Tp>
   1592 inline _LIBCPP_INLINE_VISIBILITY
   1593 typename enable_if
   1594 <
   1595     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1596     _Tp
   1597 >::type
   1598 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1599 {
   1600     return __o->fetch_xor(__op, __m);
   1601 }
   1602 
   1603 template <class _Tp>
   1604 inline _LIBCPP_INLINE_VISIBILITY
   1605 typename enable_if
   1606 <
   1607     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1608     _Tp
   1609 >::type
   1610 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1611 {
   1612     return __o->fetch_xor(__op, __m);
   1613 }
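
/*
    Usage sketch (illustrative only, not part of this header).  fetch_xor
    atomically toggles bits and, like the other fetch operations, returns the
    previous value.  The names below are invented:

        #include <atomic>

        std::atomic<unsigned> parity(0);

        unsigned toggle_parity()
        {
            // Flip bit 0; the value held before the XOR is returned.
            return std::atomic_fetch_xor(&parity, 1u);
        }
*/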
   1614 
   1615 // flag type and operations
   1616 
   1617 typedef struct atomic_flag
   1618 {
   1619     _Atomic(bool) __a_;
   1620 
   1621     _LIBCPP_INLINE_VISIBILITY
   1622     bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
   1623         {return __c11_atomic_exchange(&__a_, true, __m);}
   1624     _LIBCPP_INLINE_VISIBILITY
   1625     bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
   1626         {return __c11_atomic_exchange(&__a_, true, __m);}
   1627     _LIBCPP_INLINE_VISIBILITY
   1628     void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
   1629         {__c11_atomic_store(&__a_, false, __m);}
   1630     _LIBCPP_INLINE_VISIBILITY
   1631     void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
   1632         {__c11_atomic_store(&__a_, false, __m);}
   1633 
   1634     _LIBCPP_INLINE_VISIBILITY
   1635 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
   1636     atomic_flag() _NOEXCEPT = default;
   1637 #else
   1638     atomic_flag() _NOEXCEPT : __a_() {}
   1639 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
   1640 
   1641     _LIBCPP_INLINE_VISIBILITY
   1642     atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}
   1643 
   1644 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
   1645     atomic_flag(const atomic_flag&) = delete;
   1646     atomic_flag& operator=(const atomic_flag&) = delete;
   1647     atomic_flag& operator=(const atomic_flag&) volatile = delete;
   1648 #else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
   1649 private:
   1650     atomic_flag(const atomic_flag&);
   1651     atomic_flag& operator=(const atomic_flag&);
   1652     atomic_flag& operator=(const atomic_flag&) volatile;
   1653 #endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
   1654 } atomic_flag;
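
/*
    Usage sketch (illustrative only, not part of this header).  atomic_flag is
    the one type the standard requires to be lock-free, which makes it the
    usual building block for a spinlock: test_and_set returns the previous
    value, so spinning until it returns false means this thread set the flag.
    The names below are invented for the example:

        #include <atomic>

        std::atomic_flag gate = ATOMIC_FLAG_INIT;

        void locked_work()
        {
            while (gate.test_and_set(std::memory_order_acquire))
                ;                               // busy-wait until acquired
            // ... critical section ...
            gate.clear(std::memory_order_release);
        }
*/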
   1655 
   1656 inline _LIBCPP_INLINE_VISIBILITY
   1657 bool
   1658 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
   1659 {
   1660     return __o->test_and_set();
   1661 }
   1662 
   1663 inline _LIBCPP_INLINE_VISIBILITY
   1664 bool
   1665 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
   1666 {
   1667     return __o->test_and_set();
   1668 }
   1669 
   1670 inline _LIBCPP_INLINE_VISIBILITY
   1671 bool
   1672 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1673 {
   1674     return __o->test_and_set(__m);
   1675 }
   1676 
   1677 inline _LIBCPP_INLINE_VISIBILITY
   1678 bool
   1679 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1680 {
   1681     return __o->test_and_set(__m);
   1682 }
   1683 
   1684 inline _LIBCPP_INLINE_VISIBILITY
   1685 void
   1686 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
   1687 {
   1688     __o->clear();
   1689 }
   1690 
   1691 inline _LIBCPP_INLINE_VISIBILITY
   1692 void
   1693 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
   1694 {
   1695     __o->clear();
   1696 }
   1697 
   1698 inline _LIBCPP_INLINE_VISIBILITY
   1699 void
   1700 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1701 {
   1702     __o->clear(__m);
   1703 }
   1704 
   1705 inline _LIBCPP_INLINE_VISIBILITY
   1706 void
   1707 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1708 {
   1709     __o->clear(__m);
   1710 }
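
/*
    Usage sketch (illustrative only, not part of this header).  The free
    functions mirror the members and use the same spellings as C11's
    <stdatomic.h>, which helps code shared with C.  The names below are
    invented for the example:

        #include <atomic>

        std::atomic_flag busy = ATOMIC_FLAG_INIT;

        bool try_enter()
        {
            // test_and_set returns the previous value, so false means this
            // caller acquired the flag.
            return !std::atomic_flag_test_and_set_explicit(
                &busy, std::memory_order_acquire);
        }

        void leave()
        {
            std::atomic_flag_clear_explicit(&busy, std::memory_order_release);
        }
*/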
   1711 
   1712 // fences
   1713 
   1714 inline _LIBCPP_INLINE_VISIBILITY
   1715 void
   1716 atomic_thread_fence(memory_order __m) _NOEXCEPT
   1717 {
   1718     __c11_atomic_thread_fence(__m);
   1719 }
   1720 
   1721 inline _LIBCPP_INLINE_VISIBILITY
   1722 void
   1723 atomic_signal_fence(memory_order __m) _NOEXCEPT
   1724 {
   1725     __c11_atomic_signal_fence(__m);
   1726 }
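
/*
    Usage sketch (illustrative only, not part of this header).
    atomic_thread_fence orders surrounding atomic operations without naming a
    particular object; atomic_signal_fence only constrains the compiler and is
    meant for data shared with a signal handler on the same thread.  The
    classic pairing is a release fence before a relaxed store, matched by a
    relaxed load followed by an acquire fence.  The names below are invented:

        #include <atomic>

        int               payload;          // ordinary, non-atomic data
        std::atomic<bool> ready(false);

        void producer()
        {
            payload = 42;
            std::atomic_thread_fence(std::memory_order_release);
            ready.store(true, std::memory_order_relaxed);
        }

        void consumer()
        {
            while (!ready.load(std::memory_order_relaxed))
                ;
            std::atomic_thread_fence(std::memory_order_acquire);
            int v = payload;                // guaranteed to read 42
            (void)v;
        }
*/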
   1727 
   1728 // Atomics for standard typedef types
   1729 
   1730 typedef atomic<bool>               atomic_bool;
   1731 typedef atomic<char>               atomic_char;
   1732 typedef atomic<signed char>        atomic_schar;
   1733 typedef atomic<unsigned char>      atomic_uchar;
   1734 typedef atomic<short>              atomic_short;
   1735 typedef atomic<unsigned short>     atomic_ushort;
   1736 typedef atomic<int>                atomic_int;
   1737 typedef atomic<unsigned int>       atomic_uint;
   1738 typedef atomic<long>               atomic_long;
   1739 typedef atomic<unsigned long>      atomic_ulong;
   1740 typedef atomic<long long>          atomic_llong;
   1741 typedef atomic<unsigned long long> atomic_ullong;
   1742 typedef atomic<char16_t>           atomic_char16_t;
   1743 typedef atomic<char32_t>           atomic_char32_t;
   1744 typedef atomic<wchar_t>            atomic_wchar_t;
   1745 
   1746 typedef atomic<int_least8_t>   atomic_int_least8_t;
   1747 typedef atomic<uint_least8_t>  atomic_uint_least8_t;
   1748 typedef atomic<int_least16_t>  atomic_int_least16_t;
   1749 typedef atomic<uint_least16_t> atomic_uint_least16_t;
   1750 typedef atomic<int_least32_t>  atomic_int_least32_t;
   1751 typedef atomic<uint_least32_t> atomic_uint_least32_t;
   1752 typedef atomic<int_least64_t>  atomic_int_least64_t;
   1753 typedef atomic<uint_least64_t> atomic_uint_least64_t;
   1754 
   1755 typedef atomic<int_fast8_t>   atomic_int_fast8_t;
   1756 typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
   1757 typedef atomic<int_fast16_t>  atomic_int_fast16_t;
   1758 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
   1759 typedef atomic<int_fast32_t>  atomic_int_fast32_t;
   1760 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
   1761 typedef atomic<int_fast64_t>  atomic_int_fast64_t;
   1762 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
   1763 
   1764 typedef atomic<intptr_t>  atomic_intptr_t;
   1765 typedef atomic<uintptr_t> atomic_uintptr_t;
   1766 typedef atomic<size_t>    atomic_size_t;
   1767 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
   1768 typedef atomic<intmax_t>  atomic_intmax_t;
   1769 typedef atomic<uintmax_t> atomic_uintmax_t;
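
/*
    Usage sketch (illustrative only, not part of this header).  Each alias is
    simply atomic<T> for the corresponding typedef, e.g. atomic_size_t is the
    same type as atomic<size_t>.  The names below are invented:

        #include <atomic>
        #include <cstddef>

        std::atomic_size_t bytes_in_flight(0);

        void on_send(std::size_t n)
        {
            bytes_in_flight.fetch_add(n, std::memory_order_relaxed);
        }
*/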
   1770 
   1771 #define ATOMIC_FLAG_INIT {false}
   1772 #define ATOMIC_VAR_INIT(__v) {__v}
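
/*
    Usage sketch (illustrative only, not part of this header).  Both macros
    expand to brace initializers so that namespace-scope atomics can be
    initialized statically:

        #include <atomic>

        std::atomic_flag spin  = ATOMIC_FLAG_INIT;     // starts cleared
        std::atomic<int> epoch = ATOMIC_VAR_INIT(1);   // starts at 1
*/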
   1773 
   1774 // lock-free property
   1775 
   1776 #define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
   1777 #define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
   1778 #define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
   1779 #define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
   1780 #define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
   1781 #define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
   1782 #define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
   1783 #define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
   1784 #define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
   1785 #define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
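
/*
    Usage sketch (illustrative only, not part of this header).  The macros
    expand to 0 (never lock-free), 1 (sometimes lock-free) or 2 (always
    lock-free) and, unlike is_lock_free(), can be tested by the preprocessor.
    The typedef below is invented for the example:

        #include <atomic>

        #if ATOMIC_INT_LOCK_FREE == 2
            // atomic<int> is always lock-free on this target, so it is safe
            // to touch from an async-signal context, for example.
            typedef std::atomic<int> signal_safe_counter;
        #endif
*/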
   1786 
   1787 #endif  //  !__has_feature(cxx_atomic)
   1788 
   1789 _LIBCPP_END_NAMESPACE_STD
   1790 
   1791 #endif  // _LIBCPP_ATOMIC
   1792