Home | History | Annotate | Download | only in include
      1 // -*- C++ -*-
      2 //===--------------------------- atomic -----------------------------------===//
      3 //
      4 //                     The LLVM Compiler Infrastructure
      5 //
      6 // This file is distributed under the University of Illinois Open Source
      7 // License. See LICENSE.TXT for details.
      8 //
      9 //===----------------------------------------------------------------------===//
     10 
     11 #ifndef _LIBCPP_ATOMIC
     12 #define _LIBCPP_ATOMIC
     13 
     14 /*
     15     atomic synopsis
     16 
     17 namespace std
     18 {
     19 
     20 // order and consistency
     21 
     22 typedef enum memory_order
     23 {
     24     memory_order_relaxed,
     25     memory_order_consume,  // load-consume
     26     memory_order_acquire,  // load-acquire
     27     memory_order_release,  // store-release
     28     memory_order_acq_rel,  // store-release load-acquire
     29     memory_order_seq_cst   // store-release load-acquire
     30 } memory_order;
     31 
     32 template <class T> T kill_dependency(T y) noexcept;
     33 
     34 // lock-free property
     35 
     36 #define ATOMIC_BOOL_LOCK_FREE unspecified
     37 #define ATOMIC_CHAR_LOCK_FREE unspecified
     38 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
     39 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
     40 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
     41 #define ATOMIC_SHORT_LOCK_FREE unspecified
     42 #define ATOMIC_INT_LOCK_FREE unspecified
     43 #define ATOMIC_LONG_LOCK_FREE unspecified
     44 #define ATOMIC_LLONG_LOCK_FREE unspecified
     45 #define ATOMIC_POINTER_LOCK_FREE unspecified
     46 
     47 // flag type and operations
     48 
     49 typedef struct atomic_flag
     50 {
     51     bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
     52     bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
     53     void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
     54     void clear(memory_order m = memory_order_seq_cst) noexcept;
     55     atomic_flag()  noexcept = default;
     56     atomic_flag(const atomic_flag&) = delete;
     57     atomic_flag& operator=(const atomic_flag&) = delete;
     58     atomic_flag& operator=(const atomic_flag&) volatile = delete;
     59 } atomic_flag;
     60 
     61 bool
     62     atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
     63 
     64 bool
     65     atomic_flag_test_and_set(atomic_flag* obj) noexcept;
     66 
     67 bool
     68     atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
     69                                       memory_order m) noexcept;
     70 
     71 bool
     72     atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
     73 
     74 void
     75     atomic_flag_clear(volatile atomic_flag* obj) noexcept;
     76 
     77 void
     78     atomic_flag_clear(atomic_flag* obj) noexcept;
     79 
     80 void
     81     atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
     82 
     83 void
     84     atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
     85 
     86 #define ATOMIC_FLAG_INIT see below
     87 #define ATOMIC_VAR_INIT(value) see below
     88 
     89 template <class T>
     90 struct atomic
     91 {
     92     bool is_lock_free() const volatile noexcept;
     93     bool is_lock_free() const noexcept;
     94     void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
     95     void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
     96     T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
     97     T load(memory_order m = memory_order_seq_cst) const noexcept;
     98     operator T() const volatile noexcept;
     99     operator T() const noexcept;
    100     T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    101     T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    102     bool compare_exchange_weak(T& expc, T desr,
    103                                memory_order s, memory_order f) volatile noexcept;
    104     bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    105     bool compare_exchange_strong(T& expc, T desr,
    106                                  memory_order s, memory_order f) volatile noexcept;
    107     bool compare_exchange_strong(T& expc, T desr,
    108                                  memory_order s, memory_order f) noexcept;
    109     bool compare_exchange_weak(T& expc, T desr,
    110                                memory_order m = memory_order_seq_cst) volatile noexcept;
    111     bool compare_exchange_weak(T& expc, T desr,
    112                                memory_order m = memory_order_seq_cst) noexcept;
    113     bool compare_exchange_strong(T& expc, T desr,
    114                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    115     bool compare_exchange_strong(T& expc, T desr,
    116                                  memory_order m = memory_order_seq_cst) noexcept;
    117 
    118     atomic() noexcept = default;
    119     constexpr atomic(T desr) noexcept;
    120     atomic(const atomic&) = delete;
    121     atomic& operator=(const atomic&) = delete;
    122     atomic& operator=(const atomic&) volatile = delete;
    123     T operator=(T) volatile noexcept;
    124     T operator=(T) noexcept;
    125 };
    126 
    127 template <>
    128 struct atomic<integral>
    129 {
    130     bool is_lock_free() const volatile noexcept;
    131     bool is_lock_free() const noexcept;
    132     void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    133     void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    134     integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    135     integral load(memory_order m = memory_order_seq_cst) const noexcept;
    136     operator integral() const volatile noexcept;
    137     operator integral() const noexcept;
    138     integral exchange(integral desr,
    139                       memory_order m = memory_order_seq_cst) volatile noexcept;
    140     integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    141     bool compare_exchange_weak(integral& expc, integral desr,
    142                                memory_order s, memory_order f) volatile noexcept;
    143     bool compare_exchange_weak(integral& expc, integral desr,
    144                                memory_order s, memory_order f) noexcept;
    145     bool compare_exchange_strong(integral& expc, integral desr,
    146                                  memory_order s, memory_order f) volatile noexcept;
    147     bool compare_exchange_strong(integral& expc, integral desr,
    148                                  memory_order s, memory_order f) noexcept;
    149     bool compare_exchange_weak(integral& expc, integral desr,
    150                                memory_order m = memory_order_seq_cst) volatile noexcept;
    151     bool compare_exchange_weak(integral& expc, integral desr,
    152                                memory_order m = memory_order_seq_cst) noexcept;
    153     bool compare_exchange_strong(integral& expc, integral desr,
    154                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    155     bool compare_exchange_strong(integral& expc, integral desr,
    156                                  memory_order m = memory_order_seq_cst) noexcept;
    157 
    158     integral
    159         fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    160     integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    161     integral
    162         fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    163     integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    164     integral
    165         fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    166     integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    167     integral
    168         fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    169     integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    170     integral
    171         fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    172     integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
    173 
    174     atomic() noexcept = default;
    175     constexpr atomic(integral desr) noexcept;
    176     atomic(const atomic&) = delete;
    177     atomic& operator=(const atomic&) = delete;
    178     atomic& operator=(const atomic&) volatile = delete;
    179     integral operator=(integral desr) volatile noexcept;
    180     integral operator=(integral desr) noexcept;
    181 
    182     integral operator++(int) volatile noexcept;
    183     integral operator++(int) noexcept;
    184     integral operator--(int) volatile noexcept;
    185     integral operator--(int) noexcept;
    186     integral operator++() volatile noexcept;
    187     integral operator++() noexcept;
    188     integral operator--() volatile noexcept;
    189     integral operator--() noexcept;
    190     integral operator+=(integral op) volatile noexcept;
    191     integral operator+=(integral op) noexcept;
    192     integral operator-=(integral op) volatile noexcept;
    193     integral operator-=(integral op) noexcept;
    194     integral operator&=(integral op) volatile noexcept;
    195     integral operator&=(integral op) noexcept;
    196     integral operator|=(integral op) volatile noexcept;
    197     integral operator|=(integral op) noexcept;
    198     integral operator^=(integral op) volatile noexcept;
    199     integral operator^=(integral op) noexcept;
    200 };
    201 
    202 template <class T>
    203 struct atomic<T*>
    204 {
    205     bool is_lock_free() const volatile noexcept;
    206     bool is_lock_free() const noexcept;
    207     void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    208     void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    209     T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    210     T* load(memory_order m = memory_order_seq_cst) const noexcept;
    211     operator T*() const volatile noexcept;
    212     operator T*() const noexcept;
    213     T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    214     T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    215     bool compare_exchange_weak(T*& expc, T* desr,
    216                                memory_order s, memory_order f) volatile noexcept;
    217     bool compare_exchange_weak(T*& expc, T* desr,
    218                                memory_order s, memory_order f) noexcept;
    219     bool compare_exchange_strong(T*& expc, T* desr,
    220                                  memory_order s, memory_order f) volatile noexcept;
    221     bool compare_exchange_strong(T*& expc, T* desr,
    222                                  memory_order s, memory_order f) noexcept;
    223     bool compare_exchange_weak(T*& expc, T* desr,
    224                                memory_order m = memory_order_seq_cst) volatile noexcept;
    225     bool compare_exchange_weak(T*& expc, T* desr,
    226                                memory_order m = memory_order_seq_cst) noexcept;
    227     bool compare_exchange_strong(T*& expc, T* desr,
    228                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    229     bool compare_exchange_strong(T*& expc, T* desr,
    230                                  memory_order m = memory_order_seq_cst) noexcept;
    231     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    232     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    233     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    234     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    235 
    236     atomic() noexcept = default;
    237     constexpr atomic(T* desr) noexcept;
    238     atomic(const atomic&) = delete;
    239     atomic& operator=(const atomic&) = delete;
    240     atomic& operator=(const atomic&) volatile = delete;
    241 
    242     T* operator=(T*) volatile noexcept;
    243     T* operator=(T*) noexcept;
    244     T* operator++(int) volatile noexcept;
    245     T* operator++(int) noexcept;
    246     T* operator--(int) volatile noexcept;
    247     T* operator--(int) noexcept;
    248     T* operator++() volatile noexcept;
    249     T* operator++() noexcept;
    250     T* operator--() volatile noexcept;
    251     T* operator--() noexcept;
    252     T* operator+=(ptrdiff_t op) volatile noexcept;
    253     T* operator+=(ptrdiff_t op) noexcept;
    254     T* operator-=(ptrdiff_t op) volatile noexcept;
    255     T* operator-=(ptrdiff_t op) noexcept;
    256 };
    257 
    258 
    259 template <class T>
    260     bool
    261     atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
    262 
    263 template <class T>
    264     bool
    265     atomic_is_lock_free(const atomic<T>* obj) noexcept;
    266 
    267 template <class T>
    268     void
    269     atomic_init(volatile atomic<T>* obj, T desr) noexcept;
    270 
    271 template <class T>
    272     void
    273     atomic_init(atomic<T>* obj, T desr) noexcept;
    274 
    275 template <class T>
    276     void
    277     atomic_store(volatile atomic<T>* obj, T desr) noexcept;
    278 
    279 template <class T>
    280     void
    281     atomic_store(atomic<T>* obj, T desr) noexcept;
    282 
    283 template <class T>
    284     void
    285     atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    286 
    287 template <class T>
    288     void
    289     atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    290 
    291 template <class T>
    292     T
    293     atomic_load(const volatile atomic<T>* obj) noexcept;
    294 
    295 template <class T>
    296     T
    297     atomic_load(const atomic<T>* obj) noexcept;
    298 
    299 template <class T>
    300     T
    301     atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
    302 
    303 template <class T>
    304     T
    305     atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
    306 
    307 template <class T>
    308     T
    309     atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
    310 
    311 template <class T>
    312     T
    313     atomic_exchange(atomic<T>* obj, T desr) noexcept;
    314 
    315 template <class T>
    316     T
    317     atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    318 
    319 template <class T>
    320     T
    321     atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    322 
    323 template <class T>
    324     bool
    325     atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    326 
    327 template <class T>
    328     bool
    329     atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
    330 
    331 template <class T>
    332     bool
    333     atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    334 
    335 template <class T>
    336     bool
    337     atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
    338 
    339 template <class T>
    340     bool
    341     atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
    342                                           T desr,
    343                                           memory_order s, memory_order f) noexcept;
    344 
    345 template <class T>
    346     bool
    347     atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
    348                                           memory_order s, memory_order f) noexcept;
    349 
    350 template <class T>
    351     bool
    352     atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
    353                                             T* expc, T desr,
    354                                             memory_order s, memory_order f) noexcept;
    355 
    356 template <class T>
    357     bool
    358     atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
    359                                             T desr,
    360                                             memory_order s, memory_order f) noexcept;
    361 
    362 template <class Integral>
    363     Integral
    364     atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
    365 
    366 template <class Integral>
    367     Integral
    368     atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
    369 
    370 template <class Integral>
    371     Integral
    372     atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
    373                               memory_order m) noexcept;
    374 template <class Integral>
    375     Integral
    376     atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
    377                               memory_order m) noexcept;
    378 template <class Integral>
    379     Integral
    380     atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
    381 
    382 template <class Integral>
    383     Integral
    384     atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
    385 
    386 template <class Integral>
    387     Integral
    388     atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
    389                               memory_order m) noexcept;
    390 template <class Integral>
    391     Integral
    392     atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
    393                               memory_order m) noexcept;
    394 template <class Integral>
    395     Integral
    396     atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
    397 
    398 template <class Integral>
    399     Integral
    400     atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
    401 
    402 template <class Integral>
    403     Integral
    404     atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
    405                               memory_order m) noexcept;
    406 template <class Integral>
    407     Integral
    408     atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
    409                               memory_order m) noexcept;
    410 template <class Integral>
    411     Integral
    412     atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
    413 
    414 template <class Integral>
    415     Integral
    416     atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
    417 
    418 template <class Integral>
    419     Integral
    420     atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
    421                              memory_order m) noexcept;
    422 template <class Integral>
    423     Integral
    424     atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
    425                              memory_order m) noexcept;
    426 template <class Integral>
    427     Integral
    428     atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
    429 
    430 template <class Integral>
    431     Integral
    432     atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
    433 
    434 template <class Integral>
    435     Integral
    436     atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
    437                               memory_order m) noexcept;
    438 template <class Integral>
    439     Integral
    440     atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
    441                               memory_order m) noexcept;
    442 
    443 template <class T>
    444     T*
    445     atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    446 
    447 template <class T>
    448     T*
    449     atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
    450 
    451 template <class T>
    452     T*
    453     atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    454                               memory_order m) noexcept;
    455 template <class T>
    456     T*
    457     atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    458 
    459 template <class T>
    460     T*
    461     atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    462 
    463 template <class T>
    464     T*
    465     atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
    466 
    467 template <class T>
    468     T*
    469     atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    470                               memory_order m) noexcept;
    471 template <class T>
    472     T*
    473     atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    474 
    475 // Atomics for standard typedef types
    476 
    477 typedef atomic<bool>               atomic_bool;
    478 typedef atomic<char>               atomic_char;
    479 typedef atomic<signed char>        atomic_schar;
    480 typedef atomic<unsigned char>      atomic_uchar;
    481 typedef atomic<short>              atomic_short;
    482 typedef atomic<unsigned short>     atomic_ushort;
    483 typedef atomic<int>                atomic_int;
    484 typedef atomic<unsigned int>       atomic_uint;
    485 typedef atomic<long>               atomic_long;
    486 typedef atomic<unsigned long>      atomic_ulong;
    487 typedef atomic<long long>          atomic_llong;
    488 typedef atomic<unsigned long long> atomic_ullong;
    489 typedef atomic<char16_t>           atomic_char16_t;
    490 typedef atomic<char32_t>           atomic_char32_t;
    491 typedef atomic<wchar_t>            atomic_wchar_t;
    492 
    493 typedef atomic<int_least8_t>   atomic_int_least8_t;
    494 typedef atomic<uint_least8_t>  atomic_uint_least8_t;
    495 typedef atomic<int_least16_t>  atomic_int_least16_t;
    496 typedef atomic<uint_least16_t> atomic_uint_least16_t;
    497 typedef atomic<int_least32_t>  atomic_int_least32_t;
    498 typedef atomic<uint_least32_t> atomic_uint_least32_t;
    499 typedef atomic<int_least64_t>  atomic_int_least64_t;
    500 typedef atomic<uint_least64_t> atomic_uint_least64_t;
    501 
    502 typedef atomic<int_fast8_t>   atomic_int_fast8_t;
    503 typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
    504 typedef atomic<int_fast16_t>  atomic_int_fast16_t;
    505 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
    506 typedef atomic<int_fast32_t>  atomic_int_fast32_t;
    507 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
    508 typedef atomic<int_fast64_t>  atomic_int_fast64_t;
    509 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
    510 
    511 typedef atomic<intptr_t>  atomic_intptr_t;
    512 typedef atomic<uintptr_t> atomic_uintptr_t;
    513 typedef atomic<size_t>    atomic_size_t;
    514 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
    515 typedef atomic<intmax_t>  atomic_intmax_t;
    516 typedef atomic<uintmax_t> atomic_uintmax_t;
    517 
    518 // fences
    519 
    520 void atomic_thread_fence(memory_order m) noexcept;
    521 void atomic_signal_fence(memory_order m) noexcept;
    522 
    523 }  // std
    524 
    525 */
    526 
    527 #include <__config>
    528 #include <cstddef>
    529 #include <cstdint>
    530 #include <type_traits>
    531 
    532 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
    533 #pragma GCC system_header
    534 #endif
    535 
    536 _LIBCPP_BEGIN_NAMESPACE_STD
    537 
    538 #if !__has_feature(cxx_atomic) && _GNUC_VER < 407
    539 #error <atomic> is not implemented
    540 #else
    541 
    542 typedef enum memory_order
    543 {
    544     memory_order_relaxed, memory_order_consume, memory_order_acquire,
    545     memory_order_release, memory_order_acq_rel, memory_order_seq_cst
    546 } memory_order;
    547 
    548 #if _GNUC_VER >= 407
// Emulation layer: implements the <atomic> primitives on top of the GCC
// (>= 4.7) __atomic_* builtins when Clang's _Atomic support is unavailable.
// Everything in this namespace is an implementation detail.
namespace __gcc_atomic {
// Per-type wrapper holding the stored value; every atomic operation is
// performed through a pointer to __a_value via the __atomic builtins.
template <typename T>
struct __gcc_atomic_t {
  __gcc_atomic_t() _NOEXCEPT {}
  explicit __gcc_atomic_t(T value) _NOEXCEPT : __a_value(value) {}
  T __a_value;
};
// Make the rest of this header's _Atomic(T) spelling resolve to the wrapper.
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>

// Declared but never defined: only used in unevaluated sizeof() contexts.
template <typename T> T __create();

// SFINAE probe: the first overload is viable only when
// "__Tp()->__a_value = <__Td value>" is a well-formed assignment; otherwise
// overload resolution falls through to the variadic one returning __two.
template <typename __Tp, typename __Td>
typename enable_if<sizeof(__Tp()->__a_value = __create<__Td>()), char>::type
    __test_atomic_assignable(int);
template <typename T, typename U>
__two __test_atomic_assignable(...);

// true iff assignment through a __Tp (possibly volatile-qualified pointer)
// compiles; distinguishes types lacking a volatile-qualified operator=.
template <typename __Tp, typename __Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<__Tp, __Td>(1)) == sizeof(char);
};

// Map std::memory_order onto GCC's __ATOMIC_* constants.
static inline constexpr int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

} // namespace __gcc_atomic
    583 
// atomic_init on a volatile atomic, selected (via SFINAE) when _Tp can be
// assigned through a volatile lvalue: a plain non-atomic store suffices,
// since atomic_init implies no synchronization.
template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
    591 
    592 template <typename _Tp>
    593 static inline
    594 typename enable_if<
    595     !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
    596      __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
    597 __c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
    598   // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
    599   // the default operator= in an object is not volatile, a byte-by-byte copy
    600   // is required.
    601   volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
    602   volatile char* end = to + sizeof(_Tp);
    603   char* from = reinterpret_cast<char*>(&__val);
    604   while (to != end) {
    605     *to++ = *from++;
    606   }
    607 }
    608 
// atomic_init on a non-volatile atomic: a plain non-atomic store, matching
// C11 atomic_init semantics (no synchronization implied).
template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
    613 
// atomic_thread_fence: inter-thread synchronization fence with the
// requested ordering.
static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}
    617 
// atomic_signal_fence: ordering between a thread and a signal handler
// executed in the same thread (a compiler barrier per the GCC builtin docs;
// no hardware fence is emitted).
static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}
    621 
// Whether atomic objects of the given size are always lock-free.  The null
// pointer second argument asks the builtin about a typically-aligned object.
static inline bool __c11_atomic_is_lock_free(size_t __size) {
  return __atomic_is_lock_free(__size, 0);
}
    625 
    626 template <typename _Tp>
    627 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
    628                                       memory_order __order) {
    629   return __atomic_store(&__a->__a_value, &__val,
    630                         __gcc_atomic::__to_gcc_order(__order));
    631 }
    632 
    633 template <typename _Tp>
    634 static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
    635                                       memory_order __order) {
    636   return __atomic_store(&__a->__a_value, &__val,
    637                         __gcc_atomic::__to_gcc_order(__order));
    638 }
    639 
    640 template <typename _Tp>
    641 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
    642                                     memory_order __order) {
    643   _Tp __ret;
    644   __atomic_load(&__a->__a_value, &__ret,
    645                 __gcc_atomic::__to_gcc_order(__order));
    646   return __ret;
    647 }
    648 
    649 template <typename _Tp>
    650 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
    651   _Tp __ret;
    652   __atomic_load(&__a->__a_value, &__ret,
    653                 __gcc_atomic::__to_gcc_order(__order));
    654   return __ret;
    655 }
    656 
    657 template <typename _Tp>
    658 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
    659                                         _Tp __value, memory_order __order) {
    660   _Tp __ret;
    661   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    662                     __gcc_atomic::__to_gcc_order(__order));
    663   return __ret;
    664 }
    665 
    666 template <typename _Tp>
    667 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
    668                                         memory_order __order) {
    669   _Tp __ret;
    670   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    671                     __gcc_atomic::__to_gcc_order(__order));
    672   return __ret;
    673 }
    674 
// Compare-exchange shims over the GCC/Clang __atomic_compare_exchange
// builtin, in volatile and non-volatile flavors.  The builtin's fourth
// argument selects weak (true) vs. strong (false) semantics; on failure the
// builtin writes the value actually observed back into *__expected.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_order(__failure));
}

// Weak variants: per the builtin's documentation these may fail spuriously
// (report failure even though *__expected matched), in exchange for cheaper
// code on some targets.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_order(__failure));
}
    714 
// __skip_amt<T>::value scales the delta handed to the __atomic_fetch_add/sub
// builtins below: 1 for non-pointer types, sizeof(T) for T* so that a delta
// of N advances the pointer by N elements rather than N bytes.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (No ::value member, so uses of __skip_amt<T[]> fail to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
    727 
// fetch_add/fetch_sub shims.  _Td is the delta type (the delta itself for
// arithmetic types, a ptrdiff_t element count for pointers); the delta is
// scaled by __skip_amt so pointer atomics advance by whole elements.
// Each returns the value held before the operation.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
    755 
// Bitwise read-modify-write shims (and/or/xor), volatile and non-volatile.
// No __skip_amt scaling here: these are only instantiated for integral
// types.  Each returns the value held before the operation.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
    797 #endif // _GNUC_VER >= 407
    798 
// [atomics.order] std::kill_dependency: terminates a memory_order_consume
// dependency chain by laundering the value through a function call; the
// value itself is returned unchanged.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
    806 
    807 // general atomic<T>
    808 
    809 template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
    810 struct __atomic_base  // false
    811 {
    812     mutable _Atomic(_Tp) __a_;
    813 
    814     _LIBCPP_INLINE_VISIBILITY
    815     bool is_lock_free() const volatile _NOEXCEPT
    816         {return __c11_atomic_is_lock_free(sizeof(_Tp));}
    817     _LIBCPP_INLINE_VISIBILITY
    818     bool is_lock_free() const _NOEXCEPT
    819         {return __c11_atomic_is_lock_free(sizeof(_Tp));}
    820     _LIBCPP_INLINE_VISIBILITY
    821     void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    822         {__c11_atomic_store(&__a_, __d, __m);}
    823     _LIBCPP_INLINE_VISIBILITY
    824     void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    825         {__c11_atomic_store(&__a_, __d, __m);}
    826     _LIBCPP_INLINE_VISIBILITY
    827     _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    828         {return __c11_atomic_load(&__a_, __m);}
    829     _LIBCPP_INLINE_VISIBILITY
    830     _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    831         {return __c11_atomic_load(&__a_, __m);}
    832     _LIBCPP_INLINE_VISIBILITY
    833     operator _Tp() const volatile _NOEXCEPT {return load();}
    834     _LIBCPP_INLINE_VISIBILITY
    835     operator _Tp() const _NOEXCEPT          {return load();}
    836     _LIBCPP_INLINE_VISIBILITY
    837     _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    838         {return __c11_atomic_exchange(&__a_, __d, __m);}
    839     _LIBCPP_INLINE_VISIBILITY
    840     _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    841         {return __c11_atomic_exchange(&__a_, __d, __m);}
    842     _LIBCPP_INLINE_VISIBILITY
    843     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    844                                memory_order __s, memory_order __f) volatile _NOEXCEPT
    845         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    846     _LIBCPP_INLINE_VISIBILITY
    847     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    848                                memory_order __s, memory_order __f) _NOEXCEPT
    849         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    850     _LIBCPP_INLINE_VISIBILITY
    851     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    852                                  memory_order __s, memory_order __f) volatile _NOEXCEPT
    853         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    854     _LIBCPP_INLINE_VISIBILITY
    855     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    856                                  memory_order __s, memory_order __f) _NOEXCEPT
    857         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    858     _LIBCPP_INLINE_VISIBILITY
    859     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    860                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    861         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    862     _LIBCPP_INLINE_VISIBILITY
    863     bool compare_exchange_weak(_Tp& __e, _Tp __d,
    864                                memory_order __m = memory_order_seq_cst) _NOEXCEPT
    865         {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    866     _LIBCPP_INLINE_VISIBILITY
    867     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    868                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    869         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    870     _LIBCPP_INLINE_VISIBILITY
    871     bool compare_exchange_strong(_Tp& __e, _Tp __d,
    872                                  memory_order __m = memory_order_seq_cst) _NOEXCEPT
    873         {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    874 
    875     _LIBCPP_INLINE_VISIBILITY
    876 #ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    877     __atomic_base() _NOEXCEPT = default;
    878 #else
    879     __atomic_base() _NOEXCEPT : __a_() {}
    880 #endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    881 
    882     _LIBCPP_INLINE_VISIBILITY
    883     _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    884 #ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    885     __atomic_base(const __atomic_base&) = delete;
    886     __atomic_base& operator=(const __atomic_base&) = delete;
    887     __atomic_base& operator=(const __atomic_base&) volatile = delete;
    888 #else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    889 private:
    890     __atomic_base(const __atomic_base&);
    891     __atomic_base& operator=(const __atomic_base&);
    892     __atomic_base& operator=(const __atomic_base&) volatile;
    893 #endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    894 };
    895 
    896 // atomic<Integral>
    897 
// Specialization for non-bool integral types: layers the arithmetic and
// bitwise read-modify-write operations (fetch_add/sub/and/or/xor) and the
// corresponding operators on top of the generic base.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Each fetch_* returns the value held *before* the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Operators are defined in terms of the fetch_* operations (seq_cst).
    // Post-forms return the old value; pre-forms and compound assignment
    // recompute the new value locally from the fetched (old) value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
    976 
    977 // atomic<T>
    978 
// Primary std::atomic<T>: adds construction from T and assignment from T on
// top of __atomic_base.  operator= performs a seq_cst store and returns the
// stored value (by value, not a reference).
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
    996 
    997 // atomic<T*>
    998 
// Partial specialization std::atomic<T*>: pointer assignment plus pointer
// arithmetic (fetch_add/fetch_sub take a ptrdiff_t element count; scaling
// to bytes happens in the __c11_atomic_fetch_* shims via __skip_amt).
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // fetch_add/fetch_sub return the pointer value held before the update.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Operators mirror the integral specialization: post-forms return the
    // old pointer, pre-forms and op= recompute the new pointer locally.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
   1056 
   1057 // atomic_is_lock_free
   1058 
   1059 template <class _Tp>
   1060 inline _LIBCPP_INLINE_VISIBILITY
   1061 bool
   1062 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
   1063 {
   1064     return __o->is_lock_free();
   1065 }
   1066 
   1067 template <class _Tp>
   1068 inline _LIBCPP_INLINE_VISIBILITY
   1069 bool
   1070 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
   1071 {
   1072     return __o->is_lock_free();
   1073 }
   1074 
   1075 // atomic_init
   1076 
   1077 template <class _Tp>
   1078 inline _LIBCPP_INLINE_VISIBILITY
   1079 void
   1080 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1081 {
   1082     __c11_atomic_init(&__o->__a_, __d);
   1083 }
   1084 
   1085 template <class _Tp>
   1086 inline _LIBCPP_INLINE_VISIBILITY
   1087 void
   1088 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1089 {
   1090     __c11_atomic_init(&__o->__a_, __d);
   1091 }
   1092 
   1093 // atomic_store
   1094 
   1095 template <class _Tp>
   1096 inline _LIBCPP_INLINE_VISIBILITY
   1097 void
   1098 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1099 {
   1100     __o->store(__d);
   1101 }
   1102 
   1103 template <class _Tp>
   1104 inline _LIBCPP_INLINE_VISIBILITY
   1105 void
   1106 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1107 {
   1108     __o->store(__d);
   1109 }
   1110 
   1111 // atomic_store_explicit
   1112 
   1113 template <class _Tp>
   1114 inline _LIBCPP_INLINE_VISIBILITY
   1115 void
   1116 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1117 {
   1118     __o->store(__d, __m);
   1119 }
   1120 
   1121 template <class _Tp>
   1122 inline _LIBCPP_INLINE_VISIBILITY
   1123 void
   1124 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1125 {
   1126     __o->store(__d, __m);
   1127 }
   1128 
   1129 // atomic_load
   1130 
   1131 template <class _Tp>
   1132 inline _LIBCPP_INLINE_VISIBILITY
   1133 _Tp
   1134 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
   1135 {
   1136     return __o->load();
   1137 }
   1138 
   1139 template <class _Tp>
   1140 inline _LIBCPP_INLINE_VISIBILITY
   1141 _Tp
   1142 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
   1143 {
   1144     return __o->load();
   1145 }
   1146 
   1147 // atomic_load_explicit
   1148 
   1149 template <class _Tp>
   1150 inline _LIBCPP_INLINE_VISIBILITY
   1151 _Tp
   1152 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
   1153 {
   1154     return __o->load(__m);
   1155 }
   1156 
   1157 template <class _Tp>
   1158 inline _LIBCPP_INLINE_VISIBILITY
   1159 _Tp
   1160 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
   1161 {
   1162     return __o->load(__m);
   1163 }
   1164 
   1165 // atomic_exchange
   1166 
   1167 template <class _Tp>
   1168 inline _LIBCPP_INLINE_VISIBILITY
   1169 _Tp
   1170 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1171 {
   1172     return __o->exchange(__d);
   1173 }
   1174 
   1175 template <class _Tp>
   1176 inline _LIBCPP_INLINE_VISIBILITY
   1177 _Tp
   1178 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1179 {
   1180     return __o->exchange(__d);
   1181 }
   1182 
   1183 // atomic_exchange_explicit
   1184 
   1185 template <class _Tp>
   1186 inline _LIBCPP_INLINE_VISIBILITY
   1187 _Tp
   1188 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1189 {
   1190     return __o->exchange(__d, __m);
   1191 }
   1192 
   1193 template <class _Tp>
   1194 inline _LIBCPP_INLINE_VISIBILITY
   1195 _Tp
   1196 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1197 {
   1198     return __o->exchange(__d, __m);
   1199 }
   1200 
// atomic_compare_exchange_weak

// Non-member weak CAS with the default (seq_cst) ordering.  __e points at
// the expected value; the member function receives it by reference, so on
// failure the observed value is written back through __e.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

// Non-member strong CAS (no spurious failures), default seq_cst ordering.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}
   1236 
// atomic_compare_exchange_weak_explicit

// Non-member CAS with explicit success (__s) and failure (__f) orderings,
// forwarded to the two-order member overloads.  On failure the observed
// value is written back through __e.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
   1279 
// atomic_fetch_add

// Non-member fetch_add (seq_cst); returns the pre-update value.  The
// integral overloads are constrained with enable_if to non-bool integral
// types; the pointer overloads take a ptrdiff_t element count.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
   1321 
// atomic_fetch_add_explicit

// As atomic_fetch_add, but with a caller-supplied memory ordering.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
   1364 
// atomic_fetch_sub

// Non-member fetch_sub (seq_cst); returns the pre-update value.  Same
// constraints as atomic_fetch_add: non-bool integrals, or T* + ptrdiff_t.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
   1406 
// atomic_fetch_sub_explicit

// As atomic_fetch_sub, but with a caller-supplied memory ordering.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
   1449 
// atomic_fetch_and

// Non-member fetch_and; returns the pre-update value.  Only non-bool
// integral types are accepted (no pointer overloads for bitwise ops).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

// As atomic_fetch_and, but with a caller-supplied memory ordering.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
   1501 
// atomic_fetch_or

// Non-member fetch_or; returns the pre-update value.  Non-bool integral
// types only.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

// As atomic_fetch_or, but with a caller-supplied memory ordering.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
   1553 
   1554 // atomic_fetch_xor
   1555 
   1556 template <class _Tp>
   1557 inline _LIBCPP_INLINE_VISIBILITY
   1558 typename enable_if
   1559 <
   1560     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1561     _Tp
   1562 >::type
   1563 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1564 {
   1565     return __o->fetch_xor(__op);
   1566 }
   1567 
   1568 template <class _Tp>
   1569 inline _LIBCPP_INLINE_VISIBILITY
   1570 typename enable_if
   1571 <
   1572     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1573     _Tp
   1574 >::type
   1575 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1576 {
   1577     return __o->fetch_xor(__op);
   1578 }
   1579 
   1580 // atomic_fetch_xor_explicit
   1581 
   1582 template <class _Tp>
   1583 inline _LIBCPP_INLINE_VISIBILITY
   1584 typename enable_if
   1585 <
   1586     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1587     _Tp
   1588 >::type
   1589 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1590 {
   1591     return __o->fetch_xor(__op, __m);
   1592 }
   1593 
   1594 template <class _Tp>
   1595 inline _LIBCPP_INLINE_VISIBILITY
   1596 typename enable_if
   1597 <
   1598     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1599     _Tp
   1600 >::type
   1601 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1602 {
   1603     return __o->fetch_xor(__op, __m);
   1604 }
   1605 
   1606 // flag type and operations
   1607 
// atomic_flag: the minimal lock-free boolean flag type ([atomics.flag]).
// Implemented directly on top of Clang's __c11_atomic_* builtins over an
// _Atomic(bool) payload, so every operation maps to a single C11 atomic op.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;  // the flag state; false == "clear"

    // Atomically set the flag to true and return its previous value
    // (an exchange with the requested memory order).
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    // Atomically set the flag to false (a plain atomic store).
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    // Default constructor leaves the flag uninitialized per the standard;
    // use ATOMIC_FLAG_INIT for a guaranteed-clear flag.  Falls back to a
    // user-provided default when '= default' is unavailable.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    // Extension used by ATOMIC_FLAG_INIT ({false}) to value-initialize.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

    // atomic_flag is neither copyable nor copy-assignable; emulate deleted
    // functions with private undefined declarations on old compilers.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;
   1646 
   1647 inline _LIBCPP_INLINE_VISIBILITY
   1648 bool
   1649 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
   1650 {
   1651     return __o->test_and_set();
   1652 }
   1653 
   1654 inline _LIBCPP_INLINE_VISIBILITY
   1655 bool
   1656 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
   1657 {
   1658     return __o->test_and_set();
   1659 }
   1660 
   1661 inline _LIBCPP_INLINE_VISIBILITY
   1662 bool
   1663 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1664 {
   1665     return __o->test_and_set(__m);
   1666 }
   1667 
   1668 inline _LIBCPP_INLINE_VISIBILITY
   1669 bool
   1670 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1671 {
   1672     return __o->test_and_set(__m);
   1673 }
   1674 
   1675 inline _LIBCPP_INLINE_VISIBILITY
   1676 void
   1677 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
   1678 {
   1679     __o->clear();
   1680 }
   1681 
   1682 inline _LIBCPP_INLINE_VISIBILITY
   1683 void
   1684 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
   1685 {
   1686     __o->clear();
   1687 }
   1688 
   1689 inline _LIBCPP_INLINE_VISIBILITY
   1690 void
   1691 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1692 {
   1693     __o->clear(__m);
   1694 }
   1695 
   1696 inline _LIBCPP_INLINE_VISIBILITY
   1697 void
   1698 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1699 {
   1700     __o->clear(__m);
   1701 }
   1702 
   1703 // fences
   1704 
   1705 inline _LIBCPP_INLINE_VISIBILITY
   1706 void
   1707 atomic_thread_fence(memory_order __m) _NOEXCEPT
   1708 {
   1709     __c11_atomic_thread_fence(__m);
   1710 }
   1711 
   1712 inline _LIBCPP_INLINE_VISIBILITY
   1713 void
   1714 atomic_signal_fence(memory_order __m) _NOEXCEPT
   1715 {
   1716     __c11_atomic_signal_fence(__m);
   1717 }
   1718 
// Atomics for standard typedef types

// Convenience names for atomic specializations over the builtin integer
// and character types ([atomics.types.generic]).
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// Atomic counterparts of the <cstdint> least-width typedefs.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Atomic counterparts of the <cstdint> fast-width typedefs.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Atomic counterparts of the pointer-sized / max-width typedefs.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
   1761 
// Initializes an atomic_flag to the clear (false) state:
//   std::atomic_flag f = ATOMIC_FLAG_INIT;
#define ATOMIC_FLAG_INIT {false}
// Initializes an atomic<T> with the value __v via aggregate-style init.
#define ATOMIC_VAR_INIT(__v) {__v}

// lock-free property

// Each macro reports whether the corresponding atomic type is lock-free,
// forwarded from the compiler's predefined __GCC_ATOMIC_*_LOCK_FREE
// macros (0 = never, 1 = sometimes, 2 = always lock-free).
#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
   1777 
   1778 #endif  //  !__has_feature(cxx_atomic)
   1779 
   1780 _LIBCPP_END_NAMESPACE_STD
   1781 
   1782 #endif  // _LIBCPP_ATOMIC
   1783