Home | History | Annotate | Download | only in include
      1 // -*- C++ -*-
      2 //===--------------------------- atomic -----------------------------------===//
      3 //
      4 //                     The LLVM Compiler Infrastructure
      5 //
      6 // This file is distributed under the University of Illinois Open Source
      7 // License. See LICENSE.TXT for details.
      8 //
      9 //===----------------------------------------------------------------------===//
     10 
     11 #ifndef _LIBCPP_ATOMIC
     12 #define _LIBCPP_ATOMIC
     13 
     14 /*
     15     atomic synopsis
     16 
     17 namespace std
     18 {
     19 
     20 // feature test macro
     21 
     22 #define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
     23 
     24 // order and consistency
     25 
     26 typedef enum memory_order
     27 {
     28     memory_order_relaxed,
     29     memory_order_consume,  // load-consume
     30     memory_order_acquire,  // load-acquire
     31     memory_order_release,  // store-release
     32     memory_order_acq_rel,  // store-release load-acquire
     33     memory_order_seq_cst   // store-release load-acquire
     34 } memory_order;
     35 
     36 template <class T> T kill_dependency(T y) noexcept;
     37 
     38 // lock-free property
     39 
     40 #define ATOMIC_BOOL_LOCK_FREE unspecified
     41 #define ATOMIC_CHAR_LOCK_FREE unspecified
     42 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
     43 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
     44 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
     45 #define ATOMIC_SHORT_LOCK_FREE unspecified
     46 #define ATOMIC_INT_LOCK_FREE unspecified
     47 #define ATOMIC_LONG_LOCK_FREE unspecified
     48 #define ATOMIC_LLONG_LOCK_FREE unspecified
     49 #define ATOMIC_POINTER_LOCK_FREE unspecified
     50 
     51 // flag type and operations
     52 
     53 typedef struct atomic_flag
     54 {
     55     bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
     56     bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
     57     void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
     58     void clear(memory_order m = memory_order_seq_cst) noexcept;
     59     atomic_flag()  noexcept = default;
     60     atomic_flag(const atomic_flag&) = delete;
     61     atomic_flag& operator=(const atomic_flag&) = delete;
     62     atomic_flag& operator=(const atomic_flag&) volatile = delete;
     63 } atomic_flag;
     64 
     65 bool
     66     atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
     67 
     68 bool
     69     atomic_flag_test_and_set(atomic_flag* obj) noexcept;
     70 
     71 bool
     72     atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
     73                                       memory_order m) noexcept;
     74 
     75 bool
     76     atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
     77 
     78 void
     79     atomic_flag_clear(volatile atomic_flag* obj) noexcept;
     80 
     81 void
     82     atomic_flag_clear(atomic_flag* obj) noexcept;
     83 
     84 void
     85     atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
     86 
     87 void
     88     atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
     89 
     90 #define ATOMIC_FLAG_INIT see below
     91 #define ATOMIC_VAR_INIT(value) see below
     92 
     93 template <class T>
     94 struct atomic
     95 {
     96     static constexpr bool is_always_lock_free;
     97     bool is_lock_free() const volatile noexcept;
     98     bool is_lock_free() const noexcept;
     99     void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    100     void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
    101     T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    102     T load(memory_order m = memory_order_seq_cst) const noexcept;
    103     operator T() const volatile noexcept;
    104     operator T() const noexcept;
    105     T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    106     T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    107     bool compare_exchange_weak(T& expc, T desr,
    108                                memory_order s, memory_order f) volatile noexcept;
    109     bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    110     bool compare_exchange_strong(T& expc, T desr,
    111                                  memory_order s, memory_order f) volatile noexcept;
    112     bool compare_exchange_strong(T& expc, T desr,
    113                                  memory_order s, memory_order f) noexcept;
    114     bool compare_exchange_weak(T& expc, T desr,
    115                                memory_order m = memory_order_seq_cst) volatile noexcept;
    116     bool compare_exchange_weak(T& expc, T desr,
    117                                memory_order m = memory_order_seq_cst) noexcept;
    118     bool compare_exchange_strong(T& expc, T desr,
    119                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    120     bool compare_exchange_strong(T& expc, T desr,
    121                                  memory_order m = memory_order_seq_cst) noexcept;
    122 
    123     atomic() noexcept = default;
    124     constexpr atomic(T desr) noexcept;
    125     atomic(const atomic&) = delete;
    126     atomic& operator=(const atomic&) = delete;
    127     atomic& operator=(const atomic&) volatile = delete;
    128     T operator=(T) volatile noexcept;
    129     T operator=(T) noexcept;
    130 };
    131 
    132 template <>
    133 struct atomic<integral>
    134 {
    135     static constexpr bool is_always_lock_free;
    136     bool is_lock_free() const volatile noexcept;
    137     bool is_lock_free() const noexcept;
    138     void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    139     void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    140     integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    141     integral load(memory_order m = memory_order_seq_cst) const noexcept;
    142     operator integral() const volatile noexcept;
    143     operator integral() const noexcept;
    144     integral exchange(integral desr,
    145                       memory_order m = memory_order_seq_cst) volatile noexcept;
    146     integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    147     bool compare_exchange_weak(integral& expc, integral desr,
    148                                memory_order s, memory_order f) volatile noexcept;
    149     bool compare_exchange_weak(integral& expc, integral desr,
    150                                memory_order s, memory_order f) noexcept;
    151     bool compare_exchange_strong(integral& expc, integral desr,
    152                                  memory_order s, memory_order f) volatile noexcept;
    153     bool compare_exchange_strong(integral& expc, integral desr,
    154                                  memory_order s, memory_order f) noexcept;
    155     bool compare_exchange_weak(integral& expc, integral desr,
    156                                memory_order m = memory_order_seq_cst) volatile noexcept;
    157     bool compare_exchange_weak(integral& expc, integral desr,
    158                                memory_order m = memory_order_seq_cst) noexcept;
    159     bool compare_exchange_strong(integral& expc, integral desr,
    160                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    161     bool compare_exchange_strong(integral& expc, integral desr,
    162                                  memory_order m = memory_order_seq_cst) noexcept;
    163 
    164     integral
    165         fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    166     integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    167     integral
    168         fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    169     integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    170     integral
    171         fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    172     integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    173     integral
    174         fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    175     integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    176     integral
    177         fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    178     integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
    179 
    180     atomic() noexcept = default;
    181     constexpr atomic(integral desr) noexcept;
    182     atomic(const atomic&) = delete;
    183     atomic& operator=(const atomic&) = delete;
    184     atomic& operator=(const atomic&) volatile = delete;
    185     integral operator=(integral desr) volatile noexcept;
    186     integral operator=(integral desr) noexcept;
    187 
    188     integral operator++(int) volatile noexcept;
    189     integral operator++(int) noexcept;
    190     integral operator--(int) volatile noexcept;
    191     integral operator--(int) noexcept;
    192     integral operator++() volatile noexcept;
    193     integral operator++() noexcept;
    194     integral operator--() volatile noexcept;
    195     integral operator--() noexcept;
    196     integral operator+=(integral op) volatile noexcept;
    197     integral operator+=(integral op) noexcept;
    198     integral operator-=(integral op) volatile noexcept;
    199     integral operator-=(integral op) noexcept;
    200     integral operator&=(integral op) volatile noexcept;
    201     integral operator&=(integral op) noexcept;
    202     integral operator|=(integral op) volatile noexcept;
    203     integral operator|=(integral op) noexcept;
    204     integral operator^=(integral op) volatile noexcept;
    205     integral operator^=(integral op) noexcept;
    206 };
    207 
    208 template <class T>
    209 struct atomic<T*>
    210 {
    211     static constexpr bool is_always_lock_free;
    212     bool is_lock_free() const volatile noexcept;
    213     bool is_lock_free() const noexcept;
    214     void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    215     void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    216     T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    217     T* load(memory_order m = memory_order_seq_cst) const noexcept;
    218     operator T*() const volatile noexcept;
    219     operator T*() const noexcept;
    220     T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    221     T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    222     bool compare_exchange_weak(T*& expc, T* desr,
    223                                memory_order s, memory_order f) volatile noexcept;
    224     bool compare_exchange_weak(T*& expc, T* desr,
    225                                memory_order s, memory_order f) noexcept;
    226     bool compare_exchange_strong(T*& expc, T* desr,
    227                                  memory_order s, memory_order f) volatile noexcept;
    228     bool compare_exchange_strong(T*& expc, T* desr,
    229                                  memory_order s, memory_order f) noexcept;
    230     bool compare_exchange_weak(T*& expc, T* desr,
    231                                memory_order m = memory_order_seq_cst) volatile noexcept;
    232     bool compare_exchange_weak(T*& expc, T* desr,
    233                                memory_order m = memory_order_seq_cst) noexcept;
    234     bool compare_exchange_strong(T*& expc, T* desr,
    235                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    236     bool compare_exchange_strong(T*& expc, T* desr,
    237                                  memory_order m = memory_order_seq_cst) noexcept;
    238     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    239     T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    240     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    241     T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    242 
    243     atomic() noexcept = default;
    244     constexpr atomic(T* desr) noexcept;
    245     atomic(const atomic&) = delete;
    246     atomic& operator=(const atomic&) = delete;
    247     atomic& operator=(const atomic&) volatile = delete;
    248 
    249     T* operator=(T*) volatile noexcept;
    250     T* operator=(T*) noexcept;
    251     T* operator++(int) volatile noexcept;
    252     T* operator++(int) noexcept;
    253     T* operator--(int) volatile noexcept;
    254     T* operator--(int) noexcept;
    255     T* operator++() volatile noexcept;
    256     T* operator++() noexcept;
    257     T* operator--() volatile noexcept;
    258     T* operator--() noexcept;
    259     T* operator+=(ptrdiff_t op) volatile noexcept;
    260     T* operator+=(ptrdiff_t op) noexcept;
    261     T* operator-=(ptrdiff_t op) volatile noexcept;
    262     T* operator-=(ptrdiff_t op) noexcept;
    263 };
    264 
    265 
    266 template <class T>
    267     bool
    268     atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
    269 
    270 template <class T>
    271     bool
    272     atomic_is_lock_free(const atomic<T>* obj) noexcept;
    273 
    274 template <class T>
    275     void
    276     atomic_init(volatile atomic<T>* obj, T desr) noexcept;
    277 
    278 template <class T>
    279     void
    280     atomic_init(atomic<T>* obj, T desr) noexcept;
    281 
    282 template <class T>
    283     void
    284     atomic_store(volatile atomic<T>* obj, T desr) noexcept;
    285 
    286 template <class T>
    287     void
    288     atomic_store(atomic<T>* obj, T desr) noexcept;
    289 
    290 template <class T>
    291     void
    292     atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    293 
    294 template <class T>
    295     void
    296     atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    297 
    298 template <class T>
    299     T
    300     atomic_load(const volatile atomic<T>* obj) noexcept;
    301 
    302 template <class T>
    303     T
    304     atomic_load(const atomic<T>* obj) noexcept;
    305 
    306 template <class T>
    307     T
    308     atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
    309 
    310 template <class T>
    311     T
    312     atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
    313 
    314 template <class T>
    315     T
    316     atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
    317 
    318 template <class T>
    319     T
    320     atomic_exchange(atomic<T>* obj, T desr) noexcept;
    321 
    322 template <class T>
    323     T
    324     atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
    325 
    326 template <class T>
    327     T
    328     atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
    329 
    330 template <class T>
    331     bool
    332     atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    333 
    334 template <class T>
    335     bool
    336     atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
    337 
    338 template <class T>
    339     bool
    340     atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
    341 
    342 template <class T>
    343     bool
    344     atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
    345 
    346 template <class T>
    347     bool
    348     atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
    349                                           T desr,
    350                                           memory_order s, memory_order f) noexcept;
    351 
    352 template <class T>
    353     bool
    354     atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
    355                                           memory_order s, memory_order f) noexcept;
    356 
    357 template <class T>
    358     bool
    359     atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
    360                                             T* expc, T desr,
    361                                             memory_order s, memory_order f) noexcept;
    362 
    363 template <class T>
    364     bool
    365     atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
    366                                             T desr,
    367                                             memory_order s, memory_order f) noexcept;
    368 
    369 template <class Integral>
    370     Integral
    371     atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
    372 
    373 template <class Integral>
    374     Integral
    375     atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
    376 
    377 template <class Integral>
    378     Integral
    379     atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
    380                               memory_order m) noexcept;
    381 template <class Integral>
    382     Integral
    383     atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
    384                               memory_order m) noexcept;
    385 template <class Integral>
    386     Integral
    387     atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
    388 
    389 template <class Integral>
    390     Integral
    391     atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
    392 
    393 template <class Integral>
    394     Integral
    395     atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
    396                               memory_order m) noexcept;
    397 template <class Integral>
    398     Integral
    399     atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
    400                               memory_order m) noexcept;
    401 template <class Integral>
    402     Integral
    403     atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
    404 
    405 template <class Integral>
    406     Integral
    407     atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
    408 
    409 template <class Integral>
    410     Integral
    411     atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
    412                               memory_order m) noexcept;
    413 template <class Integral>
    414     Integral
    415     atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
    416                               memory_order m) noexcept;
    417 template <class Integral>
    418     Integral
    419     atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
    420 
    421 template <class Integral>
    422     Integral
    423     atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
    424 
    425 template <class Integral>
    426     Integral
    427     atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
    428                              memory_order m) noexcept;
    429 template <class Integral>
    430     Integral
    431     atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
    432                              memory_order m) noexcept;
    433 template <class Integral>
    434     Integral
    435     atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
    436 
    437 template <class Integral>
    438     Integral
    439     atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
    440 
    441 template <class Integral>
    442     Integral
    443     atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
    444                               memory_order m) noexcept;
    445 template <class Integral>
    446     Integral
    447     atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
    448                               memory_order m) noexcept;
    449 
    450 template <class T>
    451     T*
    452     atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    453 
    454 template <class T>
    455     T*
    456     atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
    457 
    458 template <class T>
    459     T*
    460     atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    461                               memory_order m) noexcept;
    462 template <class T>
    463     T*
    464     atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    465 
    466 template <class T>
    467     T*
    468     atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
    469 
    470 template <class T>
    471     T*
    472     atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
    473 
    474 template <class T>
    475     T*
    476     atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
    477                               memory_order m) noexcept;
    478 template <class T>
    479     T*
    480     atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
    481 
    482 // Atomics for standard typedef types
    483 
    484 typedef atomic<bool>               atomic_bool;
    485 typedef atomic<char>               atomic_char;
    486 typedef atomic<signed char>        atomic_schar;
    487 typedef atomic<unsigned char>      atomic_uchar;
    488 typedef atomic<short>              atomic_short;
    489 typedef atomic<unsigned short>     atomic_ushort;
    490 typedef atomic<int>                atomic_int;
    491 typedef atomic<unsigned int>       atomic_uint;
    492 typedef atomic<long>               atomic_long;
    493 typedef atomic<unsigned long>      atomic_ulong;
    494 typedef atomic<long long>          atomic_llong;
    495 typedef atomic<unsigned long long> atomic_ullong;
    496 typedef atomic<char16_t>           atomic_char16_t;
    497 typedef atomic<char32_t>           atomic_char32_t;
    498 typedef atomic<wchar_t>            atomic_wchar_t;
    499 
    500 typedef atomic<int_least8_t>   atomic_int_least8_t;
    501 typedef atomic<uint_least8_t>  atomic_uint_least8_t;
    502 typedef atomic<int_least16_t>  atomic_int_least16_t;
    503 typedef atomic<uint_least16_t> atomic_uint_least16_t;
    504 typedef atomic<int_least32_t>  atomic_int_least32_t;
    505 typedef atomic<uint_least32_t> atomic_uint_least32_t;
    506 typedef atomic<int_least64_t>  atomic_int_least64_t;
    507 typedef atomic<uint_least64_t> atomic_uint_least64_t;
    508 
    509 typedef atomic<int_fast8_t>   atomic_int_fast8_t;
    510 typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
    511 typedef atomic<int_fast16_t>  atomic_int_fast16_t;
    512 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
    513 typedef atomic<int_fast32_t>  atomic_int_fast32_t;
    514 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
    515 typedef atomic<int_fast64_t>  atomic_int_fast64_t;
    516 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
    517 
    518 typedef atomic<intptr_t>  atomic_intptr_t;
    519 typedef atomic<uintptr_t> atomic_uintptr_t;
    520 typedef atomic<size_t>    atomic_size_t;
    521 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
    522 typedef atomic<intmax_t>  atomic_intmax_t;
    523 typedef atomic<uintmax_t> atomic_uintmax_t;
    524 
    525 // fences
    526 
    527 void atomic_thread_fence(memory_order m) noexcept;
    528 void atomic_signal_fence(memory_order m) noexcept;
    529 
    530 }  // std
    531 
    532 */
    533 
    534 #include <__config>
    535 #include <cstddef>
    536 #include <cstdint>
    537 #include <type_traits>
    538 
    539 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
    540 #pragma GCC system_header
    541 #endif
    542 
    543 #ifdef _LIBCPP_HAS_NO_THREADS
    544 #error <atomic> is not supported on this single threaded system
    545 #endif
    546 #if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
    547 #error <atomic> is not implemented
    548 #endif
    549 
    550 #if _LIBCPP_STD_VER > 14
    551 // FIXME: use the right feature test macro value as chose by SG10.
    552 # define __cpp_lib_atomic_is_always_lock_free 201603L
    553 #endif
    554 
    555 _LIBCPP_BEGIN_NAMESPACE_STD
    556 
    557 typedef enum memory_order
    558 {
    559     memory_order_relaxed, memory_order_consume, memory_order_acquire,
    560     memory_order_release, memory_order_acq_rel, memory_order_seq_cst
    561 } memory_order;
    562 
    563 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
    564 namespace __gcc_atomic {
// Storage wrapper emulating C11's _Atomic(T) on top of the GCC __atomic_*
// builtins.  The wrapped value is the sole data member, so &__a_value can be
// handed directly to the builtins.
template <typename _Tp>
struct __gcc_atomic_t {

// The trivially-copyable requirement is only enforced when _GNUC_VER >= 501
// (presumably older GCCs lack a usable is_trivially_copyable — see __config).
#if _GNUC_VER >= 501
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __gcc_atomic_t() _NOEXCEPT = default;
#else
    // Fallback for compilers without defaulted special member functions.
    __gcc_atomic_t() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
// Let the rest of this header, written against the C11 _Atomic(T) spelling,
// use that spelling with this wrapper type.
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
    584 
    585 template <typename _Tp> _Tp __create();
    586 
    587 template <typename _Tp, typename _Td>
    588 typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    589     __test_atomic_assignable(int);
    590 template <typename _Tp, typename _Up>
    591 __two __test_atomic_assignable(...);
    592 
    593 template <typename _Tp, typename _Td>
    594 struct __can_assign {
    595   static const bool value =
    596       sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
    597 };
    598 
    599 static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
    600   // Avoid switch statement to make this a constexpr.
    601   return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
    602          (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
    603           (__order == memory_order_release ? __ATOMIC_RELEASE:
    604            (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
    605             (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
    606               __ATOMIC_CONSUME))));
    607 }
    608 
    609 static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
    610   // Avoid switch statement to make this a constexpr.
    611   return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
    612          (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
    613           (__order == memory_order_release ? __ATOMIC_RELAXED:
    614            (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
    615             (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
    616               __ATOMIC_CONSUME))));
    617 }
    618 
    619 } // namespace __gcc_atomic
    620 
    621 template <typename _Tp>
    622 static inline
    623 typename enable_if<
    624     __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
    625 __c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
    626   __a->__a_value = __val;
    627 }
    628 
    629 template <typename _Tp>
    630 static inline
    631 typename enable_if<
    632     !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
    633      __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
    634 __c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
    635   // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
    636   // the default operator= in an object is not volatile, a byte-by-byte copy
    637   // is required.
    638   volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
    639   volatile char* end = to + sizeof(_Tp);
    640   char* from = reinterpret_cast<char*>(&__val);
    641   while (to != end) {
    642     *to++ = *from++;
    643   }
    644 }
    645 
    646 template <typename _Tp>
    647 static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
    648   __a->__a_value = __val;
    649 }
    650 
    651 static inline void __c11_atomic_thread_fence(memory_order __order) {
    652   __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
    653 }
    654 
    655 static inline void __c11_atomic_signal_fence(memory_order __order) {
    656   __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
    657 }
    658 
    659 template <typename _Tp>
    660 static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
    661                                       memory_order __order) {
    662   return __atomic_store(&__a->__a_value, &__val,
    663                         __gcc_atomic::__to_gcc_order(__order));
    664 }
    665 
    666 template <typename _Tp>
    667 static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
    668                                       memory_order __order) {
    669   __atomic_store(&__a->__a_value, &__val,
    670                  __gcc_atomic::__to_gcc_order(__order));
    671 }
    672 
    673 template <typename _Tp>
    674 static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
    675                                     memory_order __order) {
    676   _Tp __ret;
    677   __atomic_load(&__a->__a_value, &__ret,
    678                 __gcc_atomic::__to_gcc_order(__order));
    679   return __ret;
    680 }
    681 
    682 template <typename _Tp>
    683 static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
    684   _Tp __ret;
    685   __atomic_load(&__a->__a_value, &__ret,
    686                 __gcc_atomic::__to_gcc_order(__order));
    687   return __ret;
    688 }
    689 
    690 template <typename _Tp>
    691 static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
    692                                         _Tp __value, memory_order __order) {
    693   _Tp __ret;
    694   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    695                     __gcc_atomic::__to_gcc_order(__order));
    696   return __ret;
    697 }
    698 
    699 template <typename _Tp>
    700 static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
    701                                         memory_order __order) {
    702   _Tp __ret;
    703   __atomic_exchange(&__a->__a_value, &__value, &__ret,
    704                     __gcc_atomic::__to_gcc_order(__order));
    705   return __ret;
    706 }
    707 
    708 template <typename _Tp>
    709 static inline bool __c11_atomic_compare_exchange_strong(
    710     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    711     memory_order __success, memory_order __failure) {
    712   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    713                                    false,
    714                                    __gcc_atomic::__to_gcc_order(__success),
    715                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    716 }
    717 
    718 template <typename _Tp>
    719 static inline bool __c11_atomic_compare_exchange_strong(
    720     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    721     memory_order __failure) {
    722   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    723                                    false,
    724                                    __gcc_atomic::__to_gcc_order(__success),
    725                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    726 }
    727 
    728 template <typename _Tp>
    729 static inline bool __c11_atomic_compare_exchange_weak(
    730     volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    731     memory_order __success, memory_order __failure) {
    732   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    733                                    true,
    734                                    __gcc_atomic::__to_gcc_order(__success),
    735                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    736 }
    737 
    738 template <typename _Tp>
    739 static inline bool __c11_atomic_compare_exchange_weak(
    740     _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    741     memory_order __failure) {
    742   return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
    743                                    true,
    744                                    __gcc_atomic::__to_gcc_order(__success),
    745                                    __gcc_atomic::__to_gcc_failure_order(__failure));
    746 }
    747 
// __skip_amt<_Tp>::value is the factor by which fetch_add/fetch_sub deltas
// are scaled.  For non-pointer types no scaling is needed.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

// Pointer arithmetic advances by whole objects, so scale the delta by the
// size of the pointee.
template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (These specializations have no 'value' member, so any use is ill-formed.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
    760 
    761 template <typename _Tp, typename _Td>
    762 static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
    763                                          _Td __delta, memory_order __order) {
    764   return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    765                             __gcc_atomic::__to_gcc_order(__order));
    766 }
    767 
    768 template <typename _Tp, typename _Td>
    769 static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
    770                                          memory_order __order) {
    771   return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    772                             __gcc_atomic::__to_gcc_order(__order));
    773 }
    774 
    775 template <typename _Tp, typename _Td>
    776 static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
    777                                          _Td __delta, memory_order __order) {
    778   return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    779                             __gcc_atomic::__to_gcc_order(__order));
    780 }
    781 
    782 template <typename _Tp, typename _Td>
    783 static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
    784                                          memory_order __order) {
    785   return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
    786                             __gcc_atomic::__to_gcc_order(__order));
    787 }
    788 
    789 template <typename _Tp>
    790 static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
    791                                          _Tp __pattern, memory_order __order) {
    792   return __atomic_fetch_and(&__a->__a_value, __pattern,
    793                             __gcc_atomic::__to_gcc_order(__order));
    794 }
    795 
    796 template <typename _Tp>
    797 static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
    798                                          _Tp __pattern, memory_order __order) {
    799   return __atomic_fetch_and(&__a->__a_value, __pattern,
    800                             __gcc_atomic::__to_gcc_order(__order));
    801 }
    802 
    803 template <typename _Tp>
    804 static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
    805                                         _Tp __pattern, memory_order __order) {
    806   return __atomic_fetch_or(&__a->__a_value, __pattern,
    807                            __gcc_atomic::__to_gcc_order(__order));
    808 }
    809 
    810 template <typename _Tp>
    811 static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
    812                                         memory_order __order) {
    813   return __atomic_fetch_or(&__a->__a_value, __pattern,
    814                            __gcc_atomic::__to_gcc_order(__order));
    815 }
    816 
    817 template <typename _Tp>
    818 static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
    819                                          _Tp __pattern, memory_order __order) {
    820   return __atomic_fetch_xor(&__a->__a_value, __pattern,
    821                             __gcc_atomic::__to_gcc_order(__order));
    822 }
    823 
    824 template <typename _Tp>
    825 static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
    826                                          memory_order __order) {
    827   return __atomic_fetch_xor(&__a->__a_value, __pattern,
    828                             __gcc_atomic::__to_gcc_order(__order));
    829 }
    830 #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
    831 
    832 template <class _Tp>
    833 inline _LIBCPP_INLINE_VISIBILITY
    834 _Tp
    835 kill_dependency(_Tp __y) _NOEXCEPT
    836 {
    837     return __y;
    838 }
    839 
    840 #define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
    841 #define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
    842 #define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
    843 #define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
    844 #define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
    845 #define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
    846 #define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
    847 #define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
    848 #define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
    849 #define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
    850 
    851 // general atomic<T>
    852 
// Primary template: operations common to every atomic<T>
// (load/store/exchange/compare-exchange).  The bool parameter is true for
// non-bool integral types, which pick up the arithmetic specialization below.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // Underlying storage; mutable so const-qualified loads can take its
    // address for the atomic operations.
    mutable _Atomic(_Tp) __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  // Compile-time lock-freedom query, evaluated by the builtin on the size
  // of the stored representation.
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
    {
// Dispatch to whichever atomic backend this build is configured with.
#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
    return __c11_atomic_is_lock_free(sizeof(_Tp));
#else
    return __atomic_is_lock_free(sizeof(_Tp), 0);
#endif
    }
    // Non-volatile form forwards to the volatile one so the logic lives in
    // a single place.
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    // store/load: seq_cst by default, forwarding to the backend shims.
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    // Implicit conversion is a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    // compare_exchange: two-order forms take separate success/failure
    // orderings; on failure __e is updated with the observed value.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-order forms use __m for both the success and failure orderings.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // Default constructor leaves the value uninitialized (use atomic_init
    // or the value constructor below); spelled per dialect support.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    // Atomics are neither copyable nor copy-assignable.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
};
    949 
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-class definition for the static data member declared in
// __atomic_base (needed for ODR-use before C++17 inline variables).
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
    954 
    955 // atomic<Integral>
    956 
// Specialization for non-bool integral types: adds the arithmetic and
// bitwise fetch operations plus the corresponding operators on top of the
// primary template.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Each fetch_* returns the value held immediately BEFORE the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-increment/decrement return the OLD value; pre-forms and compound
    // assignments return the NEW value, reconstructed from the fetched one.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
   1035 
   1036 // atomic<T>
   1037 
// Primary atomic<T> template: inherits all operations from __atomic_base
// (the integral specialization is selected automatically by its bool
// parameter) and adds assignment from _Tp, which performs a seq_cst store.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Assignment stores with the default (seq_cst) ordering and, per the
    // standard, returns the stored value rather than a reference.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
   1055 
   1056 // atomic<T*>
   1057 
// Partial specialization for pointers: adds pointer arithmetic
// (fetch_add/fetch_sub in units of elements, plus the derived operators)
// on top of the generic base.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    // Assignment stores with seq_cst ordering and returns the stored pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // __op counts elements; scaling to bytes happens in the backend shim.
    // Each fetch_* returns the pointer held immediately BEFORE the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Post-forms return the OLD pointer; pre-forms and compound assignments
    // return the NEW one, reconstructed from the fetched value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
   1115 
   1116 // atomic_is_lock_free
   1117 
   1118 template <class _Tp>
   1119 inline _LIBCPP_INLINE_VISIBILITY
   1120 bool
   1121 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
   1122 {
   1123     return __o->is_lock_free();
   1124 }
   1125 
   1126 template <class _Tp>
   1127 inline _LIBCPP_INLINE_VISIBILITY
   1128 bool
   1129 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
   1130 {
   1131     return __o->is_lock_free();
   1132 }
   1133 
   1134 // atomic_init
   1135 
   1136 template <class _Tp>
   1137 inline _LIBCPP_INLINE_VISIBILITY
   1138 void
   1139 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1140 {
   1141     __c11_atomic_init(&__o->__a_, __d);
   1142 }
   1143 
   1144 template <class _Tp>
   1145 inline _LIBCPP_INLINE_VISIBILITY
   1146 void
   1147 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1148 {
   1149     __c11_atomic_init(&__o->__a_, __d);
   1150 }
   1151 
   1152 // atomic_store
   1153 
   1154 template <class _Tp>
   1155 inline _LIBCPP_INLINE_VISIBILITY
   1156 void
   1157 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1158 {
   1159     __o->store(__d);
   1160 }
   1161 
   1162 template <class _Tp>
   1163 inline _LIBCPP_INLINE_VISIBILITY
   1164 void
   1165 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1166 {
   1167     __o->store(__d);
   1168 }
   1169 
   1170 // atomic_store_explicit
   1171 
   1172 template <class _Tp>
   1173 inline _LIBCPP_INLINE_VISIBILITY
   1174 void
   1175 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1176 {
   1177     __o->store(__d, __m);
   1178 }
   1179 
   1180 template <class _Tp>
   1181 inline _LIBCPP_INLINE_VISIBILITY
   1182 void
   1183 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1184 {
   1185     __o->store(__d, __m);
   1186 }
   1187 
   1188 // atomic_load
   1189 
   1190 template <class _Tp>
   1191 inline _LIBCPP_INLINE_VISIBILITY
   1192 _Tp
   1193 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
   1194 {
   1195     return __o->load();
   1196 }
   1197 
   1198 template <class _Tp>
   1199 inline _LIBCPP_INLINE_VISIBILITY
   1200 _Tp
   1201 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
   1202 {
   1203     return __o->load();
   1204 }
   1205 
   1206 // atomic_load_explicit
   1207 
   1208 template <class _Tp>
   1209 inline _LIBCPP_INLINE_VISIBILITY
   1210 _Tp
   1211 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
   1212 {
   1213     return __o->load(__m);
   1214 }
   1215 
   1216 template <class _Tp>
   1217 inline _LIBCPP_INLINE_VISIBILITY
   1218 _Tp
   1219 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
   1220 {
   1221     return __o->load(__m);
   1222 }
   1223 
   1224 // atomic_exchange
   1225 
   1226 template <class _Tp>
   1227 inline _LIBCPP_INLINE_VISIBILITY
   1228 _Tp
   1229 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1230 {
   1231     return __o->exchange(__d);
   1232 }
   1233 
   1234 template <class _Tp>
   1235 inline _LIBCPP_INLINE_VISIBILITY
   1236 _Tp
   1237 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
   1238 {
   1239     return __o->exchange(__d);
   1240 }
   1241 
   1242 // atomic_exchange_explicit
   1243 
   1244 template <class _Tp>
   1245 inline _LIBCPP_INLINE_VISIBILITY
   1246 _Tp
   1247 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1248 {
   1249     return __o->exchange(__d, __m);
   1250 }
   1251 
   1252 template <class _Tp>
   1253 inline _LIBCPP_INLINE_VISIBILITY
   1254 _Tp
   1255 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
   1256 {
   1257     return __o->exchange(__d, __m);
   1258 }
   1259 
   1260 // atomic_compare_exchange_weak
   1261 
   1262 template <class _Tp>
   1263 inline _LIBCPP_INLINE_VISIBILITY
   1264 bool
   1265 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1266 {
   1267     return __o->compare_exchange_weak(*__e, __d);
   1268 }
   1269 
   1270 template <class _Tp>
   1271 inline _LIBCPP_INLINE_VISIBILITY
   1272 bool
   1273 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1274 {
   1275     return __o->compare_exchange_weak(*__e, __d);
   1276 }
   1277 
   1278 // atomic_compare_exchange_strong
   1279 
   1280 template <class _Tp>
   1281 inline _LIBCPP_INLINE_VISIBILITY
   1282 bool
   1283 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1284 {
   1285     return __o->compare_exchange_strong(*__e, __d);
   1286 }
   1287 
   1288 template <class _Tp>
   1289 inline _LIBCPP_INLINE_VISIBILITY
   1290 bool
   1291 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
   1292 {
   1293     return __o->compare_exchange_strong(*__e, __d);
   1294 }
   1295 
   1296 // atomic_compare_exchange_weak_explicit
   1297 
   1298 template <class _Tp>
   1299 inline _LIBCPP_INLINE_VISIBILITY
   1300 bool
   1301 atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
   1302                                       _Tp __d,
   1303                                       memory_order __s, memory_order __f) _NOEXCEPT
   1304 {
   1305     return __o->compare_exchange_weak(*__e, __d, __s, __f);
   1306 }
   1307 
   1308 template <class _Tp>
   1309 inline _LIBCPP_INLINE_VISIBILITY
   1310 bool
   1311 atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
   1312                                       memory_order __s, memory_order __f) _NOEXCEPT
   1313 {
   1314     return __o->compare_exchange_weak(*__e, __d, __s, __f);
   1315 }
   1316 
   1317 // atomic_compare_exchange_strong_explicit
   1318 
   1319 template <class _Tp>
   1320 inline _LIBCPP_INLINE_VISIBILITY
   1321 bool
   1322 atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
   1323                                         _Tp* __e, _Tp __d,
   1324                                         memory_order __s, memory_order __f) _NOEXCEPT
   1325 {
   1326     return __o->compare_exchange_strong(*__e, __d, __s, __f);
   1327 }
   1328 
   1329 template <class _Tp>
   1330 inline _LIBCPP_INLINE_VISIBILITY
   1331 bool
   1332 atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
   1333                                         _Tp __d,
   1334                                         memory_order __s, memory_order __f) _NOEXCEPT
   1335 {
   1336     return __o->compare_exchange_strong(*__e, __d, __s, __f);
   1337 }
   1338 
   1339 // atomic_fetch_add
   1340 
   1341 template <class _Tp>
   1342 inline _LIBCPP_INLINE_VISIBILITY
   1343 typename enable_if
   1344 <
   1345     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1346     _Tp
   1347 >::type
   1348 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1349 {
   1350     return __o->fetch_add(__op);
   1351 }
   1352 
   1353 template <class _Tp>
   1354 inline _LIBCPP_INLINE_VISIBILITY
   1355 typename enable_if
   1356 <
   1357     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1358     _Tp
   1359 >::type
   1360 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1361 {
   1362     return __o->fetch_add(__op);
   1363 }
   1364 
   1365 template <class _Tp>
   1366 inline _LIBCPP_INLINE_VISIBILITY
   1367 _Tp*
   1368 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1369 {
   1370     return __o->fetch_add(__op);
   1371 }
   1372 
   1373 template <class _Tp>
   1374 inline _LIBCPP_INLINE_VISIBILITY
   1375 _Tp*
   1376 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1377 {
   1378     return __o->fetch_add(__op);
   1379 }
   1380 
   1381 // atomic_fetch_add_explicit
   1382 
   1383 template <class _Tp>
   1384 inline _LIBCPP_INLINE_VISIBILITY
   1385 typename enable_if
   1386 <
   1387     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1388     _Tp
   1389 >::type
   1390 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1391 {
   1392     return __o->fetch_add(__op, __m);
   1393 }
   1394 
   1395 template <class _Tp>
   1396 inline _LIBCPP_INLINE_VISIBILITY
   1397 typename enable_if
   1398 <
   1399     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1400     _Tp
   1401 >::type
   1402 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1403 {
   1404     return __o->fetch_add(__op, __m);
   1405 }
   1406 
   1407 template <class _Tp>
   1408 inline _LIBCPP_INLINE_VISIBILITY
   1409 _Tp*
   1410 atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
   1411                           memory_order __m) _NOEXCEPT
   1412 {
   1413     return __o->fetch_add(__op, __m);
   1414 }
   1415 
   1416 template <class _Tp>
   1417 inline _LIBCPP_INLINE_VISIBILITY
   1418 _Tp*
   1419 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
   1420 {
   1421     return __o->fetch_add(__op, __m);
   1422 }
   1423 
   1424 // atomic_fetch_sub
   1425 
   1426 template <class _Tp>
   1427 inline _LIBCPP_INLINE_VISIBILITY
   1428 typename enable_if
   1429 <
   1430     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1431     _Tp
   1432 >::type
   1433 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1434 {
   1435     return __o->fetch_sub(__op);
   1436 }
   1437 
   1438 template <class _Tp>
   1439 inline _LIBCPP_INLINE_VISIBILITY
   1440 typename enable_if
   1441 <
   1442     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1443     _Tp
   1444 >::type
   1445 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1446 {
   1447     return __o->fetch_sub(__op);
   1448 }
   1449 
   1450 template <class _Tp>
   1451 inline _LIBCPP_INLINE_VISIBILITY
   1452 _Tp*
   1453 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1454 {
   1455     return __o->fetch_sub(__op);
   1456 }
   1457 
   1458 template <class _Tp>
   1459 inline _LIBCPP_INLINE_VISIBILITY
   1460 _Tp*
   1461 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
   1462 {
   1463     return __o->fetch_sub(__op);
   1464 }
   1465 
   1466 // atomic_fetch_sub_explicit
   1467 
   1468 template <class _Tp>
   1469 inline _LIBCPP_INLINE_VISIBILITY
   1470 typename enable_if
   1471 <
   1472     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1473     _Tp
   1474 >::type
   1475 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1476 {
   1477     return __o->fetch_sub(__op, __m);
   1478 }
   1479 
   1480 template <class _Tp>
   1481 inline _LIBCPP_INLINE_VISIBILITY
   1482 typename enable_if
   1483 <
   1484     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1485     _Tp
   1486 >::type
   1487 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1488 {
   1489     return __o->fetch_sub(__op, __m);
   1490 }
   1491 
   1492 template <class _Tp>
   1493 inline _LIBCPP_INLINE_VISIBILITY
   1494 _Tp*
   1495 atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
   1496                           memory_order __m) _NOEXCEPT
   1497 {
   1498     return __o->fetch_sub(__op, __m);
   1499 }
   1500 
   1501 template <class _Tp>
   1502 inline _LIBCPP_INLINE_VISIBILITY
   1503 _Tp*
   1504 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
   1505 {
   1506     return __o->fetch_sub(__op, __m);
   1507 }
   1508 
   1509 // atomic_fetch_and
   1510 
   1511 template <class _Tp>
   1512 inline _LIBCPP_INLINE_VISIBILITY
   1513 typename enable_if
   1514 <
   1515     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1516     _Tp
   1517 >::type
   1518 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1519 {
   1520     return __o->fetch_and(__op);
   1521 }
   1522 
   1523 template <class _Tp>
   1524 inline _LIBCPP_INLINE_VISIBILITY
   1525 typename enable_if
   1526 <
   1527     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1528     _Tp
   1529 >::type
   1530 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1531 {
   1532     return __o->fetch_and(__op);
   1533 }
   1534 
   1535 // atomic_fetch_and_explicit
   1536 
   1537 template <class _Tp>
   1538 inline _LIBCPP_INLINE_VISIBILITY
   1539 typename enable_if
   1540 <
   1541     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1542     _Tp
   1543 >::type
   1544 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1545 {
   1546     return __o->fetch_and(__op, __m);
   1547 }
   1548 
   1549 template <class _Tp>
   1550 inline _LIBCPP_INLINE_VISIBILITY
   1551 typename enable_if
   1552 <
   1553     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1554     _Tp
   1555 >::type
   1556 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1557 {
   1558     return __o->fetch_and(__op, __m);
   1559 }
   1560 
   1561 // atomic_fetch_or
   1562 
   1563 template <class _Tp>
   1564 inline _LIBCPP_INLINE_VISIBILITY
   1565 typename enable_if
   1566 <
   1567     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1568     _Tp
   1569 >::type
   1570 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1571 {
   1572     return __o->fetch_or(__op);
   1573 }
   1574 
   1575 template <class _Tp>
   1576 inline _LIBCPP_INLINE_VISIBILITY
   1577 typename enable_if
   1578 <
   1579     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1580     _Tp
   1581 >::type
   1582 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1583 {
   1584     return __o->fetch_or(__op);
   1585 }
   1586 
   1587 // atomic_fetch_or_explicit
   1588 
   1589 template <class _Tp>
   1590 inline _LIBCPP_INLINE_VISIBILITY
   1591 typename enable_if
   1592 <
   1593     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1594     _Tp
   1595 >::type
   1596 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1597 {
   1598     return __o->fetch_or(__op, __m);
   1599 }
   1600 
   1601 template <class _Tp>
   1602 inline _LIBCPP_INLINE_VISIBILITY
   1603 typename enable_if
   1604 <
   1605     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1606     _Tp
   1607 >::type
   1608 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1609 {
   1610     return __o->fetch_or(__op, __m);
   1611 }
   1612 
   1613 // atomic_fetch_xor
   1614 
   1615 template <class _Tp>
   1616 inline _LIBCPP_INLINE_VISIBILITY
   1617 typename enable_if
   1618 <
   1619     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1620     _Tp
   1621 >::type
   1622 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1623 {
   1624     return __o->fetch_xor(__op);
   1625 }
   1626 
   1627 template <class _Tp>
   1628 inline _LIBCPP_INLINE_VISIBILITY
   1629 typename enable_if
   1630 <
   1631     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1632     _Tp
   1633 >::type
   1634 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
   1635 {
   1636     return __o->fetch_xor(__op);
   1637 }
   1638 
   1639 // atomic_fetch_xor_explicit
   1640 
   1641 template <class _Tp>
   1642 inline _LIBCPP_INLINE_VISIBILITY
   1643 typename enable_if
   1644 <
   1645     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1646     _Tp
   1647 >::type
   1648 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1649 {
   1650     return __o->fetch_xor(__op, __m);
   1651 }
   1652 
   1653 template <class _Tp>
   1654 inline _LIBCPP_INLINE_VISIBILITY
   1655 typename enable_if
   1656 <
   1657     is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
   1658     _Tp
   1659 >::type
   1660 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
   1661 {
   1662     return __o->fetch_xor(__op, __m);
   1663 }
   1664 
// flag type and operations

// std::atomic_flag: the minimal atomic boolean flag type.  It wraps a
// single compiler-native _Atomic(bool), and every operation lowers
// directly to a __c11_atomic_* builtin, so no library-side lock is used.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;   // the underlying flag state

    // Atomically set the flag to true and return the value it held
    // immediately beforehand (an exchange under ordering __m).
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    // Atomically store false into the flag under ordering __m.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    // Default constructor: defaulted when the compiler supports it,
    // otherwise value-initializes the flag.  Use ATOMIC_FLAG_INIT for a
    // portably well-defined initial (clear) state.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    // libc++ extension: construct with an explicit initial value.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // atomic_flag is neither copyable nor copy-assignable; on compilers
    // without "= delete" this is emulated by private, undefined members.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;
   1705 
   1706 inline _LIBCPP_INLINE_VISIBILITY
   1707 bool
   1708 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
   1709 {
   1710     return __o->test_and_set();
   1711 }
   1712 
   1713 inline _LIBCPP_INLINE_VISIBILITY
   1714 bool
   1715 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
   1716 {
   1717     return __o->test_and_set();
   1718 }
   1719 
   1720 inline _LIBCPP_INLINE_VISIBILITY
   1721 bool
   1722 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1723 {
   1724     return __o->test_and_set(__m);
   1725 }
   1726 
   1727 inline _LIBCPP_INLINE_VISIBILITY
   1728 bool
   1729 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1730 {
   1731     return __o->test_and_set(__m);
   1732 }
   1733 
   1734 inline _LIBCPP_INLINE_VISIBILITY
   1735 void
   1736 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
   1737 {
   1738     __o->clear();
   1739 }
   1740 
   1741 inline _LIBCPP_INLINE_VISIBILITY
   1742 void
   1743 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
   1744 {
   1745     __o->clear();
   1746 }
   1747 
   1748 inline _LIBCPP_INLINE_VISIBILITY
   1749 void
   1750 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
   1751 {
   1752     __o->clear(__m);
   1753 }
   1754 
   1755 inline _LIBCPP_INLINE_VISIBILITY
   1756 void
   1757 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
   1758 {
   1759     __o->clear(__m);
   1760 }
   1761 
   1762 // fences
   1763 
   1764 inline _LIBCPP_INLINE_VISIBILITY
   1765 void
   1766 atomic_thread_fence(memory_order __m) _NOEXCEPT
   1767 {
   1768     __c11_atomic_thread_fence(__m);
   1769 }
   1770 
   1771 inline _LIBCPP_INLINE_VISIBILITY
   1772 void
   1773 atomic_signal_fence(memory_order __m) _NOEXCEPT
   1774 {
   1775     __c11_atomic_signal_fence(__m);
   1776 }
   1777 
// Atomics for standard typedef types

// Convenience aliases mirroring the C11 <stdatomic.h> names: one
// atomic_X typedef for each builtin integer/character type ...
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// ... for each <cstdint> least-width typedef ...
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// ... for each fast-width typedef ...
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// ... and for the pointer-sized / size / max-width typedefs.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// Initializes an atomic_flag to the clear (false) state.
#define ATOMIC_FLAG_INIT {false}
// Initializes an atomic<T> object to the value __v.
#define ATOMIC_VAR_INIT(__v) {__v}
   1823 
   1824 _LIBCPP_END_NAMESPACE_STD
   1825 
   1826 #endif  // _LIBCPP_ATOMIC
   1827