1 // -*- C++ -*- 2 //===--------------------------- atomic -----------------------------------===// 3 // 4 // The LLVM Compiler Infrastructure 5 // 6 // This file is distributed under the University of Illinois Open Source 7 // License. See LICENSE.TXT for details. 8 // 9 //===----------------------------------------------------------------------===// 10 11 #ifndef _LIBCPP_ATOMIC 12 #define _LIBCPP_ATOMIC 13 14 /* 15 atomic synopsis 16 17 namespace std 18 { 19 20 // feature test macro 21 22 #define __cpp_lib_atomic_is_always_lock_free // as specified by SG10 23 24 // order and consistency 25 26 typedef enum memory_order 27 { 28 memory_order_relaxed, 29 memory_order_consume, // load-consume 30 memory_order_acquire, // load-acquire 31 memory_order_release, // store-release 32 memory_order_acq_rel, // store-release load-acquire 33 memory_order_seq_cst // store-release load-acquire 34 } memory_order; 35 36 template <class T> T kill_dependency(T y) noexcept; 37 38 // lock-free property 39 40 #define ATOMIC_BOOL_LOCK_FREE unspecified 41 #define ATOMIC_CHAR_LOCK_FREE unspecified 42 #define ATOMIC_CHAR16_T_LOCK_FREE unspecified 43 #define ATOMIC_CHAR32_T_LOCK_FREE unspecified 44 #define ATOMIC_WCHAR_T_LOCK_FREE unspecified 45 #define ATOMIC_SHORT_LOCK_FREE unspecified 46 #define ATOMIC_INT_LOCK_FREE unspecified 47 #define ATOMIC_LONG_LOCK_FREE unspecified 48 #define ATOMIC_LLONG_LOCK_FREE unspecified 49 #define ATOMIC_POINTER_LOCK_FREE unspecified 50 51 // flag type and operations 52 53 typedef struct atomic_flag 54 { 55 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept; 56 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept; 57 void clear(memory_order m = memory_order_seq_cst) volatile noexcept; 58 void clear(memory_order m = memory_order_seq_cst) noexcept; 59 atomic_flag() noexcept = default; 60 atomic_flag(const atomic_flag&) = delete; 61 atomic_flag& operator=(const atomic_flag&) = delete; 62 atomic_flag& operator=(const 
atomic_flag&) volatile = delete; 63 } atomic_flag; 64 65 bool 66 atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept; 67 68 bool 69 atomic_flag_test_and_set(atomic_flag* obj) noexcept; 70 71 bool 72 atomic_flag_test_and_set_explicit(volatile atomic_flag* obj, 73 memory_order m) noexcept; 74 75 bool 76 atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept; 77 78 void 79 atomic_flag_clear(volatile atomic_flag* obj) noexcept; 80 81 void 82 atomic_flag_clear(atomic_flag* obj) noexcept; 83 84 void 85 atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept; 86 87 void 88 atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept; 89 90 #define ATOMIC_FLAG_INIT see below 91 #define ATOMIC_VAR_INIT(value) see below 92 93 template <class T> 94 struct atomic 95 { 96 static constexpr bool is_always_lock_free; 97 bool is_lock_free() const volatile noexcept; 98 bool is_lock_free() const noexcept; 99 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept; 100 void store(T desr, memory_order m = memory_order_seq_cst) noexcept; 101 T load(memory_order m = memory_order_seq_cst) const volatile noexcept; 102 T load(memory_order m = memory_order_seq_cst) const noexcept; 103 operator T() const volatile noexcept; 104 operator T() const noexcept; 105 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept; 106 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept; 107 bool compare_exchange_weak(T& expc, T desr, 108 memory_order s, memory_order f) volatile noexcept; 109 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept; 110 bool compare_exchange_strong(T& expc, T desr, 111 memory_order s, memory_order f) volatile noexcept; 112 bool compare_exchange_strong(T& expc, T desr, 113 memory_order s, memory_order f) noexcept; 114 bool compare_exchange_weak(T& expc, T desr, 115 memory_order m = memory_order_seq_cst) volatile noexcept; 116 bool 
compare_exchange_weak(T& expc, T desr, 117 memory_order m = memory_order_seq_cst) noexcept; 118 bool compare_exchange_strong(T& expc, T desr, 119 memory_order m = memory_order_seq_cst) volatile noexcept; 120 bool compare_exchange_strong(T& expc, T desr, 121 memory_order m = memory_order_seq_cst) noexcept; 122 123 atomic() noexcept = default; 124 constexpr atomic(T desr) noexcept; 125 atomic(const atomic&) = delete; 126 atomic& operator=(const atomic&) = delete; 127 atomic& operator=(const atomic&) volatile = delete; 128 T operator=(T) volatile noexcept; 129 T operator=(T) noexcept; 130 }; 131 132 template <> 133 struct atomic<integral> 134 { 135 static constexpr bool is_always_lock_free; 136 bool is_lock_free() const volatile noexcept; 137 bool is_lock_free() const noexcept; 138 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept; 139 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept; 140 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept; 141 integral load(memory_order m = memory_order_seq_cst) const noexcept; 142 operator integral() const volatile noexcept; 143 operator integral() const noexcept; 144 integral exchange(integral desr, 145 memory_order m = memory_order_seq_cst) volatile noexcept; 146 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept; 147 bool compare_exchange_weak(integral& expc, integral desr, 148 memory_order s, memory_order f) volatile noexcept; 149 bool compare_exchange_weak(integral& expc, integral desr, 150 memory_order s, memory_order f) noexcept; 151 bool compare_exchange_strong(integral& expc, integral desr, 152 memory_order s, memory_order f) volatile noexcept; 153 bool compare_exchange_strong(integral& expc, integral desr, 154 memory_order s, memory_order f) noexcept; 155 bool compare_exchange_weak(integral& expc, integral desr, 156 memory_order m = memory_order_seq_cst) volatile noexcept; 157 bool 
compare_exchange_weak(integral& expc, integral desr, 158 memory_order m = memory_order_seq_cst) noexcept; 159 bool compare_exchange_strong(integral& expc, integral desr, 160 memory_order m = memory_order_seq_cst) volatile noexcept; 161 bool compare_exchange_strong(integral& expc, integral desr, 162 memory_order m = memory_order_seq_cst) noexcept; 163 164 integral 165 fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 166 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept; 167 integral 168 fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 169 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept; 170 integral 171 fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 172 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept; 173 integral 174 fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 175 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept; 176 integral 177 fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept; 178 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept; 179 180 atomic() noexcept = default; 181 constexpr atomic(integral desr) noexcept; 182 atomic(const atomic&) = delete; 183 atomic& operator=(const atomic&) = delete; 184 atomic& operator=(const atomic&) volatile = delete; 185 integral operator=(integral desr) volatile noexcept; 186 integral operator=(integral desr) noexcept; 187 188 integral operator++(int) volatile noexcept; 189 integral operator++(int) noexcept; 190 integral operator--(int) volatile noexcept; 191 integral operator--(int) noexcept; 192 integral operator++() volatile noexcept; 193 integral operator++() noexcept; 194 integral operator--() volatile noexcept; 195 integral operator--() noexcept; 196 integral operator+=(integral op) volatile noexcept; 197 
integral operator+=(integral op) noexcept; 198 integral operator-=(integral op) volatile noexcept; 199 integral operator-=(integral op) noexcept; 200 integral operator&=(integral op) volatile noexcept; 201 integral operator&=(integral op) noexcept; 202 integral operator|=(integral op) volatile noexcept; 203 integral operator|=(integral op) noexcept; 204 integral operator^=(integral op) volatile noexcept; 205 integral operator^=(integral op) noexcept; 206 }; 207 208 template <class T> 209 struct atomic<T*> 210 { 211 static constexpr bool is_always_lock_free; 212 bool is_lock_free() const volatile noexcept; 213 bool is_lock_free() const noexcept; 214 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept; 215 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept; 216 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept; 217 T* load(memory_order m = memory_order_seq_cst) const noexcept; 218 operator T*() const volatile noexcept; 219 operator T*() const noexcept; 220 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept; 221 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept; 222 bool compare_exchange_weak(T*& expc, T* desr, 223 memory_order s, memory_order f) volatile noexcept; 224 bool compare_exchange_weak(T*& expc, T* desr, 225 memory_order s, memory_order f) noexcept; 226 bool compare_exchange_strong(T*& expc, T* desr, 227 memory_order s, memory_order f) volatile noexcept; 228 bool compare_exchange_strong(T*& expc, T* desr, 229 memory_order s, memory_order f) noexcept; 230 bool compare_exchange_weak(T*& expc, T* desr, 231 memory_order m = memory_order_seq_cst) volatile noexcept; 232 bool compare_exchange_weak(T*& expc, T* desr, 233 memory_order m = memory_order_seq_cst) noexcept; 234 bool compare_exchange_strong(T*& expc, T* desr, 235 memory_order m = memory_order_seq_cst) volatile noexcept; 236 bool compare_exchange_strong(T*& expc, T* desr, 237 memory_order m = 
memory_order_seq_cst) noexcept; 238 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept; 239 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept; 240 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept; 241 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept; 242 243 atomic() noexcept = default; 244 constexpr atomic(T* desr) noexcept; 245 atomic(const atomic&) = delete; 246 atomic& operator=(const atomic&) = delete; 247 atomic& operator=(const atomic&) volatile = delete; 248 249 T* operator=(T*) volatile noexcept; 250 T* operator=(T*) noexcept; 251 T* operator++(int) volatile noexcept; 252 T* operator++(int) noexcept; 253 T* operator--(int) volatile noexcept; 254 T* operator--(int) noexcept; 255 T* operator++() volatile noexcept; 256 T* operator++() noexcept; 257 T* operator--() volatile noexcept; 258 T* operator--() noexcept; 259 T* operator+=(ptrdiff_t op) volatile noexcept; 260 T* operator+=(ptrdiff_t op) noexcept; 261 T* operator-=(ptrdiff_t op) volatile noexcept; 262 T* operator-=(ptrdiff_t op) noexcept; 263 }; 264 265 266 template <class T> 267 bool 268 atomic_is_lock_free(const volatile atomic<T>* obj) noexcept; 269 270 template <class T> 271 bool 272 atomic_is_lock_free(const atomic<T>* obj) noexcept; 273 274 template <class T> 275 void 276 atomic_init(volatile atomic<T>* obj, T desr) noexcept; 277 278 template <class T> 279 void 280 atomic_init(atomic<T>* obj, T desr) noexcept; 281 282 template <class T> 283 void 284 atomic_store(volatile atomic<T>* obj, T desr) noexcept; 285 286 template <class T> 287 void 288 atomic_store(atomic<T>* obj, T desr) noexcept; 289 290 template <class T> 291 void 292 atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept; 293 294 template <class T> 295 void 296 atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept; 297 298 template <class T> 299 T 300 atomic_load(const volatile 
atomic<T>* obj) noexcept; 301 302 template <class T> 303 T 304 atomic_load(const atomic<T>* obj) noexcept; 305 306 template <class T> 307 T 308 atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept; 309 310 template <class T> 311 T 312 atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept; 313 314 template <class T> 315 T 316 atomic_exchange(volatile atomic<T>* obj, T desr) noexcept; 317 318 template <class T> 319 T 320 atomic_exchange(atomic<T>* obj, T desr) noexcept; 321 322 template <class T> 323 T 324 atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept; 325 326 template <class T> 327 T 328 atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept; 329 330 template <class T> 331 bool 332 atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept; 333 334 template <class T> 335 bool 336 atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept; 337 338 template <class T> 339 bool 340 atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept; 341 342 template <class T> 343 bool 344 atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept; 345 346 template <class T> 347 bool 348 atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc, 349 T desr, 350 memory_order s, memory_order f) noexcept; 351 352 template <class T> 353 bool 354 atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr, 355 memory_order s, memory_order f) noexcept; 356 357 template <class T> 358 bool 359 atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj, 360 T* expc, T desr, 361 memory_order s, memory_order f) noexcept; 362 363 template <class T> 364 bool 365 atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc, 366 T desr, 367 memory_order s, memory_order f) noexcept; 368 369 template <class Integral> 370 Integral 371 atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept; 
372 373 template <class Integral> 374 Integral 375 atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept; 376 377 template <class Integral> 378 Integral 379 atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op, 380 memory_order m) noexcept; 381 template <class Integral> 382 Integral 383 atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op, 384 memory_order m) noexcept; 385 template <class Integral> 386 Integral 387 atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept; 388 389 template <class Integral> 390 Integral 391 atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept; 392 393 template <class Integral> 394 Integral 395 atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op, 396 memory_order m) noexcept; 397 template <class Integral> 398 Integral 399 atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op, 400 memory_order m) noexcept; 401 template <class Integral> 402 Integral 403 atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept; 404 405 template <class Integral> 406 Integral 407 atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept; 408 409 template <class Integral> 410 Integral 411 atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op, 412 memory_order m) noexcept; 413 template <class Integral> 414 Integral 415 atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op, 416 memory_order m) noexcept; 417 template <class Integral> 418 Integral 419 atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept; 420 421 template <class Integral> 422 Integral 423 atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept; 424 425 template <class Integral> 426 Integral 427 atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op, 428 memory_order m) noexcept; 429 template <class Integral> 430 Integral 431 atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op, 432 memory_order m) noexcept; 433 template <class 
Integral> 434 Integral 435 atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept; 436 437 template <class Integral> 438 Integral 439 atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept; 440 441 template <class Integral> 442 Integral 443 atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op, 444 memory_order m) noexcept; 445 template <class Integral> 446 Integral 447 atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op, 448 memory_order m) noexcept; 449 450 template <class T> 451 T* 452 atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept; 453 454 template <class T> 455 T* 456 atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept; 457 458 template <class T> 459 T* 460 atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op, 461 memory_order m) noexcept; 462 template <class T> 463 T* 464 atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept; 465 466 template <class T> 467 T* 468 atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept; 469 470 template <class T> 471 T* 472 atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept; 473 474 template <class T> 475 T* 476 atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op, 477 memory_order m) noexcept; 478 template <class T> 479 T* 480 atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept; 481 482 // Atomics for standard typedef types 483 484 typedef atomic<bool> atomic_bool; 485 typedef atomic<char> atomic_char; 486 typedef atomic<signed char> atomic_schar; 487 typedef atomic<unsigned char> atomic_uchar; 488 typedef atomic<short> atomic_short; 489 typedef atomic<unsigned short> atomic_ushort; 490 typedef atomic<int> atomic_int; 491 typedef atomic<unsigned int> atomic_uint; 492 typedef atomic<long> atomic_long; 493 typedef atomic<unsigned long> atomic_ulong; 494 typedef atomic<long long> atomic_llong; 495 typedef atomic<unsigned long long> atomic_ullong; 496 typedef 
atomic<char16_t> atomic_char16_t; 497 typedef atomic<char32_t> atomic_char32_t; 498 typedef atomic<wchar_t> atomic_wchar_t; 499 500 typedef atomic<int_least8_t> atomic_int_least8_t; 501 typedef atomic<uint_least8_t> atomic_uint_least8_t; 502 typedef atomic<int_least16_t> atomic_int_least16_t; 503 typedef atomic<uint_least16_t> atomic_uint_least16_t; 504 typedef atomic<int_least32_t> atomic_int_least32_t; 505 typedef atomic<uint_least32_t> atomic_uint_least32_t; 506 typedef atomic<int_least64_t> atomic_int_least64_t; 507 typedef atomic<uint_least64_t> atomic_uint_least64_t; 508 509 typedef atomic<int_fast8_t> atomic_int_fast8_t; 510 typedef atomic<uint_fast8_t> atomic_uint_fast8_t; 511 typedef atomic<int_fast16_t> atomic_int_fast16_t; 512 typedef atomic<uint_fast16_t> atomic_uint_fast16_t; 513 typedef atomic<int_fast32_t> atomic_int_fast32_t; 514 typedef atomic<uint_fast32_t> atomic_uint_fast32_t; 515 typedef atomic<int_fast64_t> atomic_int_fast64_t; 516 typedef atomic<uint_fast64_t> atomic_uint_fast64_t; 517 518 typedef atomic<int8_t> atomic_int8_t; 519 typedef atomic<uint8_t> atomic_uint8_t; 520 typedef atomic<int16_t> atomic_int16_t; 521 typedef atomic<uint16_t> atomic_uint16_t; 522 typedef atomic<int32_t> atomic_int32_t; 523 typedef atomic<uint32_t> atomic_uint32_t; 524 typedef atomic<int64_t> atomic_int64_t; 525 typedef atomic<uint64_t> atomic_uint64_t; 526 527 typedef atomic<intptr_t> atomic_intptr_t; 528 typedef atomic<uintptr_t> atomic_uintptr_t; 529 typedef atomic<size_t> atomic_size_t; 530 typedef atomic<ptrdiff_t> atomic_ptrdiff_t; 531 typedef atomic<intmax_t> atomic_intmax_t; 532 typedef atomic<uintmax_t> atomic_uintmax_t; 533 534 // fences 535 536 void atomic_thread_fence(memory_order m) noexcept; 537 void atomic_signal_fence(memory_order m) noexcept; 538 539 } // std 540 541 */ 542 543 #include <__config> 544 #include <cstddef> 545 #include <cstdint> 546 #include <type_traits> 547 548 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER) 549 #pragma GCC 
system_header
#endif

// <atomic> requires a threading runtime and one of the two known compiler
// atomic implementations (Clang's C11 _Atomic builtins or GCC's __atomic_*
// builtins); fail loudly otherwise.
#ifdef _LIBCPP_HAS_NO_THREADS
#error <atomic> is not supported on this single threaded system
#endif
#if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
#error <atomic> is not implemented
#endif

#if _LIBCPP_STD_VER > 14
# define __cpp_lib_atomic_is_always_lock_free 201603L
#endif

// Compile-time diagnostics for memory_order arguments that the standard
// forbids for the given operation: stores may not use consume/acquire/acq_rel,
// loads may not use release/acq_rel, and a compare-exchange failure order may
// not contain a release component.
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
    _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                             __m == memory_order_acquire || \
                             __m == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
    _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                             __m == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
    _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                             __f == memory_order_acq_rel, \
                             "memory order argument to atomic operation is invalid")

_LIBCPP_BEGIN_NAMESPACE_STD

// C++11 [atomics.order]: the six memory orderings, as an enum (not enum
// class) for C compatibility.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
// Emulation layer: when the compiler lacks Clang's C11 _Atomic builtins,
// implement the __c11_atomic_* interface on top of GCC's __atomic_* builtins.
namespace __gcc_atomic {
// Wrapper that stands in for _Atomic(_Tp); the payload is a plain _Tp that
// the __atomic_* builtins operate on by address.
template <typename _Tp>
struct __gcc_atomic_t {

#if _GNUC_VER >= 501
    static_assert(is_trivially_copyable<_Tp>::value,
    "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __gcc_atomic_t() _NOEXCEPT = default;
#else
    __gcc_atomic_t() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>

// Declared, never defined: only used inside unevaluated sizeof() below.
template <typename _Tp> _Tp __create();

// SFINAE probe: the first overload is viable only if `(*_Tp).__a_value = _Td`
// is a well-formed assignment; otherwise overload resolution falls back to
// the variadic __two overload.
template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

// __can_assign<_Tp, _Td>::value: true when a _Td can be assigned into the
// payload of a (possibly volatile-qualified) atomic pointed to by _Tp.
template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
    sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};

// Map std::memory_order onto the GCC __ATOMIC_* constants.
static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

// Same mapping, but for compare-exchange *failure* orders: release and
// acq_rel are demoted (to relaxed and acquire respectively), since a failure
// order may not include a release component.
static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}

} // namespace __gcc_atomic

// Non-atomic initialization (atomic_init / constructor path). Preferred
// overload: direct assignment through the volatile lvalue when valid.
template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

// Fallback when only the non-volatile assignment is valid.
template <typename _Tp>
static inline
typename enable_if<
    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
  // the default operator= in an object is not volatile, a byte-by-byte copy
  // is required.
  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
  volatile char* end = to + sizeof(_Tp);
  char* from = reinterpret_cast<char*>(&__val);
  while (to != end) {
    *to++ = *from++;
  }
}

template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}

static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}

// Each operation below comes as a volatile/non-volatile overload pair that
// simply forwards to the corresponding GCC builtin with the translated order.
template <typename _Tp>
static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  return __atomic_store(&__a->__a_value, &__val,
                        __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
                                    memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
                                        _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
                                        memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

// Compare-exchange: 4th builtin argument selects weak (true) vs strong
// (false); the failure order goes through __to_gcc_failure_order.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Scale factor for fetch_add/fetch_sub: 1 for scalars, sizeof(_Tp) for
// pointers (because the GCC builtin adds raw bytes, while atomic<T*>
// arithmetic is in units of elements).
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP

// [atomics.order] kill_dependency: ends a memory_order_consume dependency
// chain; the value itself is passed straight through.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}

// Standard lock-free property macros, forwarded to the compiler-provided
// __GCC_ATOMIC_*_LOCK_FREE predefines.
#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE

// general atomic<T>

// Base for atomic<T>.  The defaulted bool parameter is true only for
// integral, non-bool T; that case is handled by the partial specialization
// below, which adds the fetch_* arithmetic.  This primary template provides
// load/store/exchange/compare-exchange only.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // The wrapped value.  'mutable' because load() is const but the
    // underlying __c11_atomic_load takes a non-const pointer.
    mutable _Atomic(_Tp) __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
    // True when the compiler can prove lock-freedom for this size at compile
    // time (__atomic_always_lock_free with an unknown, i.e. null, address).
    static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
    {
#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
    return __c11_atomic_is_lock_free(sizeof(_Tp));
#else
    return __atomic_is_lock_free(sizeof(_Tp), 0);
#endif
    }
    // Non-volatile overload forwards to the volatile one.
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}

    // _LIBCPP_CHECK_*_MEMORY_ORDER attaches a compile-time diagnostic for
    // memory orders that are invalid for the operation.
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __c11_atomic_load(&__a_, __m);}
    // Implicit conversion is a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    // Two-order compare-exchange forms (__s on success, __f on failure).
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-order forms: the same order __m is used for success and failure.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // Default constructor leaves the value uninitialized in C++11 mode
    // (= default); C++03 has no '= default' so it value-initializes instead.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_CXX03_LANG

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    // Atomics are neither copyable nor copy-assignable (C++03 emulates
    // '= delete' by declaring them private and not defining them).
#ifndef _LIBCPP_CXX03_LANG
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif
};

#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-class definition for the static data member declared above.
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif

// atomic<Integral>

// Partial specialization for integral (non-bool) T: inherits all of the
// plain operations and adds fetch_* arithmetic plus the derived
// increment/decrement and compound-assignment operators.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Operators are defined in terms of the fetch_* primitives above; the
    // pre-forms re-apply the operation to the fetched (old) value to yield
    // the new value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};

// atomic<T>

// Public primary template: everything is inherited from __atomic_base; only
// assignment from T is added here.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Assignment is a seq_cst store and returns the stored value, not *this.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};

// atomic<T*>

// Pointer specialization: adds fetch_add/fetch_sub taking ptrdiff_t and the
// pointer-arithmetic operators.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT           {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                    {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT           {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                    {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT              {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                       {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT              {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                       {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};

// The free functions below are the C-compatible interface; each simply
// forwards to the corresponding member, in volatile and non-volatile
// overload pairs.

// atomic_is_lock_free

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init

// Non-atomic initialization of an already-constructed atomic object.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

// atomic_store

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

// atomic_load

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

//
// atomic_load_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

// atomic_exchange

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

// Note: expected value is passed by pointer (C interface); the member takes
// it by reference.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

// atomic_fetch_add

// The integral overloads are constrained with enable_if to integral,
// non-bool T; separate unconstrained overloads handle atomic<T*>.
template <class _Tp>
inline
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// Pointer overloads: delta is a ptrdiff_t element count.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

// The bitwise fetch operations have no pointer overloads: they exist only
// for integral, non-bool T (enforced with enable_if).

// atomic_fetch_and

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

// flag type and operations

// atomic_flag: implemented directly on a _Atomic(bool); test_and_set is an
// atomic exchange with 'true', clear is an atomic store of 'false'.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_CXX03_LANG

    // Construction from bool is a libc++ extension, not part of the standard.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

#ifndef _LIBCPP_CXX03_LANG
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif
} atomic_flag;

// C-compatible free functions forwarding to the atomic_flag members.

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

// Compiler-only fence: orders operations with respect to signal handlers on
// the same thread (forwards to __c11_atomic_signal_fence).
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef
        atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// ATOMIC_FLAG_INIT puts an atomic_flag in the clear (false) state;
// ATOMIC_VAR_INIT wraps a value for aggregate initialization of an atomic.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

_LIBCPP_END_NAMESPACE_STD

#endif  // _LIBCPP_ATOMIC