// -*- C++ -*- header.

// Copyright (C) 2008-2013 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#endif

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
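  //
  // Illustrative usage sketch only (not part of this header): one thread
  // publishes completion through an atomic_bool, another polls for it.
  // The names `done' and `worker' below are hypothetical.
  //
  //   #include <atomic>
  //   #include <thread>
  //
  //   std::atomic_bool done(false);
  //
  //   void worker()
  //   {
  //     // ... do work ...
  //     done.store(true, std::memory_order_release);   // publish
  //   }
  //
  //   int main()
  //   {
  //     std::thread t(worker);
  //     while (!done.load(std::memory_order_acquire))  // wait for publication
  //       { /* spin or yield */ }
  //     t.join();
  //   }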
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };


  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
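   *
   *  A brief usage sketch (illustrative only; the type and variable names
   *  below are hypothetical, not part of the interface):
   *  @code
   *  struct Point { int x; int y; };          // trivially copyable
   *  std::atomic<Point> p(Point{0, 0});
   *  Point q = p.load();                      // whole-object atomic load
   *  p.store(Point{1, 2});                    // whole-object atomic store
   *  Point expected = q;
   *  p.compare_exchange_strong(expected, Point{3, 4});
   *  @endcode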
   */
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }
    };


  /// Partial specialization for pointer types.
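  //
  // Illustrative sketch only (hypothetical names): fetch_add/fetch_sub and
  // the arithmetic operators of the pointer specialization step the stored
  // pointer in units of the pointee type, as in ordinary pointer arithmetic.
  //
  //   int data[8] = {};
  //   std::atomic<int*> cursor(data);
  //   int* prev = cursor.fetch_add(2);   // prev == data, cursor == data + 2
  //   ++cursor;                          // cursor == data + 3
  //   cursor -= 1;                       // cursor == data + 2
  //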
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };


  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool __integral_type;
      typedef atomic_bool __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char __integral_type;
      typedef atomic_char __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char __integral_type;
      typedef atomic_schar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char __integral_type;
      typedef atomic_uchar __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short __integral_type;
      typedef atomic_short __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short __integral_type;
      typedef atomic_ushort __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : atomic_int
    {
      typedef int __integral_type;
      typedef atomic_int __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int __integral_type;
      typedef atomic_uint __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long __integral_type;
      typedef atomic_long __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long __integral_type;
      typedef atomic_ulong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long __integral_type;
      typedef atomic_llong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t __integral_type;
      typedef atomic_wchar_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t __integral_type;
      typedef atomic_char16_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t __integral_type;
      typedef atomic_char32_t __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };


  // Function definitions, atomic_flag operations.
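  //
  // Illustrative sketch only: atomic_flag together with the free functions
  // below can implement a minimal spin lock.  The `lock_flag', `lock' and
  // `unlock' names are hypothetical, not part of this header.
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&lock_flag,
  //                                                   std::memory_order_acquire))
  //       { /* busy wait */ }
  //   }
  //
  //   void unlock()
  //   { std::atomic_flag_clear_explicit(&lock_flag, std::memory_order_release); }
  //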
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }


  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  // Function templates for atomic_integral operations only, using
  // __atomic_base.  The template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
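  //
  // Illustrative sketch only (hypothetical names): the fetch_* free
  // functions below return the value held before the modification, so a
  // counter can be bumped and its previous value observed in one atomic
  // step.
  //
  //   std::atomic<unsigned> hits(0);
  //   unsigned before = std::atomic_fetch_add(&hits, 1u);         // seq_cst
  //   std::atomic_fetch_add_explicit(&hits, 1u,
  //                                  std::memory_order_relaxed);  // relaxed
  //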
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }


  // Overloads for the pointer partial specialization, atomic<_Tp*>.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _GLIBCXX_ATOMIC