// -*- C++ -*- header.

// Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#ifndef __GXX_EXPERIMENTAL_CXX0X__
# include <bits/c++0x_warning.h>
#endif

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };


  /// atomic
  /// 29.4.3, Generic atomic type, primary class template.
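  //
  // Illustrative use (a hedged sketch, not part of this header): the
  // primary template holds a _Tp and maps each operation onto the
  // __atomic_* built-ins, so a trivially copyable user type can be used.
  // The struct `point` below is a hypothetical example type.
  //
  //   struct point { int x, y; };
  //   std::atomic<point> p(point{0, 0});
  //   point expected = p.load();
  //   p.compare_exchange_strong(expected, point{1, 2});
  //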
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }
    };


  /// Partial specialization for pointer types.
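  //
  // Illustrative use (a hedged sketch, not part of this header):
  // arithmetic on atomic<_Tp*> is in units of _Tp objects, exactly as
  // for a plain _Tp*; fetch_add returns the previous pointer value.
  //
  //   int buf[4] = { 0, 1, 2, 3 };
  //   std::atomic<int*> ap(buf);
  //   int* prev = ap.fetch_add(2);   // prev == buf
  //   // ap.load() == buf + 2
  //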
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*                  __pointer_type;
      typedef __atomic_base<_Tp*>   __base_type;
      __base_type                   _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };


  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool              __integral_type;
      typedef atomic_bool       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char              __integral_type;
      typedef atomic_char       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char       __integral_type;
      typedef atomic_schar      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char     __integral_type;
      typedef atomic_uchar      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short             __integral_type;
      typedef atomic_short      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short    __integral_type;
      typedef atomic_ushort     __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : public atomic_int
    {
      typedef int               __integral_type;
      typedef atomic_int        __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int      __integral_type;
      typedef atomic_uint       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long              __integral_type;
      typedef atomic_long       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long     __integral_type;
      typedef atomic_ulong      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long         __integral_type;
      typedef atomic_llong      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t           __integral_type;
      typedef atomic_wchar_t    __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t          __integral_type;
      typedef atomic_char16_t   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t          __integral_type;
      typedef atomic_char32_t   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };


  // Function definitions, atomic_flag operations.
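  //
  // Illustrative use (a hedged sketch, not part of this header): these
  // free functions mirror the atomic_flag member functions; a minimal
  // spinlock can be built from test_and_set/clear.
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //   while (std::atomic_flag_test_and_set_explicit(&lock,
  //                                                  std::memory_order_acquire))
  //     { }  // spin until the flag was previously clear
  //   // ... critical section ...
  //   std::atomic_flag_clear_explicit(&lock, std::memory_order_release);
  //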
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }


  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  // Function templates for atomic_integral operations only, using
  // __atomic_base.  The template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
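  //
  // Illustrative use (a hedged sketch, not part of this header): the
  // fetch functions perform a read-modify-write and return the value
  // the object held immediately before the operation.
  //
  //   std::atomic<int> counter(0);
  //   int old = std::atomic_fetch_add_explicit(&counter, 5,
  //                                            std::memory_order_relaxed);
  //   // old == 0, counter.load() == 5
  //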
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }


  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // _GLIBCXX_ATOMIC