1 // -*- C++ -*- header. 2 3 // Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc. 4 // 5 // This file is part of the GNU ISO C++ Library. This library is free 6 // software; you can redistribute it and/or modify it under the 7 // terms of the GNU General Public License as published by the 8 // Free Software Foundation; either version 3, or (at your option) 9 // any later version. 10 11 // This library is distributed in the hope that it will be useful, 12 // but WITHOUT ANY WARRANTY; without even the implied warranty of 13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 // GNU General Public License for more details. 15 16 // Under Section 7 of GPL version 3, you are granted additional 17 // permissions described in the GCC Runtime Library Exception, version 18 // 3.1, as published by the Free Software Foundation. 19 20 // You should have received a copy of the GNU General Public License and 21 // a copy of the GCC Runtime Library Exception along with this program; 22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see 23 // <http://www.gnu.org/licenses/>. 24 25 /** @file include/atomic 26 * This is a Standard C++ Library header. 27 */ 28 29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl. 30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html 31 32 #ifndef _GLIBCXX_ATOMIC 33 #define _GLIBCXX_ATOMIC 1 34 35 #pragma GCC system_header 36 37 #ifndef __GXX_EXPERIMENTAL_CXX0X__ 38 # include <bits/c++0x_warning.h> 39 #endif 40 41 #include <bits/atomic_base.h> 42 #include <bits/atomic_0.h> 43 #include <bits/atomic_2.h> 44 45 namespace std _GLIBCXX_VISIBILITY(default) 46 { 47 _GLIBCXX_BEGIN_NAMESPACE_VERSION 48 49 /** 50 * @addtogroup atomics 51 * @{ 52 */ 53 54 /// atomic_bool 55 // NB: No operators or fetch-operations for this type. 
  /// atomic_bool wraps an __atomic_base<bool>, exposing only load/store,
  /// exchange and compare-exchange (no arithmetic or bitwise fetch-ops).
  struct atomic_bool
  {
  private:
    // All operations delegate to this lock-free (where possible) base.
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() = default;
    ~atomic_bool() = default;
    // Atomic objects are not copyable or copy-assignable.
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    // Constant-initialization from a plain bool.
    constexpr atomic_bool(bool __i) : _M_base(__i) { }

    // Assignment performs a seq_cst store via the base and yields the
    // stored value.
    bool
    operator=(bool __i)
    { return _M_base.operator=(__i); }

    // Implicit conversion performs a seq_cst load.
    operator bool() const
    { return _M_base.load(); }

    operator bool() const volatile
    { return _M_base.load(); }

    bool
    is_lock_free() const { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst)
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst)
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.exchange(__i, __m); }

    // Two-order forms: __m1 on success, __m2 on failure.
    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2)
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) volatile
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    // Single-order convenience forms; failure order is derived by the base.
    bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst)
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2)
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst)
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };


  /// atomic
  /// 29.4.3, Generic atomic type, primary class template.
  // NB: only the assignment operators are defined inline here; the
  // remaining members are declarations whose definitions are not
  // provided in this header.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) : _M_i(__i) { }

      operator _Tp() const;

      operator _Tp() const volatile;

      // Assignment stores with the default (seq_cst) ordering and
      // returns the argument, not a re-load of the object.
      _Tp
      operator=(_Tp __i) { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile { store(__i); return __i; }

      bool
      is_lock_free() const;

      bool
      is_lock_free() const volatile;

      void
      store(_Tp, memory_order = memory_order_seq_cst);

      void
      store(_Tp, memory_order = memory_order_seq_cst) volatile;

      _Tp
      load(memory_order = memory_order_seq_cst) const;

      _Tp
      load(memory_order = memory_order_seq_cst) const volatile;

      _Tp
      exchange(_Tp __i, memory_order = memory_order_seq_cst);

      _Tp
      exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile;

      bool
      compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order);

      bool
      compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) volatile;

      bool
      compare_exchange_weak(_Tp&, _Tp, memory_order = memory_order_seq_cst);

      bool
      compare_exchange_weak(_Tp&, _Tp,
			    memory_order = memory_order_seq_cst) volatile;

      bool
      compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order);

      bool
      compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) volatile;

      bool
      compare_exchange_strong(_Tp&, _Tp, memory_order = memory_order_seq_cst);

      bool
      compare_exchange_strong(_Tp&, _Tp,
			      memory_order = memory_order_seq_cst) volatile;
    };


  /// Partial specialization for pointer types.
  // Adds pointer arithmetic (increment/decrement, fetch_add/fetch_sub
  // by ptrdiff_t) on top of the generic operations, all delegating to
  // the __atomic_base<_Tp*> member.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      __base_type _M_b;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) : _M_b(__p) { }

      operator __pointer_type() const
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p)
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile
      { return _M_b.operator=(__p); }

      // Post-increment/decrement: return the previous value.
      __pointer_type
      operator++(int)
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile
      { return _M_b++; }

      __pointer_type
      operator--(int)
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile
      { return _M_b--; }

      // Pre-increment/decrement: return the new value.
      __pointer_type
      operator++()
      { return ++_M_b; }

      __pointer_type
      operator++() volatile
      { return ++_M_b; }

      __pointer_type
      operator--()
      { return --_M_b; }

      __pointer_type
      operator--() volatile
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d)
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d)
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile
      { return _M_b.exchange(__p, __m); }

      // NOTE(review): the weak forms below forward to the base's
      // compare_exchange_strong.  A strong CAS satisfies the weak
      // contract (it never fails spuriously), but it may pessimize
      // retry loops written against the weak form — confirm against
      // bits/atomic_base.h whether a weak base operation is available.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2)
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) volatile
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      // Single-order convenience forms derive the failure order with
      // __calculate_memory_order and delegate to the two-order form.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst)
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) volatile
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2)
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) volatile
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst)
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) volatile
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __calculate_memory_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile
      { return _M_b.fetch_sub(__d, __m); }
    };


  /// Explicit specialization for bool.
416 template<> 417 struct atomic<bool> : public atomic_bool 418 { 419 typedef bool __integral_type; 420 typedef atomic_bool __base_type; 421 422 atomic() = default; 423 ~atomic() = default; 424 atomic(const atomic&) = delete; 425 atomic& operator=(const atomic&) = delete; 426 atomic& operator=(const atomic&) volatile = delete; 427 428 constexpr atomic(__integral_type __i) : __base_type(__i) { } 429 430 using __base_type::operator __integral_type; 431 using __base_type::operator=; 432 }; 433 434 /// Explicit specialization for char. 435 template<> 436 struct atomic<char> : public atomic_char 437 { 438 typedef char __integral_type; 439 typedef atomic_char __base_type; 440 441 atomic() = default; 442 ~atomic() = default; 443 atomic(const atomic&) = delete; 444 atomic& operator=(const atomic&) = delete; 445 atomic& operator=(const atomic&) volatile = delete; 446 447 constexpr atomic(__integral_type __i) : __base_type(__i) { } 448 449 using __base_type::operator __integral_type; 450 using __base_type::operator=; 451 }; 452 453 /// Explicit specialization for signed char. 454 template<> 455 struct atomic<signed char> : public atomic_schar 456 { 457 typedef signed char __integral_type; 458 typedef atomic_schar __base_type; 459 460 atomic() = default; 461 ~atomic() = default; 462 atomic(const atomic&) = delete; 463 atomic& operator=(const atomic&) = delete; 464 atomic& operator=(const atomic&) volatile = delete; 465 466 constexpr atomic(__integral_type __i) : __base_type(__i) { } 467 468 using __base_type::operator __integral_type; 469 using __base_type::operator=; 470 }; 471 472 /// Explicit specialization for unsigned char. 
473 template<> 474 struct atomic<unsigned char> : public atomic_uchar 475 { 476 typedef unsigned char __integral_type; 477 typedef atomic_uchar __base_type; 478 479 atomic() = default; 480 ~atomic() = default; 481 atomic(const atomic&) = delete; 482 atomic& operator=(const atomic&) = delete; 483 atomic& operator=(const atomic&) volatile = delete; 484 485 constexpr atomic(__integral_type __i) : __base_type(__i) { } 486 487 using __base_type::operator __integral_type; 488 using __base_type::operator=; 489 }; 490 491 /// Explicit specialization for short. 492 template<> 493 struct atomic<short> : public atomic_short 494 { 495 typedef short __integral_type; 496 typedef atomic_short __base_type; 497 498 atomic() = default; 499 ~atomic() = default; 500 atomic(const atomic&) = delete; 501 atomic& operator=(const atomic&) = delete; 502 atomic& operator=(const atomic&) volatile = delete; 503 504 constexpr atomic(__integral_type __i) : __base_type(__i) { } 505 506 using __base_type::operator __integral_type; 507 using __base_type::operator=; 508 }; 509 510 /// Explicit specialization for unsigned short. 511 template<> 512 struct atomic<unsigned short> : public atomic_ushort 513 { 514 typedef unsigned short __integral_type; 515 typedef atomic_ushort __base_type; 516 517 atomic() = default; 518 ~atomic() = default; 519 atomic(const atomic&) = delete; 520 atomic& operator=(const atomic&) = delete; 521 atomic& operator=(const atomic&) volatile = delete; 522 523 constexpr atomic(__integral_type __i) : __base_type(__i) { } 524 525 using __base_type::operator __integral_type; 526 using __base_type::operator=; 527 }; 528 529 /// Explicit specialization for int. 
530 template<> 531 struct atomic<int> : atomic_int 532 { 533 typedef int __integral_type; 534 typedef atomic_int __base_type; 535 536 atomic() = default; 537 ~atomic() = default; 538 atomic(const atomic&) = delete; 539 atomic& operator=(const atomic&) = delete; 540 atomic& operator=(const atomic&) volatile = delete; 541 542 constexpr atomic(__integral_type __i) : __base_type(__i) { } 543 544 using __base_type::operator __integral_type; 545 using __base_type::operator=; 546 }; 547 548 /// Explicit specialization for unsigned int. 549 template<> 550 struct atomic<unsigned int> : public atomic_uint 551 { 552 typedef unsigned int __integral_type; 553 typedef atomic_uint __base_type; 554 555 atomic() = default; 556 ~atomic() = default; 557 atomic(const atomic&) = delete; 558 atomic& operator=(const atomic&) = delete; 559 atomic& operator=(const atomic&) volatile = delete; 560 561 constexpr atomic(__integral_type __i) : __base_type(__i) { } 562 563 using __base_type::operator __integral_type; 564 using __base_type::operator=; 565 }; 566 567 /// Explicit specialization for long. 568 template<> 569 struct atomic<long> : public atomic_long 570 { 571 typedef long __integral_type; 572 typedef atomic_long __base_type; 573 574 atomic() = default; 575 ~atomic() = default; 576 atomic(const atomic&) = delete; 577 atomic& operator=(const atomic&) = delete; 578 atomic& operator=(const atomic&) volatile = delete; 579 580 constexpr atomic(__integral_type __i) : __base_type(__i) { } 581 582 using __base_type::operator __integral_type; 583 using __base_type::operator=; 584 }; 585 586 /// Explicit specialization for unsigned long. 
587 template<> 588 struct atomic<unsigned long> : public atomic_ulong 589 { 590 typedef unsigned long __integral_type; 591 typedef atomic_ulong __base_type; 592 593 atomic() = default; 594 ~atomic() = default; 595 atomic(const atomic&) = delete; 596 atomic& operator=(const atomic&) = delete; 597 atomic& operator=(const atomic&) volatile = delete; 598 599 constexpr atomic(__integral_type __i) : __base_type(__i) { } 600 601 using __base_type::operator __integral_type; 602 using __base_type::operator=; 603 }; 604 605 /// Explicit specialization for long long. 606 template<> 607 struct atomic<long long> : public atomic_llong 608 { 609 typedef long long __integral_type; 610 typedef atomic_llong __base_type; 611 612 atomic() = default; 613 ~atomic() = default; 614 atomic(const atomic&) = delete; 615 atomic& operator=(const atomic&) = delete; 616 atomic& operator=(const atomic&) volatile = delete; 617 618 constexpr atomic(__integral_type __i) : __base_type(__i) { } 619 620 using __base_type::operator __integral_type; 621 using __base_type::operator=; 622 }; 623 624 /// Explicit specialization for unsigned long long. 625 template<> 626 struct atomic<unsigned long long> : public atomic_ullong 627 { 628 typedef unsigned long long __integral_type; 629 typedef atomic_ullong __base_type; 630 631 atomic() = default; 632 ~atomic() = default; 633 atomic(const atomic&) = delete; 634 atomic& operator=(const atomic&) = delete; 635 atomic& operator=(const atomic&) volatile = delete; 636 637 constexpr atomic(__integral_type __i) : __base_type(__i) { } 638 639 using __base_type::operator __integral_type; 640 using __base_type::operator=; 641 }; 642 643 /// Explicit specialization for wchar_t. 
644 template<> 645 struct atomic<wchar_t> : public atomic_wchar_t 646 { 647 typedef wchar_t __integral_type; 648 typedef atomic_wchar_t __base_type; 649 650 atomic() = default; 651 ~atomic() = default; 652 atomic(const atomic&) = delete; 653 atomic& operator=(const atomic&) = delete; 654 atomic& operator=(const atomic&) volatile = delete; 655 656 constexpr atomic(__integral_type __i) : __base_type(__i) { } 657 658 using __base_type::operator __integral_type; 659 using __base_type::operator=; 660 }; 661 662 /// Explicit specialization for char16_t. 663 template<> 664 struct atomic<char16_t> : public atomic_char16_t 665 { 666 typedef char16_t __integral_type; 667 typedef atomic_char16_t __base_type; 668 669 atomic() = default; 670 ~atomic() = default; 671 atomic(const atomic&) = delete; 672 atomic& operator=(const atomic&) = delete; 673 atomic& operator=(const atomic&) volatile = delete; 674 675 constexpr atomic(__integral_type __i) : __base_type(__i) { } 676 677 using __base_type::operator __integral_type; 678 using __base_type::operator=; 679 }; 680 681 /// Explicit specialization for char32_t. 682 template<> 683 struct atomic<char32_t> : public atomic_char32_t 684 { 685 typedef char32_t __integral_type; 686 typedef atomic_char32_t __base_type; 687 688 atomic() = default; 689 ~atomic() = default; 690 atomic(const atomic&) = delete; 691 atomic& operator=(const atomic&) = delete; 692 atomic& operator=(const atomic&) volatile = delete; 693 694 constexpr atomic(__integral_type __i) : __base_type(__i) { } 695 696 using __base_type::operator __integral_type; 697 using __base_type::operator=; 698 }; 699 700 701 // Function definitions, atomic_flag operations. 
702 inline bool 703 atomic_flag_test_and_set_explicit(atomic_flag* __a, memory_order __m) 704 { return __a->test_and_set(__m); } 705 706 inline bool 707 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a, 708 memory_order __m) 709 { return __a->test_and_set(__m); } 710 711 inline void 712 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) 713 { __a->clear(__m); } 714 715 inline void 716 atomic_flag_clear_explicit(volatile atomic_flag* __a, memory_order __m) 717 { __a->clear(__m); } 718 719 inline bool 720 atomic_flag_test_and_set(atomic_flag* __a) 721 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); } 722 723 inline bool 724 atomic_flag_test_and_set(volatile atomic_flag* __a) 725 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); } 726 727 inline void 728 atomic_flag_clear(atomic_flag* __a) 729 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); } 730 731 inline void 732 atomic_flag_clear(volatile atomic_flag* __a) 733 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); } 734 735 736 // Function templates generally applicable to atomic types. 
  // Free-function counterparts of the atomic<_ITp> member operations;
  // each forwards to the corresponding member on the pointed-to object.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a)
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a)
    { return __a->is_lock_free(); }

  // NB: atomic_init is declared here but no definition is provided in
  // this header.
  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i);

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i);

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i, memory_order __m)
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
			  memory_order __m)
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m)
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m)
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
			     memory_order __m)
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
			     memory_order __m)
    { return __a->exchange(__i, __m); }

  // Compare-exchange free functions take the expected value through a
  // pointer (__i1), unlike the members, which take a reference.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1, memory_order __m2)
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  _ITp* __i1, _ITp __i2,
					  memory_order __m1, memory_order __m2)
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2)
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    _ITp* __i1, _ITp __i2,
					    memory_order __m1,
					    memory_order __m2)
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }


  // Default-order (seq_cst) forms, implemented via the _explicit forms.
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i)
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i)
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a)
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a)
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i)
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i)
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 _ITp* __i1, _ITp __i2)
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 _ITp* __i1, _ITp __i2)
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   _ITp* __i1, _ITp __i2)
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   _ITp* __i1, _ITp __i2)
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  // Function templates for atomic_integral operations only, using
  // __atomic_base. Template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m)
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m)
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m)
    { return __a->fetch_xor(__i, __m); }

  // Default-order (seq_cst) fetch operations.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }


  // Partial specializations for pointers: fetch_add/fetch_sub take a
  // ptrdiff_t offset and go through the atomic<_ITp*> member functions.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m)
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m)
    { return __a->fetch_add(__d, __m); }

  // NB: these rely on the member's defaulted seq_cst memory order.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d)
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d)
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
			      ptrdiff_t __d, memory_order __m)
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
			      memory_order __m)
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d)
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d)
    { return __a->fetch_sub(__d); }
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif