// Protocol Buffers - Google's data interchange format
// Copyright 2012 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This file is an internal atomic implementation; use atomicops.h instead.

#ifndef GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_PNACL_H_
#define GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_PNACL_H_

#include <atomic>

namespace google {
namespace protobuf {
namespace internal {

// This implementation is transitional and maintains the original API for
// atomicops.h. It requires casting memory locations to the atomic types,
// and assumes that the API and the C++11 implementation are
// layout-compatible, which isn't true for all implementations or hardware
// platforms. The static assertion below should detect this issue; if it
// fires, this header shouldn't be used.
//
// TODO(jfb) If this header manages to stay committed then the API should be
//           modified, and all call sites updated.
typedef volatile std::atomic<Atomic32>* AtomicLocation32;
static_assert(sizeof(*(AtomicLocation32) nullptr) == sizeof(Atomic32),
              "incompatible 32-bit atomic layout");
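
// Illustrative sketch (not part of this header's API): every function below
// reinterprets a plain Atomic32 location as a std::atomic<Atomic32>, relying
// on the layout compatibility checked above. Conceptually:
//
//   Atomic32 word = 0;
//   AtomicLocation32 loc = (AtomicLocation32)&word;  // reinterpret the location
//   loc->store(1, std::memory_order_relaxed);        // atomic access to the same bytes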

inline void MemoryBarrier() {
#if defined(__GLIBCXX__)
  // Work around libstdc++ bug 51038 where atomic_thread_fence was declared but
  // not defined, leading to the linker complaining about undefined references.
  __atomic_thread_fence(std::memory_order_seq_cst);
#else
  std::atomic_thread_fence(std::memory_order_seq_cst);
#endif
}
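
// Hedged usage sketch: MemoryBarrier() issues a full (sequentially
// consistent) fence. Its typical role is to order a relaxed access against
// later ones, as Acquire_Store and Release_Load below do. Illustrative names
// only:
//
//   NoBarrier_Store(&flag, 1);             // relaxed store
//   MemoryBarrier();                       // earlier accesses can't reorder past here
//   Atomic32 v = NoBarrier_Load(&other);   // relaxed load, ordered after the store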

inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  ((AtomicLocation32)ptr)
      ->compare_exchange_strong(old_value,
                                new_value,
                                std::memory_order_relaxed,
                                std::memory_order_relaxed);
  return old_value;
}
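
// Sketch of the return convention: compare_exchange_strong overwrites
// old_value with the value actually observed, so the function returns the
// previous contents of *ptr whether or not the swap happened. A caller can
// therefore retry in a loop (names here are hypothetical):
//
//   Atomic32 expected = NoBarrier_Load(&counter);
//   for (;;) {
//     Atomic32 prev = NoBarrier_CompareAndSwap(&counter, expected, expected + 1);
//     if (prev == expected) break;  // swap succeeded
//     expected = prev;              // retry with the freshly observed value
//   }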

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  return ((AtomicLocation32)ptr)
      ->exchange(new_value, std::memory_order_relaxed);
}

inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return increment +
         ((AtomicLocation32)ptr)
             ->fetch_add(increment, std::memory_order_relaxed);
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  return increment + ((AtomicLocation32)ptr)->fetch_add(increment);
}
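
// Hedged note: fetch_add returns the value *before* the addition, so both
// increment helpers return the *new* value; Barrier_AtomicIncrement relies on
// fetch_add's default std::memory_order_seq_cst ordering. A classic use is a
// reference count (illustrative only):
//
//   if (Barrier_AtomicIncrement(&refcount, -1) == 0) {
//     // Last reference dropped; safe to destroy the object.
//   }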

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  ((AtomicLocation32)ptr)
      ->compare_exchange_strong(old_value,
                                new_value,
                                std::memory_order_acquire,
                                std::memory_order_acquire);
  return old_value;
}
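
// Illustrative sketch: an acquire CAS pairs with a releasing write elsewhere.
// A minimal spinlock under these assumptions (lock_word is a hypothetical
// Atomic32, 0 = unlocked, 1 = locked):
//
//   while (Acquire_CompareAndSwap(&lock_word, 0, 1) != 0) {
//     // spin: another thread holds the lock
//   }
//   // ... critical section: its accesses cannot float above the CAS ...
//   Release_Store(&lock_word, 0);  // publishes the critical section's writes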

inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  ((AtomicLocation32)ptr)
      ->compare_exchange_strong(old_value,
                                new_value,
                                std::memory_order_release,
                                std::memory_order_relaxed);
  return old_value;
}

inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed);
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  ((AtomicLocation32)ptr)->store(value, std::memory_order_release);
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed);
}

inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  return ((AtomicLocation32)ptr)->load(std::memory_order_acquire);
}

inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed);
}
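
// Hedged usage sketch for the store/load family: Release_Store paired with
// Acquire_Load implements the classic message-passing pattern. Names below
// are hypothetical:
//
//   // Producer:
//   data = 42;                 // plain write
//   Release_Store(&ready, 1);  // publishes the write to `data`
//
//   // Consumer:
//   if (Acquire_Load(&ready) == 1) {
//     assert(data == 42);      // visible once the acquire load sees the flag
//   }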

#if defined(GOOGLE_PROTOBUF_ARCH_64_BIT)

typedef volatile std::atomic<Atomic64>* AtomicLocation64;
static_assert(sizeof(*(AtomicLocation64) nullptr) == sizeof(Atomic64),
              "incompatible 64-bit atomic layout");
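
// The Atomic64 operations below mirror the Atomic32 ones above one-for-one;
// only the value type and the casted atomic location type differ.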

inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  ((AtomicLocation64)ptr)
      ->compare_exchange_strong(old_value,
                                new_value,
                                std::memory_order_relaxed,
                                std::memory_order_relaxed);
  return old_value;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  return ((AtomicLocation64)ptr)
      ->exchange(new_value, std::memory_order_relaxed);
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  return increment +
         ((AtomicLocation64)ptr)
             ->fetch_add(increment, std::memory_order_relaxed);
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  return increment + ((AtomicLocation64)ptr)->fetch_add(increment);
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  ((AtomicLocation64)ptr)
      ->compare_exchange_strong(old_value,
                                new_value,
                                std::memory_order_acquire,
                                std::memory_order_acquire);
  return old_value;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  ((AtomicLocation64)ptr)
      ->compare_exchange_strong(old_value,
                                new_value,
                                std::memory_order_release,
                                std::memory_order_relaxed);
  return old_value;
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  ((AtomicLocation64)ptr)->store(value, std::memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  ((AtomicLocation64)ptr)->store(value, std::memory_order_relaxed);
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  ((AtomicLocation64)ptr)->store(value, std::memory_order_release);
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return ((AtomicLocation64)ptr)->load(std::memory_order_relaxed);
}

inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  return ((AtomicLocation64)ptr)->load(std::memory_order_acquire);
}

inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  MemoryBarrier();
  return ((AtomicLocation64)ptr)->load(std::memory_order_relaxed);
}

#endif  // defined(GOOGLE_PROTOBUF_ARCH_64_BIT)

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#endif  // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_PNACL_H_