Home | History | Annotate | Download | only in base
      1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 // This file is an internal atomic implementation, use base/atomicops.h instead.
      6 
      7 #ifndef BASE_ATOMICOPS_INTERNALS_MAC_H_
      8 #define BASE_ATOMICOPS_INTERNALS_MAC_H_
      9 
     10 #include <libkern/OSAtomic.h>
     11 
     12 namespace base {
     13 namespace subtle {
     14 
     15 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
     16                                          Atomic32 old_value,
     17                                          Atomic32 new_value) {
     18   Atomic32 prev_value;
     19   do {
     20     if (OSAtomicCompareAndSwap32(old_value, new_value,
     21                                  const_cast<Atomic32*>(ptr))) {
     22       return old_value;
     23     }
     24     prev_value = *ptr;
     25   } while (prev_value == old_value);
     26   return prev_value;
     27 }
     28 
     29 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
     30                                          Atomic32 new_value) {
     31   Atomic32 old_value;
     32   do {
     33     old_value = *ptr;
     34   } while (!OSAtomicCompareAndSwap32(old_value, new_value,
     35                                      const_cast<Atomic32*>(ptr)));
     36   return old_value;
     37 }
     38 
     39 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
     40                                           Atomic32 increment) {
     41   return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
     42 }
     43 
     44 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
     45                                           Atomic32 increment) {
     46   return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
     47 }
     48 
// Issues a full memory barrier via libkern's OSMemoryBarrier().
inline void MemoryBarrier() {
  OSMemoryBarrier();
}
     52 
     53 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
     54                                        Atomic32 old_value,
     55                                        Atomic32 new_value) {
     56   Atomic32 prev_value;
     57   do {
     58     if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
     59                                         const_cast<Atomic32*>(ptr))) {
     60       return old_value;
     61     }
     62     prev_value = *ptr;
     63   } while (prev_value == old_value);
     64   return prev_value;
     65 }
     66 
// The lib kern interface does not distinguish between
// Acquire and Release memory barriers; they are equivalent,
// so this simply forwards to Acquire_CompareAndSwap (mirroring the 64-bit
// version below).
inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
     72 
// Plain store; no ordering guarantees beyond the write itself.
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}
     76 
// Acquire_Store: the store is performed first, then a full barrier.  The
// barrier placement relative to the store is the contract -- do not reorder.
inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}
     81 
// Release_Store: a full barrier is issued first, then the store.  The
// barrier placement relative to the store is the contract -- do not reorder.
inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}
     86 
// Plain load; no ordering guarantees beyond the read itself.
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}
     90 
// Acquire_Load: the load is performed first, then a full barrier.  The
// barrier placement relative to the load is the contract -- do not reorder.
inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}
     96 
// Release_Load: a full barrier is issued first, then the load.  The
// barrier placement relative to the load is the contract -- do not reorder.
inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  MemoryBarrier();
  return *ptr;
}
    101 
    102 #ifdef __LP64__
    103 
    104 // 64-bit implementation on 64-bit platform
    105 
    106 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
    107                                          Atomic64 old_value,
    108                                          Atomic64 new_value) {
    109   Atomic64 prev_value;
    110   do {
    111     if (OSAtomicCompareAndSwap64(old_value, new_value,
    112                                  reinterpret_cast<volatile int64_t*>(ptr))) {
    113       return old_value;
    114     }
    115     prev_value = *ptr;
    116   } while (prev_value == old_value);
    117   return prev_value;
    118 }
    119 
    120 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
    121                                          Atomic64 new_value) {
    122   Atomic64 old_value;
    123   do {
    124     old_value = *ptr;
    125   } while (!OSAtomicCompareAndSwap64(old_value, new_value,
    126                                      reinterpret_cast<volatile int64_t*>(ptr)));
    127   return old_value;
    128 }
    129 
    130 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
    131                                           Atomic64 increment) {
    132   return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
    133 }
    134 
    135 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
    136                                         Atomic64 increment) {
    137   return OSAtomicAdd64Barrier(increment,
    138                               reinterpret_cast<volatile int64_t*>(ptr));
    139 }
    140 
    141 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
    142                                        Atomic64 old_value,
    143                                        Atomic64 new_value) {
    144   Atomic64 prev_value;
    145   do {
    146     if (OSAtomicCompareAndSwap64Barrier(
    147         old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
    148       return old_value;
    149     }
    150     prev_value = *ptr;
    151   } while (prev_value == old_value);
    152   return prev_value;
    153 }
    154 
inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
    162 
// Plain store; no ordering guarantees beyond the write itself.
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}
    166 
// Acquire_Store: the store is performed first, then a full barrier.  The
// barrier placement relative to the store is the contract -- do not reorder.
inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}
    171 
// Release_Store: a full barrier is issued first, then the store.  The
// barrier placement relative to the store is the contract -- do not reorder.
inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}
    176 
// Plain load; no ordering guarantees beyond the read itself.
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}
    180 
// Acquire_Load: the load is performed first, then a full barrier.  The
// barrier placement relative to the load is the contract -- do not reorder.
inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}
    186 
// Release_Load: a full barrier is issued first, then the load.  The
// barrier placement relative to the load is the contract -- do not reorder.
inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  MemoryBarrier();
  return *ptr;
}
    191 
    192 #endif  // defined(__LP64__)
    193 
    194 }   // namespace base::subtle
    195 }   // namespace base
    196 
    197 #endif  // BASE_ATOMICOPS_INTERNALS_MAC_H_
    198