// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation for compiler-based
// ThreadSanitizer. Use base/atomicops.h instead.
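//
// Each operation below maps directly onto a __tsan_atomic* intrinsic from
// the ThreadSanitizer runtime, passing a __tsan_memory_order that matches
// the ordering the atomicops name promises.
//
// Illustrative use through the public header (a sketch only; Release_Store
// and Acquire_Load are the functions defined below):
//
//   base::subtle::Atomic32 ready = 0;
//   base::subtle::Release_Store(&ready, 1);         // writer publishes
//   if (base::subtle::Acquire_Load(&ready) == 1) {  // reader synchronizes
//     // Writes made before the Release_Store are visible here.
//   }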

#ifndef BASE_ATOMICOPS_INTERNALS_TSAN_H_
#define BASE_ATOMICOPS_INTERNALS_TSAN_H_

#include <sanitizer/tsan_interface_atomic.h>

namespace base {
namespace subtle {

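// Atomically compares *ptr with old_value and, if they are equal, writes
// new_value to *ptr. Returns the value *ptr held before the operation
// (equal to old_value iff the swap happened). No ordering constraints.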
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

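// Exchange: atomically stores new_value into *ptr and returns the previous
// value. The three variants differ only in the memory order they request.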
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_acquire);
}

inline Atomic32 Release_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_release);
}

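// Atomically adds increment to *ptr and returns the incremented value
// (note the "increment +" adjustment of the fetched pre-add value).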
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

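// CompareAndSwap with ordering: the Acquire variant requests acquire
// ordering for both the success and failure paths; the Release variant
// requests release on success and relaxed on failure.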
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

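// Plain, acquire, and release stores. Acquire_Store keeps the historical
// atomicops meaning: a relaxed store followed by a full (sequentially
// consistent) fence, not a C++11-style acquire operation.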
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
}

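// Plain, acquire, and release loads. Release_Load mirrors Acquire_Store
// above: a full (sequentially consistent) fence followed by a relaxed load.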
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

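// 64-bit counterparts of the operations above, with identical semantics.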
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire);
}

inline Atomic64 Release_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

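// Full (sequentially consistent) memory fence.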
inline void MemoryBarrier() {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

}  // namespace subtle
}  // namespace base

#endif  // BASE_ATOMICOPS_INTERNALS_TSAN_H_