// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation for compiler-based
// ThreadSanitizer. Use base/atomicops.h instead.
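//
// A minimal usage sketch (hypothetical client code; real callers should
// include base/atomicops.h rather than this header directly):
//
//   #include "base/atomicops.h"
//
//   base::subtle::Atomic32 ready = 0;
//   base::subtle::Release_Store(&ready, 1);                  // publisher
//   base::subtle::Atomic32 seen = base::subtle::Acquire_Load(&ready);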

#ifndef BASE_ATOMICOPS_INTERNALS_TSAN_H_
#define BASE_ATOMICOPS_INTERNALS_TSAN_H_

#include "base/base_export.h"

// This struct is not part of the public API of this module; clients may not
// use it.  (However, it's exported via BASE_EXPORT because clients implicitly
// do use it at link time by inlining these functions.)
// Features of the host x86 CPU.  Values may not be correct before main() is
// run, but are set conservatively.
struct AtomicOps_x86CPUFeatureStruct {
  bool has_amd_lock_mb_bug;  // Processor has AMD memory-barrier bug; do lfence
                             // after acquire compare-and-swap.
  bool has_sse2;             // Processor has SSE2.
};
BASE_EXPORT extern struct AtomicOps_x86CPUFeatureStruct
    AtomicOps_Internalx86CPUFeatures;
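
// A sketch of how has_amd_lock_mb_bug might be consulted, per the field
// comment above (hypothetical; this header does not itself emit the
// workaround):
//
//   if (AtomicOps_Internalx86CPUFeatures.has_amd_lock_mb_bug)
//     __asm__ __volatile__("lfence" : : : "memory");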

#define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")

namespace base {
namespace subtle {

#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

typedef char  __tsan_atomic8;
typedef short __tsan_atomic16;  // NOLINT
typedef int   __tsan_atomic32;
typedef long  __tsan_atomic64;  // NOLINT

#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
typedef __int128 __tsan_atomic128;
#define __TSAN_HAS_INT128 1
#else
typedef char     __tsan_atomic128;
#define __TSAN_HAS_INT128 0
#endif
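
// When 128-bit atomics are unavailable, __tsan_atomic128 is a dummy char
// typedef so the 128-bit declarations below still parse; callers must check
// __TSAN_HAS_INT128 before using them.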

typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst,
} __tsan_memory_order;
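
// These values mirror the C++11 std::memory_order enumerators; e.g.
// __tsan_memory_order_acquire corresponds to std::memory_order_acquire.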

__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
    __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
    __tsan_memory_order mo);

void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
    __tsan_memory_order mo);
void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
    __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);

int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);

int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
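
// The weak/strong compare-exchange functions above follow the C11
// atomic_compare_exchange_* contract: they return nonzero on success, and on
// failure write the value actually observed in *a back into *c.  The
// wrappers below rely on that write-back to report the previous value.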

__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);

void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // #ifndef TSAN_INTERFACE_ATOMIC_H
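
// The wrappers below implement the base/atomicops.h operations in terms of
// the TSan interface above, so that ThreadSanitizer observes every atomic
// access with an explicit memory order.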

inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}
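
// Usage sketch (hypothetical): the return value is the previous contents of
// *ptr, which equals old_value exactly when the swap took effect.
//
//   Atomic32 v = 0;
//   Atomic32 prev = NoBarrier_CompareAndSwap(&v, 0, 1);
//   // prev == 0 here, and v is now 1 (assuming no concurrent writers).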

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_acquire);
}

inline Atomic32 Release_AtomicExchange(volatile Atomic32 *ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_release);
}

inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}
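
// Note that __tsan_atomic32_fetch_add returns the value *before* the
// addition, so the increment wrappers above add |increment| back in order to
// return the new value, e.g. (hypothetical):
//
//   Atomic32 counter = 0;
//   Atomic32 now = NoBarrier_AtomicIncrement(&counter, 1);  // now == 1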

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32 *ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}
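
// Acquire_Store and Release_Load are legacy operations with unusual
// semantics; here each is implemented as a relaxed access paired with a
// sequentially consistent fence, which is at least as strong as the names
// suggest.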

inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_AtomicExchange(volatile Atomic64 *ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire);
}

inline Atomic64 Release_AtomicExchange(volatile Atomic64 *ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
                                          Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
                                        Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline void NoBarrier_Store(volatile Atomic64 *ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64 *ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void MemoryBarrier() {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}
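
// MemoryBarrier() issues a full (sequentially consistent) fence.  A sketch
// of the classic Dekker-style store/load pattern it supports (hypothetical;
// assumes a peer thread runs the symmetric sequence on flag_b/flag_a):
//
//   NoBarrier_Store(&flag_a, 1);
//   MemoryBarrier();
//   if (NoBarrier_Load(&flag_b) == 0) {
//     // The peer has not passed its barrier yet, so it will observe our
//     // store to flag_a.
//   }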

}  // namespace subtle
}  // namespace base

#undef ATOMICOPS_COMPILER_BARRIER

#endif  // BASE_ATOMICOPS_INTERNALS_TSAN_H_