/*
 * Copyright (C) 2014 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

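/*
 * Inline atomic operations and memory barriers for x86-64. These implement
 * the android_atomic_* primitives with x86-64 instructions; the strong (TSO)
 * ordering of x86-64 lets several acquire/release variants get by with a
 * compiler barrier alone.
 */
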
#ifndef ANDROID_CUTILS_ATOMIC_X86_64_H
#define ANDROID_CUTILS_ATOMIC_X86_64_H

#include <stdint.h>

#ifndef ANDROID_ATOMIC_INLINE
#define ANDROID_ATOMIC_INLINE inline __attribute__((always_inline))
#endif

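/*
 * Compiler-only barrier: the empty asm with a "memory" clobber keeps the
 * compiler from reordering memory accesses across it, but emits no fence
 * instruction.
 */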
extern ANDROID_ATOMIC_INLINE
void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}

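/*
 * Full hardware barrier: MFENCE orders all prior loads and stores before any
 * later ones, and the "memory" clobber also prevents compiler reordering.
 */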
extern ANDROID_ATOMIC_INLINE
void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mfence" : : : "memory");
}

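/*
 * Acquire load: on x86-64 an ordinary load already has acquire semantics
 * (loads are not reordered with later loads or stores), so only compiler
 * reordering needs to be suppressed.
 */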
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{
    int32_t value = *ptr;
    android_compiler_barrier();
    return value;
}

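/*
 * Load preceded by a full barrier, so every earlier load and store completes
 * before the value is read.
 */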
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
    android_memory_barrier();
    return *ptr;
}

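/*
 * Store followed by a full barrier, so the new value is globally visible
 * before any later memory operation executes.
 */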
extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}

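/*
 * Release store: x86-64 does not reorder a store with earlier loads or
 * stores, so a compiler barrier before the plain store is sufficient.
 */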
extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
{
    android_compiler_barrier();
    *ptr = value;
}

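/*
 * Atomic compare-and-swap via LOCK CMPXCHG. Returns 0 if *ptr matched
 * old_value and was replaced with new_value, nonzero otherwise. The LOCK
 * prefix also acts as a full memory barrier.
 */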
extern ANDROID_ATOMIC_INLINE
int android_atomic_cas(int32_t old_value, int32_t new_value,
                       volatile int32_t *ptr)
{
    int32_t prev;
    __asm__ __volatile__ ("lock; cmpxchgl %1, %2"
                          : "=a" (prev)
                          : "q" (new_value), "m" (*ptr), "0" (old_value)
                          : "memory");
    return prev != old_value;
}

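/*
 * CAS with acquire semantics; the locked CMPXCHG above is already a full
 * barrier, so no extra fence is needed.
 */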
extern ANDROID_ATOMIC_INLINE
int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
                               volatile int32_t *ptr)
{
    /* Loads are not reordered with other loads. */
    return android_atomic_cas(old_value, new_value, ptr);
}

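/*
 * CAS with release semantics; as above, the locked instruction already
 * provides the required ordering.
 */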
extern ANDROID_ATOMIC_INLINE
int android_atomic_release_cas(int32_t old_value, int32_t new_value,
                               volatile int32_t *ptr)
{
    /* Stores are not reordered with other stores. */
    return android_atomic_cas(old_value, new_value, ptr);
}

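/*
 * Atomically adds increment to *ptr via LOCK XADD and returns the value
 * *ptr held before the addition.
 */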
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_add(int32_t increment, volatile int32_t *ptr)
{
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "+r" (increment), "+m" (*ptr)
                          : : "memory");
    /* increment now holds the old value of *ptr */
    return increment;
}

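/* Atomically increments *addr and returns its previous value. */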
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}

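/* Atomically decrements *addr and returns its previous value. */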
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}

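/*
 * Atomically ANDs value into *ptr using a CAS retry loop; returns the
 * previous value. The loop retries only if another thread changed *ptr
 * between the read and the CAS.
 */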
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

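/*
 * Atomically ORs value into *ptr using the same CAS retry loop; returns the
 * previous value.
 */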
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

#endif /* ANDROID_CUTILS_ATOMIC_X86_64_H */