/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_CUTILS_ATOMIC_X86_H
#define ANDROID_CUTILS_ATOMIC_X86_H

#include <stdint.h>

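/*
 * Compiler-only barrier: prevents the compiler from reordering memory
 * accesses across this point.  It emits no machine instructions.
 */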
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}

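/*
 * Full memory barrier.  On SMP builds this issues an mfence; on
 * uniprocessor builds (ANDROID_SMP == 0) a compiler barrier is enough,
 * since no other core can observe a different ordering.
 */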
#if ANDROID_SMP == 0
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
#else
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mfence" : : : "memory");
}
#endif

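/*
 * Load *ptr with acquire semantics: later memory accesses may not be
 * reordered before the load.  x86 does not reorder later accesses ahead
 * of a load, so a compiler barrier is sufficient.
 */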
extern inline int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{
    int32_t value = *ptr;
    android_compiler_barrier();
    return value;
}

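/*
 * Load *ptr after android_memory_barrier(), so that earlier memory
 * accesses cannot be reordered past the load.
 */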
extern inline int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
    android_memory_barrier();
    return *ptr;
}

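/*
 * Store value to *ptr, then issue android_memory_barrier() so the store
 * is globally visible before any later memory accesses.
 */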
extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}

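/*
 * Store value to *ptr with release semantics: earlier memory accesses
 * may not be reordered past the store.  x86 does not reorder stores
 * with earlier loads or stores, so a compiler barrier is sufficient.
 */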
extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr)
{
    android_compiler_barrier();
    *ptr = value;
}

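/*
 * Compare-and-swap: if *ptr == old_value, atomically set *ptr to
 * new_value.  Returns 0 if the swap was performed, nonzero otherwise.
 */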
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t prev;
    __asm__ __volatile__ ("lock; cmpxchgl %1, %2"
                          : "=a" (prev)
                          : "q" (new_value), "m" (*ptr), "0" (old_value)
                          : "memory");
    return prev != old_value;
}

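/*
 * Compare-and-swap with acquire ordering; same return convention as
 * android_atomic_cas().
 */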
extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    /* Loads are not reordered with other loads. */
    return android_atomic_cas(old_value, new_value, ptr);
}

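/*
 * Compare-and-swap with release ordering; same return convention as
 * android_atomic_cas().
 */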
extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    /* Stores are not reordered with other stores. */
    return android_atomic_cas(old_value, new_value, ptr);
}

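/*
 * Atomically exchange *ptr with new_value and return the previous value.
 * xchgl with a memory operand is implicitly locked on x86, so no lock
 * prefix is needed.
 */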
extern inline int32_t android_atomic_swap(int32_t new_value,
                                          volatile int32_t *ptr)
{
    __asm__ __volatile__ ("xchgl %1, %0"
                          : "=r" (new_value)
                          : "m" (*ptr), "0" (new_value)
                          : "memory");
    /* new_value now holds the old value of *ptr */
    return new_value;
}

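/*
 * Atomically add increment to *ptr and return the previous value
 * (fetch-and-add via lock xadd).
 */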
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "+r" (increment), "+m" (*ptr)
                          : : "memory");
    /* increment now holds the old value of *ptr */
    return increment;
}

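/*
 * Atomic increment and decrement of *addr; both return the previous
 * value.
 */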
extern inline int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}

extern inline int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}

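/*
 * Atomic bitwise AND/OR: update *ptr with a compare-and-swap retry loop
 * and return the previous value.
 */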
extern inline int32_t android_atomic_and(int32_t value,
                                         volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
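
/*
 * Illustrative usage sketch (not part of this header's API): a
 * hypothetical helper that increments a counter only while it is below
 * a caller-chosen limit, using the same CAS retry pattern as
 * android_atomic_and/or above.  The name increment_if_below and its
 * contract are examples only.
 *
 *     static int increment_if_below(volatile int32_t *counter, int32_t limit)
 *     {
 *         int32_t prev;
 *         do {
 *             prev = android_atomic_acquire_load(counter);
 *             if (prev >= limit)
 *                 return 0;   // limit reached, *counter unchanged
 *         } while (android_atomic_release_cas(prev, prev + 1, counter) != 0);
 *         return 1;           // *counter was atomically set to prev + 1
 *     }
 */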

#endif /* ANDROID_CUTILS_ATOMIC_X86_H */