/*
 * Copyright (C) 2014 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef ANDROID_CUTILS_ATOMIC_AARCH64_H
#define ANDROID_CUTILS_ATOMIC_AARCH64_H

#include <stdint.h>

#ifndef ANDROID_ATOMIC_INLINE
#define ANDROID_ATOMIC_INLINE inline __attribute__((always_inline))
#endif

/*
   TODO(AArch64): Revisit the functions below and check for potential
   optimizations using assembly code or otherwise.
*/
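
/*
   For illustration of one such optimization (a hypothetical helper, not
   referenced anywhere else in this header): AArch64 provides dedicated
   load-acquire/store-release instructions, so an acquire load could be
   expressed with "ldar" instead of a plain load followed by a full
   "dmb ish" barrier.
*/
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_acquire_load_ldar_sketch(volatile const int32_t *ptr)
{
    int32_t value;
    /* Load-acquire: later memory accesses cannot be reordered before it. */
    __asm__ __volatile__ ("ldar %w0, [%1]"
                          : "=r" (value)
                          : "r" (ptr)
                          : "memory");
    return value;
}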

extern ANDROID_ATOMIC_INLINE
void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}

extern ANDROID_ATOMIC_INLINE
void android_memory_barrier(void)
{
    __asm__ __volatile__ ("dmb ish" : : : "memory");
}

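/*
 * Acquire/release loads and stores, implemented conservatively: a plain
 * access paired with a full "dmb ish" barrier on the appropriate side,
 * rather than the dedicated AArch64 load-acquire/store-release
 * instructions.
 */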
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{
    int32_t value = *ptr;
    android_memory_barrier();
    return value;
}

extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
    android_memory_barrier();
    return *ptr;
}

extern ANDROID_ATOMIC_INLINE
void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}

extern ANDROID_ATOMIC_INLINE
void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
{
    android_memory_barrier();
    *ptr = value;
}

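/*
 * Compare-and-swap: returns 0 if the swap was performed (*ptr contained
 * old_value and was replaced by new_value), nonzero otherwise.
 */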
extern ANDROID_ATOMIC_INLINE
int android_atomic_cas(int32_t old_value, int32_t new_value,
                       volatile int32_t *ptr)
{
    return __sync_val_compare_and_swap(ptr, old_value, new_value) != old_value;
}

extern ANDROID_ATOMIC_INLINE
int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
                               volatile int32_t *ptr)
{
    int status = android_atomic_cas(old_value, new_value, ptr);
    android_memory_barrier();
    return status;
}

extern ANDROID_ATOMIC_INLINE
int android_atomic_release_cas(int32_t old_value, int32_t new_value,
                               volatile int32_t *ptr)
{
    android_memory_barrier();
    return android_atomic_cas(old_value, new_value, ptr);
}

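/*
 * Atomically adds increment to *ptr via a CAS retry loop and returns the
 * previous value.
 */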
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_add(int32_t increment, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev + increment, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}

extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}

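/*
 * Atomic bitwise AND/OR, using the same CAS retry pattern as
 * android_atomic_add; each returns the previous value of *ptr.
 */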
extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

extern ANDROID_ATOMIC_INLINE
int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}

#endif /* ANDROID_CUTILS_ATOMIC_AARCH64_H */