/*-
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _STDATOMIC_H_
#define	_STDATOMIC_H_

#include <sys/cdefs.h>
#include <sys/types.h>
#include <stdbool.h>

#if __has_extension(c_atomic) || __has_extension(cxx_atomic)
#define	__CLANG_ATOMICS
#elif __GNUC_PREREQ__(4, 7)
#define	__GNUC_ATOMICS
#elif defined(__GNUC__)
#define	__SYNC_ATOMICS
#else
#error "stdatomic.h does not support your compiler"
#endif

/*
 * 7.17.1 Atomic lock-free macros.
 */

#ifdef __GCC_ATOMIC_BOOL_LOCK_FREE
#define	ATOMIC_BOOL_LOCK_FREE		__GCC_ATOMIC_BOOL_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR_LOCK_FREE
#define	ATOMIC_CHAR_LOCK_FREE		__GCC_ATOMIC_CHAR_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define	ATOMIC_CHAR16_T_LOCK_FREE	__GCC_ATOMIC_CHAR16_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define	ATOMIC_CHAR32_T_LOCK_FREE	__GCC_ATOMIC_CHAR32_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define	ATOMIC_WCHAR_T_LOCK_FREE	__GCC_ATOMIC_WCHAR_T_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_SHORT_LOCK_FREE
#define	ATOMIC_SHORT_LOCK_FREE		__GCC_ATOMIC_SHORT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_INT_LOCK_FREE
#define	ATOMIC_INT_LOCK_FREE		__GCC_ATOMIC_INT_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LONG_LOCK_FREE
#define	ATOMIC_LONG_LOCK_FREE		__GCC_ATOMIC_LONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_LLONG_LOCK_FREE
#define	ATOMIC_LLONG_LOCK_FREE		__GCC_ATOMIC_LLONG_LOCK_FREE
#endif
#ifdef __GCC_ATOMIC_POINTER_LOCK_FREE
#define	ATOMIC_POINTER_LOCK_FREE	__GCC_ATOMIC_POINTER_LOCK_FREE
#endif

/*
 * 7.17.2 Initialization.
 */

#if defined(__CLANG_ATOMICS)
#define	ATOMIC_VAR_INIT(value)		(value)
#define	atomic_init(obj, value)		__c11_atomic_init(obj, value)
#else
#define	ATOMIC_VAR_INIT(value)		{ .__val = (value) }
#define	atomic_init(obj, value)		((void)((obj)->__val = (value)))
#endif
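
/*
 * Usage sketch (illustrative only; "hits" and setup() are hypothetical
 * names, not part of this header): ATOMIC_VAR_INIT is for static or
 * aggregate initializers, atomic_init() for storage that already exists.
 * Neither form is itself an atomic operation, so perform it before the
 * object becomes visible to other threads.
 *
 *	static atomic_int hits = ATOMIC_VAR_INIT(0);
 *
 *	void
 *	setup(atomic_int *counter)
 *	{
 *		atomic_init(counter, 0);
 *	}
 */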

/*
 * Clang and recent GCC both provide predefined macros for the memory
 * orderings.  If we are using a compiler that doesn't define them, use
 * the Clang values; they are ignored in the fallback path anyway.
 */

#ifndef __ATOMIC_RELAXED
#define __ATOMIC_RELAXED		0
#endif
#ifndef __ATOMIC_CONSUME
#define __ATOMIC_CONSUME		1
#endif
#ifndef __ATOMIC_ACQUIRE
#define __ATOMIC_ACQUIRE		2
#endif
#ifndef __ATOMIC_RELEASE
#define __ATOMIC_RELEASE		3
#endif
#ifndef __ATOMIC_ACQ_REL
#define __ATOMIC_ACQ_REL		4
#endif
#ifndef __ATOMIC_SEQ_CST
#define __ATOMIC_SEQ_CST		5
#endif

/*
 * 7.17.3 Order and consistency.
 *
 * The memory_order_* constants that denote the barrier behaviour of the
 * atomic operations.
 */

typedef enum {
	memory_order_relaxed = __ATOMIC_RELAXED,
	memory_order_consume = __ATOMIC_CONSUME,
	memory_order_acquire = __ATOMIC_ACQUIRE,
	memory_order_release = __ATOMIC_RELEASE,
	memory_order_acq_rel = __ATOMIC_ACQ_REL,
	memory_order_seq_cst = __ATOMIC_SEQ_CST
} memory_order;
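
/*
 * Ordering sketch (illustrative only; "data", "ready" and use() are
 * hypothetical): a release store paired with an acquire load makes the
 * producer's earlier writes visible to a consumer that observes the flag.
 *
 *	int data;
 *	atomic_bool ready = ATOMIC_VAR_INIT(false);
 *
 *	// producer
 *	data = 42;
 *	atomic_store_explicit(&ready, true, memory_order_release);
 *
 *	// consumer
 *	if (atomic_load_explicit(&ready, memory_order_acquire))
 *		use(data);	// guaranteed to see data == 42
 */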

/*
 * 7.17.4 Fences.
 */

static __inline void
atomic_thread_fence(memory_order __order __unused)
{

#ifdef __CLANG_ATOMICS
	__c11_atomic_thread_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_thread_fence(__order);
#else
	__sync_synchronize();
#endif
}

static __inline void
atomic_signal_fence(memory_order __order __unused)
{

#ifdef __CLANG_ATOMICS
	__c11_atomic_signal_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_signal_fence(__order);
#else
	__asm volatile ("" ::: "memory");
#endif
}
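
/*
 * Fence sketch (illustrative only; "data" and "ready" are hypothetical):
 * a release fence before a relaxed store orders the preceding writes
 * against a matching acquire on the reader's side.
 *
 *	data = 42;
 *	atomic_thread_fence(memory_order_release);
 *	atomic_store_explicit(&ready, true, memory_order_relaxed);
 *
 * atomic_signal_fence() provides the same compiler-level ordering, but
 * only with respect to a signal handler running in the same thread.
 */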

/*
 * 7.17.5 Lock-free property.
 */

#if defined(_KERNEL)
/* Atomics in kernelspace are always lock-free. */
#define	atomic_is_lock_free(obj) \
	((void)(obj), (_Bool)1)
#elif defined(__CLANG_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof(*(obj)), obj)
#elif defined(__GNUC_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof((obj)->__val), &(obj)->__val)
#else
#define	atomic_is_lock_free(obj) \
	((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
#endif
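
/*
 * Sketch (illustrative only): atomic_is_lock_free() can be queried at
 * run time to decide whether a lock-based fallback is needed on this
 * target.
 *
 *	atomic_ullong c;
 *	if (!atomic_is_lock_free(&c))
 *		// operations on c may be implemented with an internal lock
 */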

/*
 * 7.17.6 Atomic integer types.
 */

#if !__has_extension(c_atomic) && !__has_extension(cxx_atomic)
/*
 * No native support for _Atomic(). Place object in structure to prevent
 * most forms of direct non-atomic access.
 */
#define _Atomic(T)              struct { T volatile __val; }
#endif

typedef _Atomic(bool)			atomic_bool;
typedef _Atomic(char)			atomic_char;
typedef _Atomic(signed char)		atomic_schar;
typedef _Atomic(unsigned char)		atomic_uchar;
typedef _Atomic(short)			atomic_short;
typedef _Atomic(unsigned short)		atomic_ushort;
typedef _Atomic(int)			atomic_int;
typedef _Atomic(unsigned int)		atomic_uint;
typedef _Atomic(long)			atomic_long;
typedef _Atomic(unsigned long)		atomic_ulong;
typedef _Atomic(long long)		atomic_llong;
typedef _Atomic(unsigned long long)	atomic_ullong;
typedef _Atomic(char16_t)		atomic_char16_t;
typedef _Atomic(char32_t)		atomic_char32_t;
typedef _Atomic(wchar_t)		atomic_wchar_t;
typedef _Atomic(int_least8_t)		atomic_int_least8_t;
typedef _Atomic(uint_least8_t)	atomic_uint_least8_t;
typedef _Atomic(int_least16_t)	atomic_int_least16_t;
typedef _Atomic(uint_least16_t)	atomic_uint_least16_t;
typedef _Atomic(int_least32_t)	atomic_int_least32_t;
typedef _Atomic(uint_least32_t)	atomic_uint_least32_t;
typedef _Atomic(int_least64_t)	atomic_int_least64_t;
typedef _Atomic(uint_least64_t)	atomic_uint_least64_t;
typedef _Atomic(int_fast8_t)		atomic_int_fast8_t;
typedef _Atomic(uint_fast8_t)		atomic_uint_fast8_t;
typedef _Atomic(int_fast16_t)		atomic_int_fast16_t;
typedef _Atomic(uint_fast16_t)	atomic_uint_fast16_t;
typedef _Atomic(int_fast32_t)		atomic_int_fast32_t;
typedef _Atomic(uint_fast32_t)	atomic_uint_fast32_t;
typedef _Atomic(int_fast64_t)		atomic_int_fast64_t;
typedef _Atomic(uint_fast64_t)	atomic_uint_fast64_t;
typedef _Atomic(intptr_t)		atomic_intptr_t;
typedef _Atomic(uintptr_t)		atomic_uintptr_t;
typedef _Atomic(size_t)		atomic_size_t;
typedef _Atomic(ptrdiff_t)		atomic_ptrdiff_t;
typedef _Atomic(intmax_t)		atomic_intmax_t;
typedef _Atomic(uintmax_t)		atomic_uintmax_t;

/*
 * 7.17.7 Operations on atomic types.
 */

/*
 * Compiler-specific operations.
 */

#if defined(__CLANG_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__c11_atomic_compare_exchange_strong(object, expected, desired,	\
	    success, failure)
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	__c11_atomic_compare_exchange_weak(object, expected, desired,	\
	    success, failure)
#define	atomic_exchange_explicit(object, desired, order)		\
	__c11_atomic_exchange(object, desired, order)
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__c11_atomic_fetch_add(object, operand, order)
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__c11_atomic_fetch_and(object, operand, order)
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__c11_atomic_fetch_or(object, operand, order)
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__c11_atomic_fetch_sub(object, operand, order)
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__c11_atomic_fetch_xor(object, operand, order)
#define	atomic_load_explicit(object, order)				\
	__c11_atomic_load(object, order)
#define	atomic_store_explicit(object, desired, order)			\
	__c11_atomic_store(object, desired, order)
#elif defined(__GNUC_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__atomic_compare_exchange_n(&(object)->__val, expected,		\
	    desired, 0, success, failure)
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	__atomic_compare_exchange_n(&(object)->__val, expected,		\
	    desired, 1, success, failure)
#define	atomic_exchange_explicit(object, desired, order)		\
	__atomic_exchange_n(&(object)->__val, desired, order)
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__atomic_fetch_add(&(object)->__val, operand, order)
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__atomic_fetch_and(&(object)->__val, operand, order)
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__atomic_fetch_or(&(object)->__val, operand, order)
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__atomic_fetch_sub(&(object)->__val, operand, order)
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__atomic_fetch_xor(&(object)->__val, operand, order)
#define	atomic_load_explicit(object, order)				\
	__atomic_load_n(&(object)->__val, order)
#define	atomic_store_explicit(object, desired, order)			\
	__atomic_store_n(&(object)->__val, desired, order)
#else
#define	__atomic_apply_stride(object, operand) \
	(((__typeof__((object)->__val))0) + (operand))
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)	__extension__ ({			\
	__typeof__(expected) __ep = (expected);				\
	__typeof__(*__ep) __e = *__ep;					\
	(void)(success); (void)(failure);				\
	(bool)((*__ep = __sync_val_compare_and_swap(&(object)->__val,	\
	    __e, desired)) == __e);					\
})
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	atomic_compare_exchange_strong_explicit(object, expected,	\
		desired, success, failure)
#if __has_builtin(__sync_swap)
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define	atomic_exchange_explicit(object, desired, order)		\
	((void)(order), __sync_swap(&(object)->__val, desired))
#else
/*
 * __sync_lock_test_and_set() is only guaranteed to be an acquire barrier
 * (although in practice it is usually a full barrier), so we need an
 * explicit full barrier before it.
 */
#define	atomic_exchange_explicit(object, desired, order)		\
__extension__ ({							\
	__typeof__(object) __o = (object);				\
	__typeof__(desired) __d = (desired);				\
	(void)(order);							\
	__sync_synchronize();						\
	__sync_lock_test_and_set(&(__o)->__val, __d);			\
})
#endif
#define	atomic_fetch_add_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_add(&(object)->__val,		\
	    __atomic_apply_stride(object, operand)))
#define	atomic_fetch_and_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_and(&(object)->__val, operand))
#define	atomic_fetch_or_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_or(&(object)->__val, operand))
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_sub(&(object)->__val,		\
	    __atomic_apply_stride(object, operand)))
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_xor(&(object)->__val, operand))
#define	atomic_load_explicit(object, order)				\
	((void)(order), __sync_fetch_and_add(&(object)->__val, 0))
#define	atomic_store_explicit(object, desired, order)			\
	((void)atomic_exchange_explicit(object, desired, order))
#endif
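
/*
 * Example (illustrative sketch; atomic_store_max() is a hypothetical
 * helper, not part of this header): the compare-exchange operations are
 * typically used in a retry loop.  On failure the current value is
 * written back into "expected", so the loop simply re-tests it.
 *
 *	static void
 *	atomic_store_max(atomic_uint *obj, unsigned int v)
 *	{
 *		unsigned int cur = atomic_load_explicit(obj,
 *		    memory_order_relaxed);
 *		while (cur < v &&
 *		    !atomic_compare_exchange_weak_explicit(obj, &cur, v,
 *		    memory_order_relaxed, memory_order_relaxed))
 *			;	// cur now holds the value that was found
 *	}
 */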

/*
 * Convenience functions.
 *
 * Don't provide these in kernel space; there we should be disciplined
 * enough to always specify explicit memory orderings.
 */

#ifndef _KERNEL
#define	atomic_compare_exchange_strong(object, expected, desired)	\
	atomic_compare_exchange_strong_explicit(object, expected,	\
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_compare_exchange_weak(object, expected, desired)		\
	atomic_compare_exchange_weak_explicit(object, expected,		\
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_exchange(object, desired)				\
	atomic_exchange_explicit(object, desired, memory_order_seq_cst)
#define	atomic_fetch_add(object, operand)				\
	atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_and(object, operand)				\
	atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_or(object, operand)				\
	atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_sub(object, operand)				\
	atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_xor(object, operand)				\
	atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
#define	atomic_load(object)						\
	atomic_load_explicit(object, memory_order_seq_cst)
#define	atomic_store(object, desired)					\
	atomic_store_explicit(object, desired, memory_order_seq_cst)
#endif /* !_KERNEL */
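
/*
 * Example (illustrative; "nrequests" is a hypothetical variable): the
 * implicit forms default to memory_order_seq_cst, so a shared counter
 * reduces to:
 *
 *	static atomic_ulong nrequests = ATOMIC_VAR_INIT(0);
 *
 *	atomic_fetch_add(&nrequests, 1);
 *	unsigned long seen = atomic_load(&nrequests);
 */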

/*
 * 7.17.8 Atomic flag type and operations.
 *
 * XXX: Assume atomic_bool can be used as an atomic_flag. Is there some
 * kind of compiler built-in type we could use?
 */

typedef struct {
	atomic_bool	__flag;
} atomic_flag;

#define	ATOMIC_FLAG_INIT		{ ATOMIC_VAR_INIT(0) }

static __inline bool
atomic_flag_test_and_set_explicit(volatile atomic_flag *__object,
    memory_order __order)
{
	return (atomic_exchange_explicit(&__object->__flag, 1, __order));
}

static __inline void
atomic_flag_clear_explicit(volatile atomic_flag *__object, memory_order __order)
{

	atomic_store_explicit(&__object->__flag, 0, __order);
}

#ifndef _KERNEL
static __inline bool
atomic_flag_test_and_set(volatile atomic_flag *__object)
{

	return (atomic_flag_test_and_set_explicit(__object,
	    memory_order_seq_cst));
}

static __inline void
atomic_flag_clear(volatile atomic_flag *__object)
{

	atomic_flag_clear_explicit(__object, memory_order_seq_cst);
}
#endif /* !_KERNEL */
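
/*
 * Example (illustrative sketch; "lock" is a hypothetical variable):
 * atomic_flag is the usual building block for a minimal spinlock.
 *
 *	static atomic_flag lock = ATOMIC_FLAG_INIT;
 *
 *	while (atomic_flag_test_and_set_explicit(&lock, memory_order_acquire))
 *		;	// spin until the flag was previously clear
 *	// ... critical section ...
 *	atomic_flag_clear_explicit(&lock, memory_order_release);
 */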

#endif /* !_STDATOMIC_H_ */