/*-
 * Copyright (c) 2011 Ed Schouten <ed@FreeBSD.org>
 *                    David Chisnall <theraven@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _STDATOMIC_H_
#define	_STDATOMIC_H_

#include <sys/cdefs.h>


#if defined(__cplusplus) && __cplusplus >= 201103L && defined(_USING_LIBCXX)
# ifdef __clang__
#  if __has_feature(cxx_atomic)
#   define _STDATOMIC_HAVE_ATOMIC
#  endif
# else /* gcc */
#  if __GNUC_PREREQ(4, 7)
#   define _STDATOMIC_HAVE_ATOMIC
#  endif
# endif
#endif

#ifdef _STDATOMIC_HAVE_ATOMIC

/* We have a usable C++ <atomic>; use it instead.  */

#include <atomic>

#undef _Atomic
	/* Also defined by <atomic> for gcc.  But not used in macros. */
	/* Also a clang intrinsic.                                    */
	/* Should not be used by client code before this file is      */
	/* included.  The definitions in <atomic> themselves see      */
	/* the old definition, as they should.                        */
	/* Client code sees the following definition.                 */

#define _Atomic(t) std::atomic<t>
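
/*
 * Illustrative usage sketch (not part of the original header): with the
 * definition above, C11-style client code keeps compiling when built as
 * C++, because _Atomic(T) now names std::atomic<T>.  The variable name
 * "counter" below is hypothetical.
 *
 *	_Atomic(int) counter(0);	// really a std::atomic<int>
 *	atomic_store(&counter, 42);
 *	int v = atomic_load(&counter);	// v == 42
 */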

using std::atomic_is_lock_free;
using std::atomic_init;
using std::atomic_store;
using std::atomic_store_explicit;
using std::atomic_load;
using std::atomic_load_explicit;
using std::atomic_exchange;
using std::atomic_exchange_explicit;
using std::atomic_compare_exchange_strong;
using std::atomic_compare_exchange_strong_explicit;
using std::atomic_compare_exchange_weak;
using std::atomic_compare_exchange_weak_explicit;
using std::atomic_fetch_add;
using std::atomic_fetch_add_explicit;
using std::atomic_fetch_sub;
using std::atomic_fetch_sub_explicit;
using std::atomic_fetch_or;
using std::atomic_fetch_or_explicit;
using std::atomic_fetch_xor;
using std::atomic_fetch_xor_explicit;
using std::atomic_fetch_and;
using std::atomic_fetch_and_explicit;
using std::atomic_thread_fence;
using std::atomic_signal_fence;

using std::memory_order;
using std::memory_order_relaxed;
using std::memory_order_consume;
using std::memory_order_acquire;
using std::memory_order_release;
using std::memory_order_acq_rel;
using std::memory_order_seq_cst;

using std::atomic_bool;
using std::atomic_char;
using std::atomic_schar;
using std::atomic_uchar;
using std::atomic_short;
using std::atomic_ushort;
using std::atomic_int;
using std::atomic_uint;
using std::atomic_long;
using std::atomic_ulong;
using std::atomic_llong;
using std::atomic_ullong;
using std::atomic_char16_t;
using std::atomic_char32_t;
using std::atomic_wchar_t;
using std::atomic_int_least8_t;
using std::atomic_uint_least8_t;
using std::atomic_int_least16_t;
using std::atomic_uint_least16_t;
using std::atomic_int_least32_t;
using std::atomic_uint_least32_t;
using std::atomic_int_least64_t;
using std::atomic_uint_least64_t;
using std::atomic_int_fast8_t;
using std::atomic_uint_fast8_t;
using std::atomic_int_fast16_t;
using std::atomic_uint_fast16_t;
using std::atomic_int_fast32_t;
using std::atomic_uint_fast32_t;
using std::atomic_int_fast64_t;
using std::atomic_uint_fast64_t;
using std::atomic_intptr_t;
using std::atomic_uintptr_t;
using std::atomic_size_t;
using std::atomic_ptrdiff_t;
using std::atomic_intmax_t;
using std::atomic_uintmax_t;

#else /* <atomic> unavailable, possibly because this is C, not C++ */

#include <sys/types.h>
#include <stdbool.h>

/*
 * C: Do it ourselves.
 * Note that the runtime representation defined here should be compatible
 * with the C++ one, i.e. an _Atomic(T) needs to contain the same
 * bits as a T.
 */

#include <stddef.h>  /* For ptrdiff_t.                          */
#include <stdint.h>  /* TODO: Should pollute namespace less.    */
#if __STDC_VERSION__ >= 201112L
# include <uchar.h>  /* For char16_t and char32_t.              */
#endif

#ifdef __clang__
# if __has_extension(c_atomic) || __has_extension(cxx_atomic)
#  define       __CLANG_ATOMICS
# else
#  error "stdatomic.h does not support your compiler"
# endif
# if __has_builtin(__sync_swap)
#  define __HAS_BUILTIN_SYNC_SWAP
# endif
#else
# if __GNUC_PREREQ(4, 7)
#  define	__GNUC_ATOMICS
# else
#  define	__SYNC_ATOMICS
#  ifdef __cplusplus
#   define       __ATOMICS_AVOID_DOT_INIT
#  endif
# endif
#endif

/*
 * 7.17.1 Atomic lock-free macros.
 */

#ifdef __GCC_ATOMIC_BOOL_LOCK_FREE
#define	ATOMIC_BOOL_LOCK_FREE		__GCC_ATOMIC_BOOL_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_BOOL_LOCK_FREE           2 /* For all modern platforms */
#endif
#ifdef __GCC_ATOMIC_CHAR_LOCK_FREE
#define	ATOMIC_CHAR_LOCK_FREE		__GCC_ATOMIC_CHAR_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_CHAR_LOCK_FREE           2
#endif
#ifdef __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define	ATOMIC_CHAR16_T_LOCK_FREE	__GCC_ATOMIC_CHAR16_T_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_CHAR16_T_LOCK_FREE       2
#endif
#ifdef __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define	ATOMIC_CHAR32_T_LOCK_FREE	__GCC_ATOMIC_CHAR32_T_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_CHAR32_T_LOCK_FREE       2
#endif
#ifdef __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define	ATOMIC_WCHAR_T_LOCK_FREE	__GCC_ATOMIC_WCHAR_T_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_WCHAR_T_LOCK_FREE        2
#endif
#ifdef __GCC_ATOMIC_SHORT_LOCK_FREE
#define	ATOMIC_SHORT_LOCK_FREE		__GCC_ATOMIC_SHORT_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_SHORT_LOCK_FREE          2
#endif
#ifdef __GCC_ATOMIC_INT_LOCK_FREE
#define	ATOMIC_INT_LOCK_FREE		__GCC_ATOMIC_INT_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_INT_LOCK_FREE            2
#endif
#ifdef __GCC_ATOMIC_LONG_LOCK_FREE
#define	ATOMIC_LONG_LOCK_FREE		__GCC_ATOMIC_LONG_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_LONG_LOCK_FREE           2
#endif
#ifdef __GCC_ATOMIC_LLONG_LOCK_FREE
#define	ATOMIC_LLONG_LOCK_FREE		__GCC_ATOMIC_LLONG_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_LLONG_LOCK_FREE          1 /* maybe */
#endif
#ifdef __GCC_ATOMIC_POINTER_LOCK_FREE
#define	ATOMIC_POINTER_LOCK_FREE	__GCC_ATOMIC_POINTER_LOCK_FREE
#elif defined(__SYNC_ATOMICS)
#define	ATOMIC_POINTER_LOCK_FREE        2
#endif

/*
 * 7.17.2 Initialization.
 */

#if defined(__CLANG_ATOMICS)
#define	ATOMIC_VAR_INIT(value)		(value)
#define	atomic_init(obj, value)		__c11_atomic_init(obj, value)
#else
#ifdef __ATOMICS_AVOID_DOT_INIT
#define	ATOMIC_VAR_INIT(value)		{ value }
#else
#define	ATOMIC_VAR_INIT(value)		{ .__val = (value) }
#endif
#define	atomic_init(obj, value)		((void)((obj)->__val = (value)))
#endif
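
/*
 * Usage sketch (illustrative, not part of the original header): an atomic
 * object can be initialized statically with ATOMIC_VAR_INIT or at run time
 * with atomic_init; neither is itself an atomic operation.  The names
 * "refcount" and "flags" are hypothetical.
 *
 *	static atomic_int refcount = ATOMIC_VAR_INIT(1);
 *
 *	atomic_uint flags;
 *	atomic_init(&flags, 0);
 */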

/*
 * Clang and recent GCC both provide predefined macros for the memory
 * orderings.  If we are using a compiler that doesn't define them, use the
 * clang values - these will be ignored in the fallback path.
 */

#ifndef __ATOMIC_RELAXED
#define __ATOMIC_RELAXED		0
#endif
#ifndef __ATOMIC_CONSUME
#define __ATOMIC_CONSUME		1
#endif
#ifndef __ATOMIC_ACQUIRE
#define __ATOMIC_ACQUIRE		2
#endif
#ifndef __ATOMIC_RELEASE
#define __ATOMIC_RELEASE		3
#endif
#ifndef __ATOMIC_ACQ_REL
#define __ATOMIC_ACQ_REL		4
#endif
#ifndef __ATOMIC_SEQ_CST
#define __ATOMIC_SEQ_CST		5
#endif

/*
 * 7.17.3 Order and consistency.
 *
 * The memory_order_* constants that denote the barrier behaviour of the
 * atomic operations.
 * The enum values must be identical to those used by the
 * C++ <atomic> header.
 */

typedef enum {
	memory_order_relaxed = __ATOMIC_RELAXED,
	memory_order_consume = __ATOMIC_CONSUME,
	memory_order_acquire = __ATOMIC_ACQUIRE,
	memory_order_release = __ATOMIC_RELEASE,
	memory_order_acq_rel = __ATOMIC_ACQ_REL,
	memory_order_seq_cst = __ATOMIC_SEQ_CST
} memory_order;

/*
 * 7.17.4 Fences.
 */

static __inline void
atomic_thread_fence(memory_order __order __attribute__((unused)))
{

#ifdef __CLANG_ATOMICS
	__c11_atomic_thread_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_thread_fence(__order);
#else
	__sync_synchronize();
#endif
}

static __inline void
atomic_signal_fence(memory_order __order __attribute__((unused)))
{

#ifdef __CLANG_ATOMICS
	__c11_atomic_signal_fence(__order);
#elif defined(__GNUC_ATOMICS)
	__atomic_signal_fence(__order);
#else
	__asm volatile ("" ::: "memory");
#endif
}

/*
 * 7.17.5 Lock-free property.
 */

#if defined(_KERNEL)
/* Atomics in kernelspace are always lock-free. */
#define	atomic_is_lock_free(obj) \
	((void)(obj), (_Bool)1)
#elif defined(__CLANG_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__c11_atomic_is_lock_free(sizeof(*(obj)))
#elif defined(__GNUC_ATOMICS)
#define	atomic_is_lock_free(obj) \
	__atomic_is_lock_free(sizeof((obj)->__val), &(obj)->__val)
#else
#define	atomic_is_lock_free(obj) \
	((void)(obj), sizeof((obj)->__val) <= sizeof(void *))
#endif
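
/*
 * Usage sketch (illustrative only): atomic_is_lock_free() takes a pointer
 * to an atomic object and reports whether operations on that object avoid
 * locking.  The variable "counter" is hypothetical.
 *
 *	atomic_llong counter;
 *	atomic_init(&counter, 0);
 *	if (!atomic_is_lock_free(&counter))
 *		;	// operations on counter may fall back to locking
 */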

/*
 * 7.17.6 Atomic integer types.
 */

#ifndef __CLANG_ATOMICS
/*
 * No native support for _Atomic(). Place object in structure to prevent
 * most forms of direct non-atomic access.
 */
#define _Atomic(T)              struct { T volatile __val; }
#endif
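
/*
 * Illustrative sketch (not part of the original header): under this
 * fallback, _Atomic(int) expands to an anonymous struct holding a
 * "volatile int __val", so a plain "x = 1" no longer compiles and callers
 * are pushed towards the atomic_* operations, while the object still
 * occupies exactly the bits of a plain int, matching the C++ layout.
 * The variable "x" is hypothetical.
 *
 *	_Atomic(int) x = ATOMIC_VAR_INIT(0);
 *	atomic_store(&x, 1);		// instead of x = 1
 *	int v = atomic_load(&x);	// v == 1
 */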

typedef _Atomic(bool)			atomic_bool;
typedef _Atomic(char)			atomic_char;
typedef _Atomic(signed char)		atomic_schar;
typedef _Atomic(unsigned char)		atomic_uchar;
typedef _Atomic(short)			atomic_short;
typedef _Atomic(unsigned short)		atomic_ushort;
typedef _Atomic(int)			atomic_int;
typedef _Atomic(unsigned int)		atomic_uint;
typedef _Atomic(long)			atomic_long;
typedef _Atomic(unsigned long)		atomic_ulong;
typedef _Atomic(long long)		atomic_llong;
typedef _Atomic(unsigned long long)	atomic_ullong;
#if __STDC_VERSION__ >= 201112L || __cplusplus >= 201103L
  typedef _Atomic(char16_t)		atomic_char16_t;
  typedef _Atomic(char32_t)		atomic_char32_t;
#endif
typedef _Atomic(wchar_t)		atomic_wchar_t;
typedef _Atomic(int_least8_t)		atomic_int_least8_t;
typedef _Atomic(uint_least8_t)		atomic_uint_least8_t;
typedef _Atomic(int_least16_t)		atomic_int_least16_t;
typedef _Atomic(uint_least16_t)		atomic_uint_least16_t;
typedef _Atomic(int_least32_t)		atomic_int_least32_t;
typedef _Atomic(uint_least32_t)		atomic_uint_least32_t;
typedef _Atomic(int_least64_t)		atomic_int_least64_t;
typedef _Atomic(uint_least64_t)		atomic_uint_least64_t;
typedef _Atomic(int_fast8_t)		atomic_int_fast8_t;
typedef _Atomic(uint_fast8_t)		atomic_uint_fast8_t;
typedef _Atomic(int_fast16_t)		atomic_int_fast16_t;
typedef _Atomic(uint_fast16_t)		atomic_uint_fast16_t;
typedef _Atomic(int_fast32_t)		atomic_int_fast32_t;
typedef _Atomic(uint_fast32_t)		atomic_uint_fast32_t;
typedef _Atomic(int_fast64_t)		atomic_int_fast64_t;
typedef _Atomic(uint_fast64_t)		atomic_uint_fast64_t;
typedef _Atomic(intptr_t)		atomic_intptr_t;
typedef _Atomic(uintptr_t)		atomic_uintptr_t;
typedef _Atomic(size_t)			atomic_size_t;
typedef _Atomic(ptrdiff_t)		atomic_ptrdiff_t;
typedef _Atomic(intmax_t)		atomic_intmax_t;
typedef _Atomic(uintmax_t)		atomic_uintmax_t;

/*
 * 7.17.7 Operations on atomic types.
 */

/*
 * Compiler-specific operations.
 */

#if defined(__CLANG_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__c11_atomic_compare_exchange_strong(object, expected, desired,	\
	    success, failure)
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	__c11_atomic_compare_exchange_weak(object, expected, desired,	\
	    success, failure)
#define	atomic_exchange_explicit(object, desired, order)		\
	__c11_atomic_exchange(object, desired, order)
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__c11_atomic_fetch_add(object, operand, order)
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__c11_atomic_fetch_and(object, operand, order)
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__c11_atomic_fetch_or(object, operand, order)
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__c11_atomic_fetch_sub(object, operand, order)
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__c11_atomic_fetch_xor(object, operand, order)
#define	atomic_load_explicit(object, order)				\
	__c11_atomic_load(object, order)
#define	atomic_store_explicit(object, desired, order)			\
	__c11_atomic_store(object, desired, order)
#elif defined(__GNUC_ATOMICS)
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)						\
	__atomic_compare_exchange_n(&(object)->__val, expected,		\
	    desired, 0, success, failure)
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	__atomic_compare_exchange_n(&(object)->__val, expected,		\
	    desired, 1, success, failure)
#define	atomic_exchange_explicit(object, desired, order)		\
	__atomic_exchange_n(&(object)->__val, desired, order)
#define	atomic_fetch_add_explicit(object, operand, order)		\
	__atomic_fetch_add(&(object)->__val, operand, order)
#define	atomic_fetch_and_explicit(object, operand, order)		\
	__atomic_fetch_and(&(object)->__val, operand, order)
#define	atomic_fetch_or_explicit(object, operand, order)		\
	__atomic_fetch_or(&(object)->__val, operand, order)
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	__atomic_fetch_sub(&(object)->__val, operand, order)
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	__atomic_fetch_xor(&(object)->__val, operand, order)
#define	atomic_load_explicit(object, order)				\
	__atomic_load_n(&(object)->__val, order)
#define	atomic_store_explicit(object, desired, order)			\
	__atomic_store_n(&(object)->__val, desired, order)
#else
#define	__atomic_apply_stride(object, operand) \
	(((__typeof__((object)->__val))0) + (operand))
#define	atomic_compare_exchange_strong_explicit(object, expected,	\
    desired, success, failure)	__extension__ ({			\
	__typeof__(expected) __ep = (expected);				\
	__typeof__(*__ep) __e = *__ep;					\
	(void)(success); (void)(failure);				\
	(bool)((*__ep = __sync_val_compare_and_swap(&(object)->__val,	\
	    __e, desired)) == __e);					\
})
#define	atomic_compare_exchange_weak_explicit(object, expected,		\
    desired, success, failure)						\
	atomic_compare_exchange_strong_explicit(object, expected,	\
		desired, success, failure)
#ifdef __HAS_BUILTIN_SYNC_SWAP
/* Clang provides a full-barrier atomic exchange - use it if available. */
#define	atomic_exchange_explicit(object, desired, order)		\
	((void)(order), __sync_swap(&(object)->__val, desired))
#else
/*
 * __sync_lock_test_and_set() is only an acquire barrier in theory (although in
 * practice it is usually a full barrier) so we need an explicit barrier before
 * it.
 */
#define	atomic_exchange_explicit(object, desired, order)		\
__extension__ ({							\
	__typeof__(object) __o = (object);				\
	__typeof__(desired) __d = (desired);				\
	(void)(order);							\
	__sync_synchronize();						\
	__sync_lock_test_and_set(&(__o)->__val, __d);			\
})
#endif
#define	atomic_fetch_add_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_add(&(object)->__val,		\
	    __atomic_apply_stride(object, operand)))
#define	atomic_fetch_and_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_and(&(object)->__val, operand))
#define	atomic_fetch_or_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_or(&(object)->__val, operand))
#define	atomic_fetch_sub_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_sub(&(object)->__val,		\
	    __atomic_apply_stride(object, operand)))
#define	atomic_fetch_xor_explicit(object, operand, order)		\
	((void)(order), __sync_fetch_and_xor(&(object)->__val, operand))
#define	atomic_load_explicit(object, order)				\
	((void)(order), __sync_fetch_and_add(&(object)->__val, 0))
#define	atomic_store_explicit(object, desired, order)			\
	((void)atomic_exchange_explicit(object, desired, order))
#endif

/*
 * Convenience functions.
 *
 * Don't provide these in kernel space. In kernel space, we should be
 * disciplined enough to always provide explicit barriers.
 */

#ifndef _KERNEL
#define	atomic_compare_exchange_strong(object, expected, desired)	\
	atomic_compare_exchange_strong_explicit(object, expected,	\
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_compare_exchange_weak(object, expected, desired)		\
	atomic_compare_exchange_weak_explicit(object, expected,		\
	    desired, memory_order_seq_cst, memory_order_seq_cst)
#define	atomic_exchange(object, desired)				\
	atomic_exchange_explicit(object, desired, memory_order_seq_cst)
#define	atomic_fetch_add(object, operand)				\
	atomic_fetch_add_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_and(object, operand)				\
	atomic_fetch_and_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_or(object, operand)				\
	atomic_fetch_or_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_sub(object, operand)				\
	atomic_fetch_sub_explicit(object, operand, memory_order_seq_cst)
#define	atomic_fetch_xor(object, operand)				\
	atomic_fetch_xor_explicit(object, operand, memory_order_seq_cst)
#define	atomic_load(object)						\
	atomic_load_explicit(object, memory_order_seq_cst)
#define	atomic_store(object, desired)					\
	atomic_store_explicit(object, desired, memory_order_seq_cst)
#endif /* !_KERNEL */
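
/*
 * Usage sketch (illustrative only): the convenience forms above simply
 * supply memory_order_seq_cst, so the two calls below are equivalent.
 * The variable "hits" is hypothetical.
 *
 *	atomic_fetch_add(&hits, 1);
 *	atomic_fetch_add_explicit(&hits, 1, memory_order_seq_cst);
 */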

/*
 * 7.17.8 Atomic flag type and operations.
 *
 * XXX: Assume atomic_bool can be used as an atomic_flag. Is there some
 * kind of compiler built-in type we could use?
 */

typedef struct {
	atomic_bool	__flag;
} atomic_flag;

#define	ATOMIC_FLAG_INIT		{ ATOMIC_VAR_INIT(false) }

static __inline bool
atomic_flag_test_and_set_explicit(volatile atomic_flag *__object,
    memory_order __order)
{
	return (atomic_exchange_explicit(&__object->__flag, 1, __order));
}

static __inline void
atomic_flag_clear_explicit(volatile atomic_flag *__object, memory_order __order)
{

	atomic_store_explicit(&__object->__flag, 0, __order);
}

#ifndef _KERNEL
static __inline bool
atomic_flag_test_and_set(volatile atomic_flag *__object)
{

	return (atomic_flag_test_and_set_explicit(__object,
	    memory_order_seq_cst));
}

static __inline void
atomic_flag_clear(volatile atomic_flag *__object)
{

	atomic_flag_clear_explicit(__object, memory_order_seq_cst);
}
#endif /* !_KERNEL */
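
/*
 * Usage sketch (illustrative only): atomic_flag provides test-and-set and
 * clear operations and is commonly used as a simple spinlock.  The
 * variable "lock" is hypothetical.
 *
 *	static atomic_flag lock = ATOMIC_FLAG_INIT;
 *
 *	while (atomic_flag_test_and_set(&lock))
 *		;			// spin until the flag was clear
 *	// ... critical section ...
 *	atomic_flag_clear(&lock);
 */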

#endif /* <atomic> unavailable */

#endif /* !_STDATOMIC_H_ */