/external/libcxx/test/std/atomics/atomics.types.operations/atomics.types.operations.req/ |
atomic_load_explicit.pass.cpp | 17 // atomic_load_explicit(const volatile atomic<T>* obj, memory_order m); 21 // atomic_load_explicit(const atomic<T>* obj, memory_order m); 34 assert(std::atomic_load_explicit(&t, std::memory_order_seq_cst) == T(1)); 37 assert(std::atomic_load_explicit(&vt, std::memory_order_seq_cst) == T(2));
|
/ndk/sources/cxx-stl/llvm-libc++/libcxx/test/atomics/atomics.types.operations/atomics.types.operations.req/ |
atomic_load_explicit.pass.cpp | 14 // atomic_load_explicit(const volatile atomic<T>* obj, memory_order m); 18 // atomic_load_explicit(const atomic<T>* obj, memory_order m); 31 assert(std::atomic_load_explicit(&t, std::memory_order_seq_cst) == T(1)); 34 assert(std::atomic_load_explicit(&vt, std::memory_order_seq_cst) == T(2));
|
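Note: the two copies above are the same libcxx conformance test, exercising the free-function form of std::atomic_load_explicit against both plain and volatile atomics. A minimal sketch of what the quoted asserts check (values mirror the listing):

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> t(1);            // plain atomic
        volatile std::atomic<int> vt(2);  // volatile-qualified atomic
        // The free function is equivalent to t.load(order) but also has
        // overloads for volatile atomic<T>*, which the test covers.
        assert(std::atomic_load_explicit(&t, std::memory_order_seq_cst) == 1);
        assert(std::atomic_load_explicit(&vt, std::memory_order_seq_cst) == 2);
        return 0;
    }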
/cts/apps/CtsVerifier/jni/audio_loopback/audio_utils/ |
atomic.c | 24 return atomic_load_explicit(a, memory_order_acquire);
|
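Note: the audio_utils wrapper is a one-line acquire load behind a function boundary. A comparable C++ sketch (the function name here is illustrative, not the file's actual API):

    #include <atomic>
    #include <cstdint>

    // Hypothetical accessor mirroring the C wrapper: the acquire load pairs
    // with a writer's release store, so data written before that store is
    // visible to the caller after this returns.
    std::int32_t acquire_load(const std::atomic<std::int32_t>* a) {
        return std::atomic_load_explicit(a, std::memory_order_acquire);
    }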
/external/libcxx/test/std/utilities/memory/util.smartptr/util.smartptr.shared.atomic/ |
atomic_load_explicit.pass.cpp | 23 // atomic_load_explicit(const shared_ptr<T>* p, memory_order mo) 33 std::shared_ptr<int> q = std::atomic_load_explicit(&p, std::memory_order_relaxed);
|
/ndk/sources/cxx-stl/llvm-libc++/libcxx/test/utilities/memory/util.smartptr/util.smartptr.shared.atomic/ |
atomic_load_explicit.pass.cpp | 21 // atomic_load_explicit(const shared_ptr<T>* p, memory_order mo) 31 std::shared_ptr<int> q = std::atomic_load_explicit(&p, std::memory_order_relaxed);
|
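Note: these two copies test the (pre-C++20) atomic free functions for shared_ptr. A minimal sketch of the quoted line:

    #include <atomic>
    #include <cassert>
    #include <memory>

    int main() {
        std::shared_ptr<int> p = std::make_shared<int>(3);
        // Relaxed suffices when only atomicity of the pointer read matters
        // and no ordering with other memory operations is requested.
        std::shared_ptr<int> q =
            std::atomic_load_explicit(&p, std::memory_order_relaxed);
        assert(q.get() == p.get() && *q == 3);
        return 0;
    }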
/bionic/libc/bionic/ |
pthread_key.cpp | 80 uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed); 90 atomic_load_explicit(&key_map[i].key_destructor, memory_order_relaxed)); 95 if (atomic_load_explicit(&key_map[i].seq, memory_order_relaxed) != seq) { 120 uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed); 142 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed); 156 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed); 174 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
|
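Note: pthread_key.cpp tags each key slot with a sequence counter; re-reading seq after fetching the payload detects a concurrent pthread_key_delete that recycled the slot. A simplified sketch of that validation (names and layout are illustrative, not bionic's exact code):

    #include <atomic>
    #include <cstdint>

    using KeyDestructor = void (*)(void*);

    struct KeySlot {
        std::atomic<std::uintptr_t> seq;  // bumped on key create/delete
        std::atomic<KeyDestructor> key_destructor;
    };

    // Read the destructor, then re-check seq: if the slot was recycled in
    // between, the second read differs and the value is discarded.
    KeyDestructor load_destructor_if_stable(KeySlot& slot) {
        std::uintptr_t seq = slot.seq.load(std::memory_order_relaxed);
        KeyDestructor dtor = slot.key_destructor.load(std::memory_order_relaxed);
        if (slot.seq.load(std::memory_order_relaxed) != seq) {
            return nullptr;  // slot changed underneath us
        }
        return dtor;
    }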
pthread_barrier.cpp | 120 while(atomic_load_explicit(&barrier->state, memory_order_acquire) == RELEASE) { 124 uint32_t prev_wait_count = atomic_load_explicit(&barrier->wait_count, memory_order_relaxed); 154 while (atomic_load_explicit(&barrier->state, memory_order_acquire) == WAIT) { 175 while (atomic_load_explicit(&barrier->state, memory_order_acquire) == RELEASE) { 178 if (atomic_load_explicit(&barrier->wait_count, memory_order_relaxed) != 0) {
|
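Note: pthread_barrier.cpp alternates the barrier between WAIT and RELEASE phases, and threads wait on an acquire load of state so that writes made before the phase flip are visible once the loop exits. A stripped-down sketch of the phase wait:

    #include <atomic>

    enum BarrierState { WAIT, RELEASE };

    // Wait until the previous release phase drains. The acquire load pairs
    // with the release store that flips the phase. (Bionic parks on a futex
    // rather than spinning.)
    void wait_until_wait_phase(const std::atomic<BarrierState>& state) {
        while (state.load(std::memory_order_acquire) == RELEASE) {
        }
    }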
pthread_rwlock.cpp | 257 if (atomic_load_explicit(&rwlock->state, memory_order_relaxed) != 0) { 273 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); 292 if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) { 306 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); 349 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); 365 if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) { 378 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); 453 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed); 455 if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) != __get_thread()->tid) {
|
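Note: pthread_rwlock.cpp (like pthread_mutex.cpp below) loads state with memory_order_relaxed because the value only seeds a compare-exchange; the CAS's acquire-on-success supplies the ordering, so a stale first read merely costs one retry. Roughly, under an illustrative state encoding:

    #include <atomic>

    // Take a read lock optimistically; negative state means writer-held
    // (illustrative encoding, not bionic's actual bit layout).
    bool try_rdlock(std::atomic<int>& state) {
        int old_state = state.load(std::memory_order_relaxed);
        while (old_state >= 0) {
            if (state.compare_exchange_weak(old_state, old_state + 1,
                                            std::memory_order_acquire,
                                            std::memory_order_relaxed)) {
                return true;  // reader count incremented
            }
            // compare_exchange_weak refreshed old_state on failure; retry
        }
        return false;
    }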
pthread_once.cpp | 54 int old_value = atomic_load_explicit(once_control_ptr, memory_order_acquire); 83 old_value = atomic_load_explicit(once_control_ptr, memory_order_acquire);
|
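Note: pthread_once.cpp (and __cxa_guard.cpp further down) is the classic double-checked pattern: an acquire load fast-paths the already-initialized case, and only the CAS winner runs the init routine. A simplified sketch:

    #include <atomic>

    // Simplified once-control: 0 = untouched, 1 = init running, 2 = done.
    void call_once_sketch(std::atomic<int>& once_control, void (*init_routine)()) {
        int old_value = once_control.load(std::memory_order_acquire);  // fast path
        while (old_value != 2) {
            if (old_value == 0 &&
                once_control.compare_exchange_weak(old_value, 1,
                                                   std::memory_order_relaxed)) {
                init_routine();
                once_control.store(2, std::memory_order_release);  // publish init
                return;
            }
            // another thread is initializing (bionic waits on a futex here)
            old_value = once_control.load(std::memory_order_acquire);
        }
    }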
system_properties.cpp | 396 uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume); 401 uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume); 437 uint_least32_t left_offset = atomic_load_explicit(&current->left, memory_order_relaxed); 453 uint_least32_t right_offset = atomic_load_explicit(&current->right, memory_order_relaxed); 491 uint_least32_t children_offset = atomic_load_explicit(&current->children, memory_order_relaxed); 517 uint_least32_t prop_offset = atomic_load_explicit(&current->prop, memory_order_relaxed); 604 uint_least32_t left_offset = atomic_load_explicit(&trie->left, memory_order_relaxed); 610 uint_least32_t prop_offset = atomic_load_explicit(&trie->prop, memory_order_relaxed); 617 uint_least32_t children_offset = atomic_load_explicit(&trie->children, memory_order_relaxed); 623 uint_least32_t right_offset = atomic_load_explicit(&trie->right, memory_order_relaxed) [all...] |
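Note: system_properties.cpp is the only file in this listing that uses memory_order_consume (lines 396/401). Trie nodes live in a shared-memory area and link to each other by offsets published with release stores; a consume load lets the reader rely on the data dependency from offset to node instead of a full acquire. A sketch of such a dependency-ordered walk (node layout is illustrative; compilers today strengthen consume to acquire anyway):

    #include <atomic>
    #include <cstdint>

    // Nodes live in a shared area and reference children by offset.
    struct TrieNode {
        std::atomic<std::uint_least32_t> left;  // offset of left child, 0 = none
        // name, prop, right, children elided
    };

    // The writer publishes a fully built child with a release store of its
    // offset; the consume load orders our dereference after that store.
    const TrieNode* follow_left(const TrieNode* current, const char* area_base) {
        std::uint_least32_t off = current->left.load(std::memory_order_consume);
        if (off == 0) return nullptr;
        return reinterpret_cast<const TrieNode*>(area_base + off);
    }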
pthread_mutex.cpp | 415 uint32_t owner_tid = atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed); 424 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); 435 if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) { 499 old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); 512 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); 533 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); 545 if ( tid != atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed) ) { 577 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed); 591 if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) { 636 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed) [all...] |
pthread_cond.cpp | 111 return COND_IS_SHARED(atomic_load_explicit(&state, memory_order_relaxed)); 115 return COND_GET_CLOCK(atomic_load_explicit(&state, memory_order_relaxed)) == CLOCK_REALTIME; 180 unsigned int old_state = atomic_load_explicit(&cond->state, memory_order_relaxed);
|
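Note: pthread_cond.cpp (like semaphore.cpp below with SEMCOUNT_SHARED_MASK) packs init-time flags into a single atomic word and unpacks them from one relaxed load; since those bits never change after initialization, no ordering is needed. Illustrative packing, with made-up masks rather than bionic's actual layout:

    #include <atomic>

    // Hypothetical bit layout: bit 0 = process-shared, bit 1 = CLOCK_REALTIME.
    constexpr unsigned kSharedMask = 1u << 0;
    constexpr unsigned kRealtimeMask = 1u << 1;

    bool cond_is_shared(const std::atomic<unsigned>& state) {
        // set once at init and never changed, so relaxed is sufficient
        return (state.load(std::memory_order_relaxed) & kSharedMask) != 0;
    }

    bool cond_clock_is_realtime(const std::atomic<unsigned>& state) {
        return (state.load(std::memory_order_relaxed) & kRealtimeMask) != 0;
    }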
semaphore.cpp | 101 return (atomic_load_explicit(sem_count_ptr, memory_order_relaxed) & SEMCOUNT_SHARED_MASK); 145 unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed); 165 unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed); 189 unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);
|
__cxa_guard.cpp | 82 int old_value = atomic_load_explicit(&gv->state, memory_order_relaxed); 113 old_value = atomic_load_explicit(&gv->state, memory_order_relaxed);
|
/bionic/tests/ |
stdatomic_test.cpp | 100 ASSERT_EQ(123, atomic_load_explicit(&i, memory_order_relaxed)); 207 yval = atomic_load_explicit(&a->y, memory_order_acquire); 208 zval = atomic_load_explicit(&a->z, memory_order_relaxed); 209 xval = atomic_load_explicit(&a->x, memory_order_relaxed); 247 EXPECT_EQ(atomic_load_explicit(&a.x, memory_order_consume), BIG + 1); 248 EXPECT_EQ(atomic_load_explicit(&a.y, memory_order_seq_cst), BIG + 1);
|
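Note: the stdatomic_test lines around 207-209 are a message-passing check: the writer stores the payload, then release-stores a flag; the reader acquire-loads the flag first, so observing it guarantees the payload is visible. A sketch of the invariant being tested (struct and thread setup are illustrative):

    #include <atomic>
    #include <cassert>
    #include <thread>

    struct Triple {
        std::atomic<int> x{0}, y{0}, z{0};
    };

    // Writer: payload first, flag last with release.
    void writer(Triple* a) {
        a->x.store(1, std::memory_order_relaxed);
        a->z.store(1, std::memory_order_relaxed);
        a->y.store(1, std::memory_order_release);
    }

    // Reader: flag first with acquire; seeing y == 1 means the earlier
    // relaxed stores to x and z are visible to the loads that follow.
    void reader(Triple* a) {
        int yval = a->y.load(std::memory_order_acquire);
        int zval = a->z.load(std::memory_order_relaxed);
        int xval = a->x.load(std::memory_order_relaxed);
        if (yval == 1) {
            assert(xval == 1 && zval == 1);
        }
    }

    int main() {
        Triple a;
        std::thread w(writer, &a);
        std::thread r(reader, &a);
        w.join();
        r.join();
        return 0;
    }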
/prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.9/lib/gcc/aarch64-linux-android/4.9/include/ |
stdatomic.h | 147 #define atomic_load_explicit(PTR, MO) \ macro 156 #define atomic_load(PTR) atomic_load_explicit (PTR, __ATOMIC_SEQ_CST)
|
/prebuilts/gcc/darwin-x86/arm/arm-linux-androideabi-4.9/lib/gcc/arm-linux-androideabi/4.9/include/ |
stdatomic.h | 147 #define atomic_load_explicit(PTR, MO) \ macro 156 #define atomic_load(PTR) atomic_load_explicit (PTR, __ATOMIC_SEQ_CST)
|
/prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.9/lib/gcc/mips64el-linux-android/4.9/include/ |
stdatomic.h | 147 #define atomic_load_explicit(PTR, MO) \ macro 156 #define atomic_load(PTR) atomic_load_explicit (PTR, __ATOMIC_SEQ_CST)
|
/prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.9/lib/gcc/x86_64-linux-android/4.9/include/ |
stdatomic.h | 147 #define atomic_load_explicit(PTR, MO) \ macro 156 #define atomic_load(PTR) atomic_load_explicit (PTR, __ATOMIC_SEQ_CST)
|
/prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9/lib/gcc/aarch64-linux-android/4.9/include/ |
stdatomic.h | 147 #define atomic_load_explicit(PTR, MO) \ macro 156 #define atomic_load(PTR) atomic_load_explicit (PTR, __ATOMIC_SEQ_CST)
|
/prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9/lib/gcc/arm-linux-androideabi/4.9/include/ |
stdatomic.h | 147 #define atomic_load_explicit(PTR, MO) \ macro 156 #define atomic_load(PTR) atomic_load_explicit (PTR, __ATOMIC_SEQ_CST)
|
/prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.9/lib/gcc/mips64el-linux-android/4.9/include/ |
stdatomic.h | 147 #define atomic_load_explicit(PTR, MO) \ macro 156 #define atomic_load(PTR) atomic_load_explicit (PTR, __ATOMIC_SEQ_CST)
|
/prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9/lib/gcc/x86_64-linux-android/4.9/include/ |
stdatomic.h | 147 #define atomic_load_explicit(PTR, MO) \ macro 156 #define atomic_load(PTR) atomic_load_explicit (PTR, __ATOMIC_SEQ_CST)
|
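Note: the eight prebuilt copies above are the same GCC 4.9 <stdatomic.h> built for different target triples: line 147 defines atomic_load_explicit as a macro over the compiler's __atomic_load builtin, and line 156 layers atomic_load on top of it with __ATOMIC_SEQ_CST. The same default-ordering relationship, shown with the C++ free functions:

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> v(42);
        // atomic_load is atomic_load_explicit with seq_cst, exactly as
        // line 156 above spells out for the C macros.
        assert(std::atomic_load(&v) ==
               std::atomic_load_explicit(&v, std::memory_order_seq_cst));
        return 0;
    }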
/system/core/libcutils/ |
trace-dev.c | 151 if (CC_UNLIKELY(atomic_load_explicit(&atrace_is_ready, memory_order_acquire))) { 152 if (atomic_load_explicit(&atrace_is_enabled, memory_order_acquire)) {
|
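Note: trace-dev.c gates its tracing fast path on two acquire loads; atrace_is_ready is set elsewhere after one-time setup, so a reader that observes the flag also observes the initialized state behind it. A condensed sketch of that gate (the flag names follow the listing; the surrounding function is illustrative):

    #include <atomic>

    std::atomic<bool> atrace_is_ready{false};
    std::atomic<bool> atrace_is_enabled{false};

    // The acquire load of atrace_is_ready pairs with the release store made
    // after setup, so seeing the flag implies seeing the setup's effects.
    bool tracing_active() {
        if (atrace_is_ready.load(std::memory_order_acquire)) {
            return atrace_is_enabled.load(std::memory_order_acquire);
        }
        return false;
    }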
/bionic/libc/include/ |
stdatomic.h | 75 using std::atomic_load_explicit; 423 #define atomic_load_explicit(object, order) \ 448 #define atomic_load_explicit(object, order) \ 498 #define atomic_load_explicit(object, order) \ 531 atomic_load_explicit(object, memory_order_seq_cst)
|
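Note: bionic's <stdatomic.h> is dual-mode. Compiled as C++ it includes <atomic> and hoists the std:: names into the global namespace (the using-declaration at line 75); compiled as C it defines atomic_load_explicit as a macro, with the three definitions at 423/448/498 selecting between Clang's __c11_atomic builtins, GCC's __atomic builtins, and a fallback, and line 531 showing plain atomic_load defaulting to seq_cst. A sketch assuming that header: a C++ translation unit using the C spelling, which resolves to the hoisted std:: names on bionic:

    #include <stdatomic.h>  // on bionic, maps onto <atomic> under C++

    static atomic_int counter{0};

    int read_counter() {
        // resolves to std::atomic_load_explicit via the using-declarations
        return atomic_load_explicit(&counter, memory_order_relaxed);
    }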