    Searched refs: atomic_load_explicit (Results 1 - 25 of 47)


  /external/libcxx/test/std/utilities/memory/util.smartptr/util.smartptr.shared.atomic/
atomic_load_explicit.pass.cpp 23 // atomic_load_explicit(const shared_ptr<T>* p, memory_order mo)
33 std::shared_ptr<int> q = std::atomic_load_explicit(&p, std::memory_order_relaxed);
  /ndk/sources/cxx-stl/llvm-libc++/libcxx/test/utilities/memory/util.smartptr/util.smartptr.shared.atomic/
atomic_load_explicit.pass.cpp 21 // atomic_load_explicit(const shared_ptr<T>* p, memory_order mo)
31 std::shared_ptr<int> q = std::atomic_load_explicit(&p, std::memory_order_relaxed);
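The two tests above exercise the shared_ptr overload of atomic_load_explicit, which takes a pointer to the shared_ptr and returns a copy loaded under the requested memory order. A minimal stand-alone sketch of that usage (not the libcxx test itself):

#include <atomic>
#include <cassert>
#include <memory>

int main() {
    std::shared_ptr<int> p = std::make_shared<int>(3);
    // Relaxed ordering, as in the test: atomicity only, no synchronization.
    std::shared_ptr<int> q = std::atomic_load_explicit(&p, std::memory_order_relaxed);
    assert(q.get() == p.get() && *q == 3);
    return 0;
}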
  /bionic/libc/bionic/
pthread_once.cpp 54 int old_value = atomic_load_explicit(once_control_ptr, memory_order_acquire);
83 old_value = atomic_load_explicit(once_control_ptr, memory_order_acquire);
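pthread_once.cpp reads the once-control word with memory_order_acquire so that a thread which observes the "done" state also observes every write made by the initialization routine. A hedged sketch of that fast-path idea, with hypothetical names rather than bionic's actual layout:

#include <atomic>

std::atomic<int> once_state{0};   // 0 = not started, 1 = done, 2 = in progress

void run_once(void (*init_routine)()) {
    // Fast path: the acquire load pairs with the release store below.
    if (std::atomic_load_explicit(&once_state, std::memory_order_acquire) == 1) {
        return;  // init_routine's writes are already visible
    }
    int expected = 0;
    // Only the thread that wins the 0 -> 2 transition runs the routine;
    // bionic's real slow path blocks on a futex instead of spinning.
    if (std::atomic_compare_exchange_strong_explicit(
            &once_state, &expected, 2,
            std::memory_order_acquire, std::memory_order_acquire)) {
        init_routine();
        std::atomic_store_explicit(&once_state, 1, std::memory_order_release);
    } else {
        while (std::atomic_load_explicit(&once_state, std::memory_order_acquire) != 1) {
            // spin until the winner publishes the "done" state
        }
    }
}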
pthread_key.cpp 80 uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);
90 atomic_load_explicit(&key_map[i].key_destructor, memory_order_relaxed));
95 if (atomic_load_explicit(&key_map[i].seq, memory_order_relaxed) != seq) {
120 uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);
142 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
156 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
174 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
__cxa_guard.cpp 82 int old_value = atomic_load_explicit(&gv->state, memory_order_relaxed);
113 old_value = atomic_load_explicit(&gv->state, memory_order_relaxed);
pthread_rwlock.cpp 257 if (atomic_load_explicit(&rwlock->state, memory_order_relaxed) != 0) {
273 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);
292 if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) {
302 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);
355 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);
371 if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) {
380 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);
465 int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);
467 if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) != __get_thread()->tid) {
semaphore.cpp 100 return (atomic_load_explicit(sem_count_ptr, memory_order_relaxed) & SEMCOUNT_SHARED_MASK);
144 unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);
164 unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);
188 unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);
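semaphore.cpp (like the rwlock and mutex code nearby) reads the current value with memory_order_relaxed only to seed a retry loop; the compare-exchange inside the loop provides the ordering that actually matters. A hedged sketch of that pattern, with hypothetical names:

#include <atomic>

// Try to decrement a counter if it is positive (roughly what a trywait does).
bool try_decrement(std::atomic<unsigned int>* count) {
    unsigned int old_value =
        std::atomic_load_explicit(count, std::memory_order_relaxed);
    while (old_value > 0) {
        // Success acts as the acquire operation; on failure old_value is
        // reloaded and the loop retries.
        if (std::atomic_compare_exchange_weak_explicit(
                count, &old_value, old_value - 1,
                std::memory_order_acquire, std::memory_order_relaxed)) {
            return true;
        }
    }
    return false;  // counter was zero; a real sem_wait would block here
}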
system_properties.cpp 344 uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume);
349 uint_least32_t off = atomic_load_explicit(off_p, memory_order_consume);
385 uint_least32_t left_offset = atomic_load_explicit(&current->left, memory_order_relaxed);
401 uint_least32_t right_offset = atomic_load_explicit(&current->right, memory_order_relaxed);
439 uint_least32_t children_offset = atomic_load_explicit(&current->children, memory_order_relaxed);
465 uint_least32_t prop_offset = atomic_load_explicit(&current->prop, memory_order_relaxed);
552 uint_least32_t left_offset = atomic_load_explicit(&trie->left, memory_order_relaxed);
558 uint_least32_t prop_offset = atomic_load_explicit(&trie->prop, memory_order_relaxed);
565 uint_least32_t children_offset = atomic_load_explicit(&trie->children, memory_order_relaxed);
571 uint_least32_t right_offset = atomic_load_explicit(&trie->right, memory_order_relaxed)
    [all...]
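The two memory_order_consume loads in system_properties.cpp read offsets that are immediately dereferenced, so the ordering needed is only for data-dependent accesses (compilers typically strengthen consume to acquire). A hedged sketch of that dependent-load idea, with hypothetical names:

#include <atomic>
#include <cstdint>

struct Node { std::uint32_t value; };

char arena[4096];                                      // nodes live at offsets in here
std::atomic<std::uint_least32_t> published_offset{0};  // 0 means "not published yet"

const Node* find_published_node() {
    std::uint_least32_t off =
        std::atomic_load_explicit(&published_offset, std::memory_order_consume);
    if (off == 0) return nullptr;
    // The caller's dereference is data-dependent on `off`, which is exactly
    // what memory_order_consume is meant to order.
    return reinterpret_cast<const Node*>(arena + off);
}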
pthread_mutex.cpp 414 uint32_t owner_tid = atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed);
421 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);
432 if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) {
499 old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);
512 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);
533 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);
545 if ( tid != atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed) ) {
577 uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);
591 if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) {
pthread_cond.cpp 111 return COND_IS_SHARED(atomic_load_explicit(&state, memory_order_relaxed));
115 return COND_GET_CLOCK(atomic_load_explicit(&state, memory_order_relaxed));
175 unsigned int old_state = atomic_load_explicit(&cond->state, memory_order_relaxed);
  /external/libcxx/test/std/atomics/atomics.types.operations/atomics.types.operations.req/
atomic_load_explicit.pass.cpp 17 // atomic_load_explicit(const volatile atomic<T>* obj, memory_order m);
21 // atomic_load_explicit(const atomic<T>* obj, memory_order m);
34 assert(std::atomic_load_explicit(&t, std::memory_order_seq_cst) == T(1));
37 assert(std::atomic_load_explicit(&vt, std::memory_order_seq_cst) == T(2));
  /ndk/sources/cxx-stl/llvm-libc++/libcxx/test/atomics/atomics.types.operations/atomics.types.operations.req/
atomic_load_explicit.pass.cpp 14 // atomic_load_explicit(const volatile atomic<T>* obj, memory_order m);
18 // atomic_load_explicit(const atomic<T>* obj, memory_order m);
31 assert(std::atomic_load_explicit(&t, std::memory_order_seq_cst) == T(1));
34 assert(std::atomic_load_explicit(&vt, std::memory_order_seq_cst) == T(2));
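These tests cover both overloads, const atomic&lt;T&gt;* and const volatile atomic&lt;T&gt;*, with the strongest ordering. A minimal sketch in the same spirit:

#include <atomic>
#include <cassert>

int main() {
    std::atomic<int> t(1);
    assert(std::atomic_load_explicit(&t, std::memory_order_seq_cst) == 1);

    volatile std::atomic<int> vt(2);
    assert(std::atomic_load_explicit(&vt, std::memory_order_seq_cst) == 2);
    return 0;
}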
  /frameworks/native/libs/binder/
Binder.cpp 143 atomic_load_explicit(&mExtras, memory_order_acquire));
168 return atomic_load_explicit(non_const_p, mo);
184 atomic_load_explicit(&mExtras, memory_order_acquire));
199 atomic_load_explicit(&mExtras, memory_order_relaxed));
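Binder.cpp loads a lazily created extras object with memory_order_acquire so that a reader seeing the non-null pointer also sees the object's construction; the single relaxed load appears where the caller has exclusive access (e.g. during destruction). A hedged sketch of that double-checked-allocation pattern, with hypothetical names rather than the real class:

#include <atomic>

struct Extras { int data = 0; };

class Holder {
  public:
    Extras* getOrCreateExtras() {
        Extras* e = std::atomic_load_explicit(&mExtras, std::memory_order_acquire);
        if (e == nullptr) {
            Extras* fresh = new Extras();
            Extras* expected = nullptr;
            // Publish with release; on failure, acquire the pointer the
            // winning thread published.
            if (std::atomic_compare_exchange_strong_explicit(
                    &mExtras, &expected, fresh,
                    std::memory_order_acq_rel, std::memory_order_acquire)) {
                e = fresh;
            } else {
                delete fresh;
                e = expected;
            }
        }
        return e;
    }

    ~Holder() {
        // Sole owner at destruction time, so a relaxed load suffices.
        delete std::atomic_load_explicit(&mExtras, std::memory_order_relaxed);
    }

  private:
    std::atomic<Extras*> mExtras{nullptr};
};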
  /bionic/tests/
stdatomic_test.cpp 100 ASSERT_EQ(123, atomic_load_explicit(&i, memory_order_relaxed));
207 yval = atomic_load_explicit(&a->y, memory_order_acquire);
208 zval = atomic_load_explicit(&a->z, memory_order_relaxed);
209 xval = atomic_load_explicit(&a->x, memory_order_relaxed);
247 EXPECT_EQ(atomic_load_explicit(&a.x, memory_order_consume), BIG + 1);
248 EXPECT_EQ(atomic_load_explicit(&a.y, memory_order_seq_cst), BIG + 1);
  /prebuilts/ndk/5/sources/cxx-stl/gnu-libstdc++/include/bits/
atomicfwd_c.h 122 #define atomic_load_explicit(__a, __x) \ macro
126 atomic_load_explicit(__a, memory_order_seq_cst)
  /prebuilts/ndk/6/sources/cxx-stl/gnu-libstdc++/include/bits/
atomicfwd_c.h 122 #define atomic_load_explicit(__a, __x) \ macro
126 atomic_load_explicit(__a, memory_order_seq_cst)
  /prebuilts/ndk/7/sources/cxx-stl/gnu-libstdc++/include/bits/
atomicfwd_c.h 122 #define atomic_load_explicit(__a, __x) \ macro
126 atomic_load_explicit(__a, memory_order_seq_cst)
  /prebuilts/ndk/8/sources/cxx-stl/gnu-libstdc++/4.4.3/include/bits/
atomicfwd_c.h 122 #define atomic_load_explicit(__a, __x) \ macro
126 atomic_load_explicit(__a, memory_order_seq_cst)
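In these older gnu-libstdc++ headers, atomic_load_explicit is a macro and plain atomic_load is defined as atomic_load_explicit with memory_order_seq_cst; that is, the non-_explicit form is just the sequentially consistent shorthand. A small sketch of the same equivalence in standard C++ terms:

#include <atomic>
#include <cassert>

int main() {
    std::atomic<long> counter(42);
    long a = std::atomic_load(&counter);  // implicit seq_cst
    long b = std::atomic_load_explicit(&counter, std::memory_order_seq_cst);
    assert(a == b);
    return 0;
}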
  /system/core/include/cutils/
atomic.h 135 return atomic_load_explicit(a, memory_order_acquire);
148 return atomic_load_explicit(a, memory_order_relaxed);
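cutils/atomic.h layers Android's legacy atomic helpers on top of C11 atomics; the two hits above are an acquire load and a relaxed load. A hedged sketch of what such thin wrappers look like (hypothetical names, not the exact cutils signatures):

#include <atomic>
#include <cstdint>

static inline std::int32_t legacy_acquire_load(const std::atomic<std::int32_t>* a) {
    return std::atomic_load_explicit(a, std::memory_order_acquire);
}

static inline std::int32_t legacy_relaxed_load(const std::atomic<std::int32_t>* a) {
    return std::atomic_load_explicit(a, std::memory_order_relaxed);
}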
  /system/core/libcutils/
trace-dev.c 158 if (CC_UNLIKELY(atomic_load_explicit(&atrace_is_ready, memory_order_acquire))) {
159 if (atomic_load_explicit(&atrace_is_enabled, memory_order_acquire)) {
  /external/skia/include/ports/
SkAtomics_std.h 22 return std::atomic_load_explicit(ap, (std::memory_order)mo);
  /bionic/libc/include/
stdatomic.h 75 using std::atomic_load_explicit;
423 #define atomic_load_explicit(object, order) \
448 #define atomic_load_explicit(object, order) \
498 #define atomic_load_explicit(object, order) \
531 atomic_load_explicit(object, memory_order_seq_cst)
  /development/ndk/platforms/android-21/include/
stdatomic.h 69 using std::atomic_load_explicit;
417 #define atomic_load_explicit(object, order) \
442 #define atomic_load_explicit(object, order) \
492 #define atomic_load_explicit(object, order) \
525 atomic_load_explicit(object, memory_order_seq_cst)
  /prebuilts/clang/darwin-x86/host/3.6/lib/clang/3.6/include/
stdatomic.h 75 using std::atomic_load_explicit;
423 #define atomic_load_explicit(object, order) \
448 #define atomic_load_explicit(object, order) \
498 #define atomic_load_explicit(object, order) \
531 atomic_load_explicit(object, memory_order_seq_cst)
  /prebuilts/clang/linux-x86/host/3.6/lib/clang/3.6/include/
stdatomic.h 75 using std::atomic_load_explicit;
423 #define atomic_load_explicit(object, order) \
448 #define atomic_load_explicit(object, order) \
498 #define atomic_load_explicit(object, order) \
531 atomic_load_explicit(object, memory_order_seq_cst)

