/external/libcxx/test/utilities/memory/util.smartptr/util.smartptr.shared.atomic/ |
atomic_load.pass.cpp |
  21 // atomic_load(const shared_ptr<T>* p)
  31 std::shared_ptr<int> q = std::atomic_load(&p);
|
Android.mk |
  39 test_name := utilities/memory/util.smartptr/util.smartptr.shared.atomic/atomic_load
  40 test_src := atomic_load.pass.cpp
|
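The two libc++ hits above exercise the free-function atomic access API for shared_ptr. A minimal sketch of the pattern under test (single-threaded here; the point of the API is that the load stays coherent against a concurrent std::atomic_store on the same shared_ptr):

  #include <cassert>
  #include <memory>

  int main() {
    std::shared_ptr<int> p = std::make_shared<int>(3);
    // Atomically reads p's control block; unlike a plain copy, this is
    // safe against a concurrent std::atomic_store(&p, ...) elsewhere.
    std::shared_ptr<int> q = std::atomic_load(&p);
    assert(*q == 3);
  }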
/external/compiler-rt/lib/sanitizer_common/ |
sanitizer_mutex.h |
  44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);
  56 if (atomic_load(&state_, memory_order_relaxed) == 0
  94 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
  126 CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);
  144 u32 cmp = atomic_load(&state_, memory_order_relaxed);
  158 u32 prev = atomic_load(&state_, memory_order_acquire);
|
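The sanitizer_mutex.h hits all follow one pattern: cheap memory_order_relaxed loads to poll the lock word, with acquire ordering only on the operation that actually takes the lock. A sketch of that test-and-test-and-set shape using std::atomic (the class and member names here are illustrative, not the sanitizer's internal API):

  #include <atomic>

  class SpinMutex {
    std::atomic<unsigned> state_{0};  // 0 == unlocked
   public:
    void Lock() {
      for (;;) {
        // Acquire on the winning exchange orders the critical section
        // after lock acquisition.
        if (state_.exchange(1, std::memory_order_acquire) == 0)
          return;
        // Poll with relaxed loads to avoid hammering the cache line.
        while (state_.load(std::memory_order_relaxed) != 0) { /* spin */ }
      }
    }
    void Unlock() { state_.store(0, std::memory_order_release); }
  };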
sanitizer_lfstack.h |
  33 return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0;
  37 u64 cmp = atomic_load(&head_, memory_order_relaxed);
  49 u64 cmp = atomic_load(&head_, memory_order_acquire);
|
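The lfstack hits are the classic load-then-compare-and-swap retry loop. The real header packs a pointer and an ABA counter into a single u64 (hence kPtrMask); that is elided in this simplified Treiber-stack sketch with std::atomic:

  #include <atomic>

  struct Node { Node* next; int value; };

  // Simplified Treiber stack. Unlike sanitizer_lfstack.h it carries no
  // ABA counter, so it only illustrates the load/CAS retry shape.
  class LFStack {
    std::atomic<Node*> head_{nullptr};
   public:
    void Push(Node* n) {
      Node* cmp = head_.load(std::memory_order_relaxed);
      do {
        n->next = cmp;
        // Release so the node's contents are visible to the popper.
      } while (!head_.compare_exchange_weak(cmp, n,
                                            std::memory_order_release,
                                            std::memory_order_relaxed));
    }
    Node* Pop() {
      Node* cmp = head_.load(std::memory_order_acquire);
      while (cmp && !head_.compare_exchange_weak(cmp, cmp->next,
                                                 std::memory_order_acquire,
                                                 std::memory_order_acquire)) {
      }
      return cmp;
    }
  };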
sanitizer_addrhashmap.h |
  176 uptr addr1 = atomic_load(&c->addr, memory_order_acquire);
  184 if (atomic_load(&b->add, memory_order_relaxed)) {
  186 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
  189 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  205 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  217 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
  221 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  245 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  278 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);
  289 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed) [all...]
sanitizer_persistent_allocator.h |
  37 uptr cmp = atomic_load(&region_pos, memory_order_acquire);
  38 uptr end = atomic_load(&region_end, memory_order_acquire);
|
sanitizer_libignore.h | 74 const uptr n = atomic_load(&loaded_count_, memory_order_acquire);
|
sanitizer_stackdepotbase.h |
  74 uptr cmp = atomic_load(p, memory_order_relaxed);
  100 uptr v = atomic_load(p, memory_order_consume);
  145 uptr v = atomic_load(p, memory_order_consume);
|
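The memory_order_consume loads in the stack depot read hash-bucket heads whose nodes were published by a release store; the data dependency from the loaded pointer to the dereference is what orders the reads. A hedged sketch of that lookup shape (names are illustrative, and in practice compilers promote consume to acquire):

  #include <atomic>

  struct DepotNode { DepotNode* link; unsigned hash; /* stack data */ };

  // Walk a bucket of immutable nodes published with a release store.
  DepotNode* Find(std::atomic<DepotNode*>* bucket, unsigned hash) {
    for (DepotNode* n = bucket->load(std::memory_order_consume); n;
         n = n->link) {
      if (n->hash == hash) return n;
    }
    return nullptr;
  }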
sanitizer_allocator.cc |
  60 if (atomic_load(&internal_allocator_initialized, memory_order_acquire) == 0) {
  62 if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
|
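Lines 60 and 62 of sanitizer_allocator.cc are the two halves of double-checked initialization: an acquire load on the fast path, then a relaxed re-check under the lock (the lock itself provides the ordering). A sketch of that pattern, using std::mutex in place of the sanitizer's internal spin mutex:

  #include <atomic>
  #include <mutex>

  std::atomic<int> initialized{0};
  std::mutex init_mu;

  void EnsureInitialized() {
    // Acquire pairs with the release store below, so a reader that
    // sees 1 also sees the initialized state.
    if (initialized.load(std::memory_order_acquire) == 0) {
      std::lock_guard<std::mutex> lock(init_mu);
      // Relaxed suffices here: the mutex already orders us after any
      // prior initializer.
      if (initialized.load(std::memory_order_relaxed) == 0) {
        // ... perform one-time initialization ...
        initialized.store(1, std::memory_order_release);
      }
    }
  }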
sanitizer_atomic_clang_other.h | 25 INLINE typename T::Type atomic_load( function in namespace:__sanitizer
|
sanitizer_atomic_clang_x86.h | 28 INLINE typename T::Type atomic_load( function in namespace:__sanitizer
|
sanitizer_coverage_libcdep.cc |
  129 uptr size = atomic_load(&pc_array_size, memory_order_relaxed);
  155 uptr size = atomic_load(&pc_array_size, memory_order_relaxed);
  185 atomic_load(&pc_array_size, memory_order_acquire));
  194 return atomic_load(&pc_array_index, memory_order_relaxed);
|
sanitizer_stackdepot.cc |
  64 atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;
  93 return atomic_load(&node_->hash_and_use_count, memory_order_relaxed) &
  141 uptr v = atomic_load(p, memory_order_consume);
|
sanitizer_libignore.cc | 85 const uptr idx = atomic_load(&loaded_count_, memory_order_relaxed);
|
sanitizer_quarantine.h | 124 return atomic_load(&size_, memory_order_relaxed);
|
sanitizer_tls_get_addr.cc | 93 atomic_load(&number_of_live_dtls, memory_order_relaxed));
|
/external/libcxx/test/atomics/atomics.types.operations/atomics.types.operations.req/ |
atomic_load.pass.cpp |
  14 // atomic_load(const volatile atomic<T>* obj);
  18 // atomic_load(const atomic<T>* obj);
  31 assert(std::atomic_load(&t) == T(1));
  34 assert(std::atomic_load(&vt) == T(2));
|
Android.mk |
  59 test_name := atomics/atomics.types.operations/atomics.types.operations.req/atomic_load
  60 test_src := atomic_load.pass.cpp
|
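This test covers both std::atomic_load overloads named in its comment block: the const atomic<T>* one and the const volatile atomic<T>* one. A minimal sketch of what lines 31 and 34 assert:

  #include <atomic>
  #include <cassert>

  int main() {
    std::atomic<int> t(1);
    assert(std::atomic_load(&t) == 1);    // non-volatile overload

    volatile std::atomic<int> vt(2);
    assert(std::atomic_load(&vt) == 2);   // volatile overload
  }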
/bionic/tests/ |
stdatomic_test.cpp |
  37 ASSERT_EQ(123, atomic_load(&v));
  40 ASSERT_EQ(456, atomic_load(&v));
  96 ASSERT_EQ(123, atomic_load(&i));
  141 ASSERT_EQ(125, atomic_load(&i));
  148 ASSERT_EQ(121, atomic_load(&i));
  155 ASSERT_EQ(0x123, atomic_load(&i));
  162 ASSERT_EQ(0x123, atomic_load(&i));
  169 ASSERT_EQ(0x002, atomic_load(&i));
  247 EXPECT_EQ(atomic_load(&a.z), BIG + 1);
|
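The bionic assertions check atomic_load after stores and read-modify-write operations. The test itself uses C11 <stdatomic.h>; the same shape restated with the equivalent C++ <atomic> free functions (values chosen here are arbitrary, not the test's):

  #include <atomic>

  int main() {
    std::atomic<int> v(123);
    if (std::atomic_load(&v) != 123) return 1;
    std::atomic_store(&v, 456);
    if (std::atomic_load(&v) != 456) return 1;
    std::atomic_fetch_add(&v, 2);   // v: 456 -> 458
    std::atomic_fetch_sub(&v, 4);   // v: 458 -> 454
    return std::atomic_load(&v) == 454 ? 0 : 1;
  }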
/external/compiler-rt/lib/tsan/rtl/ |
tsan_mutex.cc |
  221 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
  233 if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) {
  263 prev = atomic_load(&state_, memory_order_acquire);
  284 CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);
|
tsan_fd.cc |
  57 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)
  63 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {
  77 uptr l1 = atomic_load(pl1, memory_order_consume);
  126 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
  138 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
|
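The tsan_fd.cc hits read a lazily populated two-level descriptor table: the first thread to need a second-level block CASes it in, and later readers just load the published pointer. A hedged sketch of that allocation race, with illustrative names and sizes rather than tsan's real ones:

  #include <atomic>

  const int kL2Size = 1024;
  std::atomic<std::atomic<int>*> l1[128];  // level-1 index of L2 blocks

  std::atomic<int>* GetL2(int idx) {
    std::atomic<int>* tab = l1[idx].load(std::memory_order_consume);
    if (!tab) {
      std::atomic<int>* fresh = new std::atomic<int>[kL2Size]();
      std::atomic<int>* expected = nullptr;
      if (l1[idx].compare_exchange_strong(expected, fresh,
                                          std::memory_order_acq_rel,
                                          std::memory_order_acquire)) {
        tab = fresh;        // we published the block
      } else {
        delete[] fresh;     // lost the race; use the winner's block
        tab = expected;
      }
    }
    return tab;
  }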
/external/compiler-rt/lib/dfsan/ |
dfsan.cc |
  118 label = atomic_load(table_ent, memory_order_acquire);
  236 atomic_load(&__dfsan_last_label, memory_order_relaxed);
|
/external/compiler-rt/lib/lsan/ |
lsan_interceptors.cc |
  231 while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0)
  260 while (atomic_load(&p.tid, memory_order_acquire) != 0)
|
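The lsan interceptor loops are a thread-creation handshake: one side publishes an id with a release store, the other spins on an acquire load until it appears. A self-contained sketch of that pairing (the id value 42 is arbitrary):

  #include <atomic>
  #include <thread>

  std::atomic<int> tid{0};

  void Child() {
    // ... per-thread setup ...
    // Release publishes everything done above to the spinning parent.
    tid.store(42, std::memory_order_release);
  }

  int main() {
    std::thread t(Child);
    // Acquire pairs with the release store, so once the loop exits the
    // parent sees all of Child's prior writes.
    while (tid.load(std::memory_order_acquire) == 0) { /* spin */ }
    t.join();
  }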
/external/compiler-rt/lib/tsan/tests/rtl/ |
tsan_mutex.cc |
  169 int *val = (int *)atomic_load(singleton, memory_order_acquire);
  215 uptr v = atomic_load(&flag, memory_order_acquire);
|
tsan_test_util_linux.cc |
  323 Event* ev = (Event*)atomic_load(&impl->event, memory_order_acquire);
  343 CHECK_EQ(atomic_load(&event, memory_order_relaxed), 0);
  345 while (atomic_load(&event, memory_order_acquire) != 0)
|