    Searched full:atomic_load (Results 1 - 25 of 356)

  /external/libcxx/test/std/atomics/atomics.types.operations/atomics.types.operations.req/
atomic_load.pass.cpp 17 // atomic_load(const volatile atomic<T>* obj);
21 // atomic_load(const atomic<T>* obj);
35 assert(std::atomic_load(&t) == T(1));
38 assert(std::atomic_load(&vt) == T(2));
  /prebuilts/ndk/r13/sources/cxx-stl/llvm-libc++/test/std/atomics/atomics.types.operations/atomics.types.operations.req/
atomic_load.pass.cpp 17 // atomic_load(const volatile atomic<T>* obj);
21 // atomic_load(const atomic<T>* obj);
35 assert(std::atomic_load(&t) == T(1));
38 assert(std::atomic_load(&vt) == T(2));
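These libc++ tests exercise the free-function overloads of std::atomic_load from <atomic>, which are equivalent to obj->load(std::memory_order_seq_cst). A minimal compilable sketch of what the assertions above check:

    #include <atomic>
    #include <cassert>

    int main() {
      std::atomic<int> t(1);
      volatile std::atomic<int> vt(2);
      assert(std::atomic_load(&t) == 1);   // const atomic<T>* overload
      assert(std::atomic_load(&vt) == 2);  // const volatile atomic<T>* overload
      return 0;
    }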
  /external/clang/test/CodeGenObjC/
property-atomic-bool.m 4 // CHECK: %[[ATOMIC_LOAD:.*]] = load atomic i8, i8* %{{.*}} seq_cst
5 // CHECK: %[[TOBOOL:.*]] = trunc i8 %[[ATOMIC_LOAD]] to i1
13 // CHECK: %[[ATOMIC_LOAD:.*]] = load atomic i8, i8* %{{.*}} unordered
  /prebuilts/ndk/r11/sources/cxx-stl/llvm-libc++/libcxx/test/atomics/atomics.types.operations/atomics.types.operations.req/
atomic_load.pass.cpp 14 // atomic_load(const volatile atomic<T>* obj);
18 // atomic_load(const atomic<T>* obj);
31 assert(std::atomic_load(&t) == T(1));
34 assert(std::atomic_load(&vt) == T(2));
  /bionic/tests/
stdatomic_test.cpp 39 ASSERT_EQ(123, atomic_load(&v));
42 ASSERT_EQ(456, atomic_load(&v));
95 ASSERT_EQ(123, atomic_load(&i));
154 ASSERT_EQ(125, atomic_load(&i));
161 ASSERT_EQ(121, atomic_load(&i));
168 ASSERT_EQ(0x123, atomic_load(&i));
175 ASSERT_EQ(0x123, atomic_load(&i));
182 ASSERT_EQ(0x002, atomic_load(&i));
260 EXPECT_EQ(atomic_load(&a.z), BIG + 1);
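The bionic tests use the C11 <stdatomic.h> spellings; the same calls exist as free functions on std::atomic in C++ with identical semantics. A sketch echoing the 123/456 assertions above (the fetch_add value is illustrative):

    #include <atomic>
    #include <cassert>

    int main() {
      std::atomic<int> v(123);
      assert(std::atomic_load(&v) == 123);  // defaults to seq_cst
      std::atomic_store(&v, 456);
      assert(std::atomic_load(&v) == 456);
      std::atomic_fetch_add(&v, 2);         // read-modify-write ...
      assert(std::atomic_load(&v) == 458);  // ... then observe the result
      return 0;
    }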
  /external/compiler-rt/lib/sanitizer_common/
sanitizer_addrhashmap.h 176 uptr addr1 = atomic_load(&c->addr, memory_order_acquire);
184 if (atomic_load(&b->add, memory_order_relaxed)) {
186 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
189 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
205 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
217 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
221 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
245 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
278 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);
289 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed)
    … (more matches in this file omitted)
sanitizer_lfstack.h 33 return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0;
37 u64 cmp = atomic_load(&head_, memory_order_relaxed);
49 u64 cmp = atomic_load(&head_, memory_order_acquire);
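sanitizer_lfstack.h is a Treiber-style lock-free stack: Empty() can use a relaxed load (line 33) because no ordering is needed for a yes/no probe, while Pop() loads the head with acquire (line 49) before attempting a CAS. A simplified sketch of that pop loop; the real code packs an ABA counter into the upper bits of a u64, which this sketch omits, so it is illustration only:

    #include <atomic>

    struct Node { Node* next; };

    Node* pop(std::atomic<Node*>& head) {
      Node* cmp = head.load(std::memory_order_acquire);  // cf. line 49
      while (cmp) {
        Node* next = cmp->next;
        if (head.compare_exchange_weak(cmp, next, std::memory_order_acquire))
          return cmp;   // won the race; the node is ours
        // cmp was refreshed by the failed CAS; retry
      }
      return nullptr;   // empty
    }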
sanitizer_mutex.h 44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);
56 if (atomic_load(&state_, memory_order_relaxed) == 0
100 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
132 CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);
150 u32 cmp = atomic_load(&state_, memory_order_relaxed);
164 u32 prev = atomic_load(&state_, memory_order_acquire);
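The sanitizer_mutex.h hits show the usual spin-lock idiom: spin on a cheap relaxed load and only attempt the acquiring exchange once the lock looks free (line 56), with relaxed loads in the CHECKs since they only sanity-check state. A sketch of the pattern, under illustrative names:

    #include <atomic>

    class SpinMutex {
      std::atomic<unsigned> state_{0};
     public:
      void lock() {
        for (;;) {
          // spin without fence traffic until the lock looks free
          if (state_.load(std::memory_order_relaxed) == 0 &&
              state_.exchange(1, std::memory_order_acquire) == 0)
            return;
        }
      }
      void unlock() { state_.store(0, std::memory_order_release); }
    };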
sanitizer_persistent_allocator.h 38 uptr cmp = atomic_load(&region_pos, memory_order_acquire);
39 uptr end = atomic_load(&region_end, memory_order_acquire);
sanitizer_stackdepotbase.h 78 uptr cmp = atomic_load(p, memory_order_relaxed);
104 uptr v = atomic_load(p, memory_order_consume);
149 uptr v = atomic_load(p, memory_order_consume);
171 uptr s = atomic_load(p, memory_order_relaxed);
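sanitizer_stackdepotbase.h probes its hash buckets with memory_order_consume so that dereferencing the loaded node is dependency-ordered after the producer's release store; mainstream compilers currently implement consume as acquire. A simplified probe, with the tag-bit handling of the real code left out:

    #include <atomic>

    struct Node { Node* link; unsigned id; };

    Node* find(std::atomic<Node*>& bucket, unsigned id) {
      for (Node* s = bucket.load(std::memory_order_consume); s; s = s->link)
        if (s->id == id)
          return s;   // the consume load orders this dereference
      return nullptr;
    }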
sanitizer_quarantine.h 58 uptr GetSize() const { return atomic_load(&max_size_, memory_order_acquire); }
89 uptr min_size = atomic_load(&min_size_, memory_order_acquire);
130 return atomic_load(&size_, memory_order_relaxed);
sanitizer_stackdepot.cc 40 atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;
92 return atomic_load(&node_->hash_and_use_count, memory_order_relaxed) &
142 uptr v = atomic_load(p, memory_order_consume);
sanitizer_coverage_libcdep.cc 291 uptr size = atomic_load(&pc_array_size, memory_order_relaxed);
302 CHECK_EQ(atomic_load(&pc_array_index, memory_order_relaxed), 0);
321 uptr size = atomic_load(&pc_array_size, memory_order_relaxed);
383 uptr range_end = atomic_load(&pc_array_index, memory_order_relaxed);
420 s32 guard_value = atomic_load(atomic_guard, memory_order_relaxed);
427 if (idx >= atomic_load(&pc_array_index, memory_order_acquire))
430 atomic_load(&pc_array_size, memory_order_acquire));
453 atomic_load(&cc_array_size, memory_order_acquire));
457 CHECK_EQ(atomic_load(&atomic_callee_cache[0], memory_order_relaxed), caller);
533 return atomic_load(&pc_array_index, memory_order_relaxed)
    … (more matches in this file omitted)
  /external/boringssl/src/crypto/
refcount_c11.c 39 uint32_t expected = atomic_load(count);
51 uint32_t expected = atomic_load(count);
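refcount_c11.c increments with a saturating CAS loop: load the count, then bump it only while it is below the ceiling, so the count can never overflow. The same shape in C++ (the BoringSSL original uses relaxed orderings; defaults are used here for brevity):

    #include <atomic>
    #include <cstdint>
    #include <limits>

    void refcount_inc(std::atomic<uint32_t>* count) {
      uint32_t expected = count->load();  // cf. line 39
      while (expected != std::numeric_limits<uint32_t>::max()) {
        if (count->compare_exchange_weak(expected, expected + 1))
          return;
        // expected now holds the freshly observed value; retry
      }
      // saturated: leave the count pinned at the maximum
    }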
  /external/libcxx/test/std/utilities/memory/util.smartptr/util.smartptr.shared.atomic/
atomic_load.pass.cpp 23 // atomic_load(const shared_ptr<T>* p)
36 std::shared_ptr<int> q = std::atomic_load(&p);
  /prebuilts/ndk/r11/sources/cxx-stl/llvm-libc++/libcxx/test/utilities/memory/util.smartptr/util.smartptr.shared.atomic/
atomic_load.pass.cpp 21 // atomic_load(const shared_ptr<T>* p)
31 std::shared_ptr<int> q = std::atomic_load(&p);
  /prebuilts/ndk/r13/sources/cxx-stl/llvm-libc++/test/std/utilities/memory/util.smartptr/util.smartptr.shared.atomic/
atomic_load.pass.cpp 23 // atomic_load(const shared_ptr<T>* p)
33 std::shared_ptr<int> q = std::atomic_load(&p);
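These tests cover the shared_ptr overload of std::atomic_load from <memory>, which atomically snapshots the pointer itself (deprecated since C++20 in favour of std::atomic<std::shared_ptr<T>>):

    #include <memory>
    #include <cassert>

    int main() {
      std::shared_ptr<int> p = std::make_shared<int>(3);
      std::shared_ptr<int> q = std::atomic_load(&p);  // atomic copy of p
      assert(*q == 3);
      return 0;
    }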
  /system/core/liblog/
pmsg_writer.c 54 int fd = atomic_load(&pmsgLoggerWrite.context.fd);
83 if (atomic_load(&pmsgLoggerWrite.context.fd) < 0) {
118 if (atomic_load(&pmsgLoggerWrite.context.fd) < 0) {
173 writev(atomic_load(&pmsgLoggerWrite.context.fd), newVec, i));
274 if (atomic_load(&pmsgLoggerWrite.context.fd) < 0) {
278 weOpened = atomic_load(&pmsgLoggerWrite.context.fd) < 0;
logd_writer.c 65 i = atomic_load(&logdLoggerWrite.context.sock);
118 if (atomic_load(&logdLoggerWrite.context.sock) < 0) {
138 sock = atomic_load(&logdLoggerWrite.context.sock);
271 writev(atomic_load(&logdLoggerWrite.context.sock), newVec, i));
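liblog keeps its descriptors in atomics so writers can atomic_load() the fd and treat a negative value as "not opened yet" (lines 54 and 83 above). A hedged sketch of that lazy-open pattern; the device path, helper name, and exact race policy here are assumptions, not liblog's actual code:

    #include <atomic>
    #include <fcntl.h>
    #include <unistd.h>

    static std::atomic<int> g_fd{-1};

    int get_log_fd() {
      int fd = g_fd.load();
      if (fd < 0) {
        fd = open("/dev/pmsg0", O_WRONLY | O_CLOEXEC);
        int expected = -1;
        if (fd >= 0 && !g_fd.compare_exchange_strong(expected, fd)) {
          close(fd);       // another thread won the race; use its fd
          fd = expected;
        }
      }
      return fd;
    }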
  /frameworks/av/media/libstagefright/
ACodecBufferChannel.cpp 92 std::atomic_load(&mInputBuffers));
114 std::atomic_load(&mInputBuffers));
238 std::atomic_load(&mOutputBuffers));
256 std::atomic_load(&mInputBuffers));
260 array = std::atomic_load(&mOutputBuffers);
278 std::atomic_load(&mInputBuffers));
287 std::atomic_load(&mOutputBuffers));
371 std::atomic_load(&mInputBuffers));
392 std::atomic_load(&mOutputBuffers));
  /external/llvm/test/CodeGen/PowerPC/
atomic-2.ll 93 define i64 @atomic_load(i64* %mem) nounwind {
95 ; CHECK: @atomic_load
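atomic-2.ll defines an IR function named @atomic_load to check PowerPC lowering of a 64-bit atomic load. Roughly the source-level equivalent, via the GCC/Clang __atomic builtins (the ordering in the actual .ll test may differ):

    #include <cstdint>

    int64_t atomic_load_i64(int64_t* mem) {
      // lowers to a 'load atomic i64' instruction in LLVM IR
      return __atomic_load_n(mem, __ATOMIC_SEQ_CST);
    }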
  /system/extras/memory_replay/
Pointers.cpp 80 if (atomic_load(&pointers_[index].key_pointer) == key_pointer) {
111 if (atomic_load(&pointers_[i].key_pointer) != 0) {
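Pointers.cpp stores each slot's key as an atomic so concurrent readers can scan the table without locking (lines 80 and 111 above). A simplified lookup under assumed names:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    struct Slot { std::atomic<uintptr_t> key_pointer{0}; };

    Slot* find(Slot* slots, size_t n, uintptr_t key) {
      for (size_t i = 0; i < n; ++i)
        if (slots[i].key_pointer.load() == key)  // nonzero key == occupied
          return &slots[i];
      return nullptr;
    }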
  /external/compiler-rt/lib/tsan/rtl/
tsan_mutex.cc 224 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
236 if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) {
266 prev = atomic_load(&state_, memory_order_acquire);
287 CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);
tsan_fd.cc 58 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)
64 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {
78 uptr l1 = atomic_load(pl1, memory_order_consume);
133 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
145 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
  /external/compiler-rt/lib/asan/
asan_stack.cc 27 return atomic_load(&malloc_context_size, memory_order_acquire);
