/external/llvm/test/CodeGen/AArch64/ |
callee-save.ll |
    15 %val1 = load volatile float* @var
    16 %val2 = load volatile float* @var
    17 %val3 = load volatile float* @var
    18 %val4 = load volatile float* @var
    19 %val5 = load volatile float* @var
    20 %val6 = load volatile float* @var
    21 %val7 = load volatile float* @var
    22 %val8 = load volatile float* @var
    23 %val9 = load volatile float* @var
    24 %val10 = load volatile float* @va [all...]
regress-w29-reserved-with-fp.ll |
    10 %val1 = load volatile i32* @var
    11 %val2 = load volatile i32* @var
    12 %val3 = load volatile i32* @var
    13 %val4 = load volatile i32* @var
    14 %val5 = load volatile i32* @var
    15 %val6 = load volatile i32* @var
    16 %val7 = load volatile i32* @var
    17 %val8 = load volatile i32* @var
    18 %val9 = load volatile i32* @var
    25 store volatile i32 %val1, i32* @va [all...]
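Both AArch64 tests above chain volatile loads and stores so that many values are live at the same time; because a volatile access may not be merged with another or re-executed later, the backend has to keep every value in its own register and ends up using (and therefore saving) callee-saved registers, which is what the tests then check. A rough C++ analogue of the idiom, hedged: the names are invented here, and the real tests use far more values than shown.

```cpp
// Illustrative sketch only. Each read of `var` must stay a separate volatile
// load, and each result stays live until the volatile stores at the end, so
// with enough values the caller-saved register set runs out and callee-saved
// registers have to be used and spilled in the prologue/epilogue.
volatile float var = 0.0f;

void register_pressure() {
    float v1 = var, v2 = var, v3 = var, v4 = var;
    float v5 = var, v6 = var, v7 = var, v8 = var;
    // ...the real tests keep going well past this point...
    var = v1; var = v2; var = v3; var = v4;
    var = v5; var = v6; var = v7; var = v8;
}
```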
/external/llvm/test/DebugInfo/Inputs/ |
dwarfdump-inl-test.h | 2 volatile int z = 0; 7 volatile int y = inlined_h();
/external/libcxxabi/test/ |
catch_pointer_reference.cpp |
    104 assert_catches< volatile Base * , Derived *, Derived>();
    105 assert_catches<const volatile Base * , Derived *, Derived>();
    108 assert_catches< volatile Base * const , Derived *, Derived>();
    109 assert_catches<const volatile Base * const , Derived *, Derived>();
    110 assert_catches< Base * volatile, Derived *, Derived>();
    111 assert_catches<const Base * volatile, Derived *, Derived>();
    112 assert_catches< volatile Base * volatile, Derived *, Derived>();
    113 assert_catches<const volatile Base * volatile, Derived *, Derived>() [all...]
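Every row above exercises the same language rule: a thrown `Derived*` may be caught by a `Base*` handler that adds cv-qualification, because the match is allowed to go through a derived-to-base pointer conversion plus a qualification conversion. A minimal self-contained sketch (the `Base`/`Derived` names simply mirror the test's):

```cpp
#include <cassert>

struct Base {};
struct Derived : Base {};

int main() {
    Derived d;
    try {
        throw &d;                       // the thrown object has type Derived*
    } catch (const volatile Base* p) {  // derived-to-base + added cv-qualifiers
        assert(p != nullptr);           // the handler matches and sees d's Base subobject
    }
    return 0;
}
```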
/external/clang/test/CXX/expr/ |
p10-0x.cpp |
    3 volatile int g1;
    5 volatile int a;
    8 volatile int& refcall();
    11 void f1(volatile int *x, volatile S* s) {
    13 // CHECK: load volatile i32*
    15 // CHECK: load volatile i32*
    17 // CHECK: load volatile i32*
    19 // CHECK: load volatile i32*
    21 // CHECK: load volatile i32 [all...]
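The file name refers to C++11 [expr]p10: a discarded-value expression that is a volatile-qualified glvalue of certain forms (an id-expression, an indirection, a member access, and so on) still undergoes the lvalue-to-rvalue conversion, so the front end must emit the `load volatile` that the CHECK lines look for. A hedged sketch of the kind of statements involved (names invented, not taken from the test):

```cpp
volatile int g;

void touch(volatile int* x) {
    g;    // id-expression naming a volatile object: the read must still be emitted
    *x;   // indirection through a pointer-to-volatile: likewise a volatile load
}
```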
/frameworks/rs/java/tests/RSTest_CompatLib/src/com/android/rs/test/ |
min.rs | 4 volatile uchar2 res_uc_2 = 1; 5 volatile uchar2 src1_uc_2 = 1; 6 volatile uchar2 src2_uc_2 = 1;
/frameworks/rs/java/tests/RsTest/src/com/android/rs/test/ |
min.rs | 4 volatile uchar2 res_uc_2 = 1; 5 volatile uchar2 src1_uc_2 = 1; 6 volatile uchar2 src2_uc_2 = 1;
/external/clang/test/CodeGen/ |
2007-10-02-VolatileArray.c | 1 // RUN: %clang_cc1 -emit-llvm %s -o - | grep volatile 4 void foo(volatile int *p)
volatile-1.c |
    4 volatile int i, j, k;
    5 volatile int ar[5];
    6 volatile char c;
    8 volatile _Complex int ci;
    9 volatile struct S {
    11 void operator =(volatile struct S&o) volatile;
    16 //void operator =(volatile struct S&o1, volatile struct S&o2) volatile; [all...]
2007-10-30-Volatile.c | 3 char * volatile p = 0; 4 volatile int cc = 0;
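Taken together, these CodeGen tests pin down the basic guarantee that the `grep volatile` and `CHECK: load volatile` lines verify: every source-level access to a volatile object must appear in the emitted IR and may not be merged with another access or folded to a constant. A minimal illustration (not taken from the tests themselves):

```cpp
volatile int x = 1;

int read_twice() {
    int a = x;     // first volatile load
    int b = x;     // second volatile load; may not be combined with the first
    return a + b;  // may not be folded to 2 even though x never visibly changes
}
```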
builtins-ppc.c | 6 volatile int res; 7 res = __builtin_eh_return_data_regno(0); // CHECK: store volatile i32 3 8 res = __builtin_eh_return_data_regno(1); // CHECK: store volatile i32 4
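`__builtin_eh_return_data_regno(i)` takes a constant index and folds to the target's i-th exception-return data register number, which is why the PowerPC test can check for the literal constants 3 and 4. A tiny usage sketch, hedged to the two indices the test itself uses:

```cpp
void record_eh_regs(volatile int* out) {
    // On powerpc these fold to 3 and 4, matching the stores the CHECK lines expect.
    out[0] = __builtin_eh_return_data_regno(0);
    out[1] = __builtin_eh_return_data_regno(1);
}
```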
/external/llvm/test/CodeGen/Mips/ |
nacl-reserved-regs.ll |
    8 %val1 = load volatile i32* @var
    9 %val2 = load volatile i32* @var
    10 %val3 = load volatile i32* @var
    11 %val4 = load volatile i32* @var
    12 %val5 = load volatile i32* @var
    13 %val6 = load volatile i32* @var
    14 %val7 = load volatile i32* @var
    15 %val8 = load volatile i32* @var
    16 %val9 = load volatile i32* @var
    17 %val10 = load volatile i32* @va [all...]
/external/libcxx/www/ |
atomic_design_b.html |
    51 type __atomic_load_relaxed(const volatile type* atomic_obj);
    52 type __atomic_load_consume(const volatile type* atomic_obj);
    53 type __atomic_load_acquire(const volatile type* atomic_obj);
    54 type __atomic_load_seq_cst(const volatile type* atomic_obj);
    57 type __atomic_store_relaxed(volatile type* atomic_obj, type desired);
    58 type __atomic_store_release(volatile type* atomic_obj, type desired);
    59 type __atomic_store_seq_cst(volatile type* atomic_obj, type desired);
    62 type __atomic_exchange_relaxed(volatile type* atomic_obj, type desired);
    63 type __atomic_exchange_consume(volatile type* atomic_obj, type desired);
    64 type __atomic_exchange_acquire(volatile type* atomic_obj, type desired) [all...]
/external/chromium_org/v8/src/base/ |
atomicops_internals_tsan.h |
    57 __tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8* a,
    59 __tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16* a,
    61 __tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32* a,
    63 __tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64* a,
    65 __tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128* a,
    68 void __tsan_atomic8_store(volatile __tsan_atomic8* a, __tsan_atomic8 v,
    70 void __tsan_atomic16_store(volatile __tsan_atomic16* a, __tsan_atomic16 v,
    72 void __tsan_atomic32_store(volatile __tsan_atomic32* a, __tsan_atomic32 v,
    74 void __tsan_atomic64_store(volatile __tsan_atomic64* a, __tsan_atomic64 v,
    76 void __tsan_atomic128_store(volatile __tsan_atomic128* a, __tsan_atomic128 v [all...]
atomicops_internals_atomicword_compat.h |
    26 inline AtomicWord NoBarrier_CompareAndSwap(volatile AtomicWord* ptr,
    30 reinterpret_cast<volatile Atomic32*>(ptr), old_value, new_value);
    33 inline AtomicWord NoBarrier_AtomicExchange(volatile AtomicWord* ptr,
    36 reinterpret_cast<volatile Atomic32*>(ptr), new_value);
    39 inline AtomicWord NoBarrier_AtomicIncrement(volatile AtomicWord* ptr,
    42 reinterpret_cast<volatile Atomic32*>(ptr), increment);
    45 inline AtomicWord Barrier_AtomicIncrement(volatile AtomicWord* ptr,
    48 reinterpret_cast<volatile Atomic32*>(ptr), increment);
    51 inline AtomicWord Acquire_CompareAndSwap(volatile AtomicWord* ptr,
    55 reinterpret_cast<volatile Atomic32*>(ptr), old_value, new_value) [all...]
atomicops_internals_mac.h |
    15 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
    29 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
    39 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
    44 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
    53 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
    67 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
    73 inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) {
    77 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
    81 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
    86 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) [all...]
/external/clang/test/CodeGenCXX/ |
volatile-1.cpp |
    4 volatile int i, j, k;
    5 volatile int ar[5];
    6 volatile char c;
    8 volatile _Complex int ci;
    9 volatile struct S {
    11 void operator =(volatile struct S&o) volatile;
    16 //void operator =(volatile struct S&o1, volatile struct S&o2) volatile; [all...]
/external/compiler-rt/include/sanitizer/ |
tsan_interface_atomic.h |
    44 __tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
    46 __tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
    48 __tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
    50 __tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
    53 __tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
    57 void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
    59 void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
    61 void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
    63 void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
    66 void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v [all...]
/external/llvm/test/CodeGen/SystemZ/ |
frame-18.ll |
    19 %l0 = load volatile i32 *%ptr
    20 %l1 = load volatile i32 *%ptr
    21 %l3 = load volatile i32 *%ptr
    22 %l4 = load volatile i32 *%ptr
    23 %l5 = load volatile i32 *%ptr
    24 %l6 = load volatile i32 *%ptr
    25 %l7 = load volatile i32 *%ptr
    26 %l8 = load volatile i32 *%ptr
    27 %l9 = load volatile i32 *%ptr
    28 %l10 = load volatile i32 *%pt [all...]
frame-17.ll |
    34 %l0 = load volatile float *%ptr
    35 %l1 = load volatile float *%ptr
    36 %l2 = load volatile float *%ptr
    37 %l3 = load volatile float *%ptr
    38 %l4 = load volatile float *%ptr
    39 %l5 = load volatile float *%ptr
    40 %l6 = load volatile float *%ptr
    41 %l7 = load volatile float *%ptr
    42 %l8 = load volatile float *%ptr
    43 %l9 = load volatile float *%pt [all...]
/external/chromium_org/base/ |
atomicops_internals_atomicword_compat.h |
    26 inline AtomicWord NoBarrier_CompareAndSwap(volatile AtomicWord* ptr,
    30 reinterpret_cast<volatile Atomic32*>(ptr), old_value, new_value);
    33 inline AtomicWord NoBarrier_AtomicExchange(volatile AtomicWord* ptr,
    36 reinterpret_cast<volatile Atomic32*>(ptr), new_value);
    39 inline AtomicWord NoBarrier_AtomicIncrement(volatile AtomicWord* ptr,
    42 reinterpret_cast<volatile Atomic32*>(ptr), increment);
    45 inline AtomicWord Barrier_AtomicIncrement(volatile AtomicWord* ptr,
    48 reinterpret_cast<volatile Atomic32*>(ptr), increment);
    51 inline AtomicWord Acquire_CompareAndSwap(volatile AtomicWord* ptr,
    55 reinterpret_cast<volatile Atomic32*>(ptr), old_value, new_value) [all...]
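Both copies of `atomicops_internals_atomicword_compat.h` (this one and the v8/src/base one above) implement the same shim: on 32-bit targets, where `AtomicWord` and `Atomic32` have the same width, each word-sized operation just `reinterpret_cast`s the volatile pointer and forwards to the 32-bit primitive. A stand-alone sketch of that shape, with the caveat that the type aliases and the builtin-based 32-bit body are stand-ins, not the Chromium/V8 code:

```cpp
#include <cstdint>

using Atomic32   = int32_t;
using AtomicWord = intptr_t;  // same width as Atomic32 on the 32-bit builds this shim targets

// 32-bit primitive; the body here is a placeholder built on a GCC/Clang builtin.
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value, Atomic32 new_value) {
    return __sync_val_compare_and_swap(ptr, old_value, new_value);
}

// Word-sized wrapper: cast the volatile pointer and forward, the pattern
// visible in the matched lines above.
inline AtomicWord NoBarrier_CompareAndSwap(volatile AtomicWord* ptr,
                                           AtomicWord old_value, AtomicWord new_value) {
    return NoBarrier_CompareAndSwap(
        reinterpret_cast<volatile Atomic32*>(ptr), old_value, new_value);
}
```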
/external/llvm/test/Transforms/InstCombine/ |
volatile_store.ll |
    1 ; RUN: opt < %s -instcombine -S | grep "store volatile"
    2 ; RUN: opt < %s -instcombine -S | grep "load volatile"
    8 %tmp = load volatile i32* @x ; <i32> [#uses=1]
    9 store volatile i32 %tmp, i32* @x
/external/llvm/test/Transforms/ScalarRepl/ |
volatile.ll |
    6 store volatile i32 %T, i32* %B
    7 ; CHECK: store volatile
    10 %X = load volatile i32* %C
    11 ; CHECK: load volatile
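The ScalarRepl test makes the complementary point at the optimizer level: an alloca that is accessed through volatile loads or stores may not be promoted to a plain SSA value, so the `store volatile` and `load volatile` have to survive. In source terms (a hedged, invented example, not the .ll test itself):

```cpp
int keep_the_memory(int t) {
    int local = t;                  // would normally be promoted out of memory
    *(volatile int*)&local = t;     // volatile store pins the memory access
    return *(volatile int*)&local;  // volatile load must remain a real load
}
```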
/external/valgrind/main/memcheck/tests/ |
nanoleak_supp.c | 6 volatile int* a = malloc(1000);
/external/valgrind/main/none/tests/s390x/ |
op00.c | 4 asm volatile (".hword 0\n");
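The s390x test plants a literal zero halfword via inline assembly; `asm volatile` (and having no output operands, which already makes a GNU `asm` implicitly volatile) keeps the statement from being optimized away. Roughly, as a GNU inline-assembly extension and therefore compiler- and target-dependent:

```cpp
void emit_zero_halfword() {
    // Assembles a two-byte zero; on s390x that is presumably the invalid
    // opcode the test wants to execute.
    asm volatile(".hword 0\n");
}
```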