/frameworks/rs/java/tests/RsTest_14/src/com/android/rs/test/ |
math.rs | 5 volatile float f1; 6 volatile float2 f2; 7 volatile float3 f3; 8 volatile float4 f4; 10 volatile int i1; 11 volatile int2 i2; 12 volatile int3 i3; 13 volatile int4 i4; 15 volatile uint ui1; 16 volatile uint2 ui2 [all...] |
math.rs.orig | 5 volatile float f1; 6 volatile float2 f2; 7 volatile float3 f3; 8 volatile float4 f4; 10 volatile int i1; 11 volatile int2 i2; 12 volatile int3 i3; 13 volatile int4 i4; 15 volatile uint ui1; 16 volatile uint2 ui2 [all...] |
/frameworks/rs/java/tests/RsTest_16/src/com/android/rs/test/ |
math.rs | 5 volatile float f1; 6 volatile float2 f2; 7 volatile float3 f3; 8 volatile float4 f4; 10 volatile int i1; 11 volatile int2 i2; 12 volatile int3 i3; 13 volatile int4 i4; 15 volatile uint ui1; 16 volatile uint2 ui2 [all...] |
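The RsTest math.rs scripts above declare volatile scalar and vector globals so that the test kernels' reads and writes cannot be optimized away. A minimal C++ sketch of the same idiom, assuming Clang's ext_vector_type extension (which RenderScript's vector types are typically defined in terms of) and clang++; the names echo the snippet, but the body is illustrative only:

```cpp
// Clang vector typedefs standing in for RenderScript's float2/float4.
typedef float float2 __attribute__((ext_vector_type(2)));
typedef float float4 __attribute__((ext_vector_type(4)));

// 'volatile' forces every access to be emitted, so a test harness can
// observe each individual load and store.
volatile float  f1;
volatile float2 f2;
volatile float4 f4;
volatile int    i1;

int main() {
    float4 init = {1.0f, 2.0f, 3.0f, 4.0f};
    f4 = init;                           // volatile vector store
    float4 tmp = f4;                     // volatile vector load
    f1 = tmp.x + tmp.y + tmp.z + tmp.w;  // volatile scalar store
    i1 = static_cast<int>(f1);           // volatile load of f1, store to i1
    return i1;                           // volatile load of i1
}
```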
/external/libcxx/test/std/utilities/function.objects/func.invoke/ |
invoke.pass.cpp | 57 int volatile& operator()(NonCopyable&&) volatile & { return data; } 58 int const volatile& operator()(NonCopyable&&) const volatile & { return data; } 62 int volatile&& operator()(NonCopyable&&) volatile && { return std::move(data); } 63 int const volatile&& operator()(NonCopyable&&) const volatile && { return std::move(data); } 154 test_b12<int volatile&(NonCopyable&&) volatile &, int volatile&>(cl) [all...] |
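invoke.pass.cpp drives std::invoke through a call operator overloaded on every const/volatile and lvalue/rvalue ref-qualifier combination and checks the returned reference type each time. A reduced C++17 sketch of that overload set (the class and variable names here are made up, and the NonCopyable&& parameter is dropped):

```cpp
#include <cassert>
#include <functional>
#include <utility>

struct Callable {
    int data = 42;
    // One overload per cv/ref qualification of *this; std::invoke picks
    // the overload matching the object's cv-qualification and value category.
    int&                operator()() &                { return data; }
    int const&          operator()() const &          { return data; }
    int volatile&       operator()() volatile &       { return data; }
    int const volatile& operator()() const volatile & { return data; }
    int&&               operator()() &&               { return std::move(data); }
    int const&&         operator()() const &&         { return std::move(data); }
};

int main() {
    Callable c;
    int& lref = std::invoke(c);               // '&' overload
    assert(&lref == &c.data);

    volatile Callable vc;
    int volatile& vref = std::invoke(vc);     // 'volatile &' overload
    (void)vref;

    int&& rref = std::invoke(std::move(c));   // '&&' overload
    (void)rref;
    return 0;
}
```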
/external/llvm/test/CodeGen/AArch64/ |
ldst-unscaledimm.ll | 23 %val8_sext32 = load volatile i8, i8* %addr_sext32 25 store volatile i32 %val32_signed, i32* @var_32bit 28 ; match a zero-extending load volatile 8-bit -> 32-bit 30 %val8_zext32 = load volatile i8, i8* %addr_zext32 32 store volatile i32 %val32_unsigned, i32* @var_32bit 35 ; match an any-extending load volatile 8-bit -> 32-bit 37 %val8_anyext = load volatile i8, i8* %addr_anyext 39 store volatile i8 %newval8, i8* @var_8bit 42 ; match a sign-extending load volatile 8-bit -> 64-bit 44 %val8_sext64 = load volatile i8, i8* %addr_sext6 [all...] |
ldst-unsignedimm.ll | 19 %val8_sext32 = load volatile i8, i8* @var_8bit 21 store volatile i32 %val32_signed, i32* @var_32bit 25 ; match a zero-extending load volatile 8-bit -> 32-bit 26 %val8_zext32 = load volatile i8, i8* @var_8bit 28 store volatile i32 %val32_unsigned, i32* @var_32bit 31 ; match an any-extending load volatile 8-bit -> 32-bit 32 %val8_anyext = load volatile i8, i8* @var_8bit 34 store volatile i8 %newval8, i8* @var_8bit 37 ; match a sign-extending load volatile 8-bit -> 64-bit 38 %val8_sext64 = load volatile i8, i8* @var_8bi [all...] |
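Both ldst-*.ll tests above pin down how sign-, zero-, and any-extending loads interact with volatile 8-bit accesses on AArch64. Roughly the C++ that lowers to that IR shape (the globals only echo the tests' @var_8bit/@var_32bit; they are illustrative, not the tests' code):

```cpp
#include <cstdint>

volatile std::int8_t  var_8bit;
volatile std::uint8_t var_u8bit;   // illustrative extra global
volatile std::int32_t var_32bit;

void extending_loads() {
    // Sign-extending volatile load: 'load volatile i8' + sext to i32.
    var_32bit = var_8bit;

    // Zero-extending volatile load: 'load volatile i8' + zext to i32.
    var_32bit = var_u8bit;

    // Any-extend shape: the byte is loaded, modified, and stored back,
    // so only the low 8 bits of the intermediate value matter.
    var_8bit = static_cast<std::int8_t>(var_8bit + 1);
}
```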
/external/clang/test/CodeGen/ |
volatile-complex.c | 3 // Validate that volatile _Complex loads and stores are generated 10 volatile _Complex float cf; 11 volatile _Complex double cd; 12 volatile _Complex float cf32 __attribute__((aligned(32))); 13 volatile _Complex double cd32 __attribute__((aligned(32))); 17 // CHECK: load volatile float, float* getelementptr inbounds ({ float, float }, { float, float }* @cf, i32 0, i32 0), align 4 18 // CHECK-NEXT: load volatile float, float* getelementptr inbounds ({ float, float }, { float, float }* @cf, i32 0, i32 1), align 4 20 // CHECK-NEXT: [[R:%.*]] = load volatile float, float* getelementptr inbounds ({ float, float }, { float, float }* @cf, i32 0, i32 0), align 4 21 // CHECK-NEXT: [[I:%.*]] = load volatile float, float* getelementptr inbounds ({ float, float }, { float, float }* @cf, i32 0, i32 1), align 4 22 // CHECK-NEXT: store volatile float [[R]], float* getelementptr inbounds ({ float, float }, { float, float }* @cf, i32 0, i32 0) (…) [all...] |
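volatile-complex.c verifies that a volatile _Complex access is emitted as separate volatile loads and stores of the real and imaginary parts (the paired getelementptr/load volatile lines in the CHECK output). Since _Complex is a C feature, here is an illustrative C++ stand-in using a two-float struct that produces the same access pattern:

```cpp
// Two adjacent floats standing in for _Complex float.
struct cfloat { float re, im; };

volatile cfloat cf;

void copy_in_place() {
    // Each member read through the volatile object is its own volatile
    // load, and each member write is its own volatile store, mirroring
    // the element-wise accesses the CHECK lines expect.
    float r = cf.re;
    float i = cf.im;
    cf.re = r;
    cf.im = i;
}
```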
/external/protobuf/src/google/protobuf/stubs/ |
atomicops_internals_macosx.h | 42 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, 56 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, 66 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 71 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, 80 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 100 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 104 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 109 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 114 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) [all...] |
atomicops_internals_x86_msvc.h | 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 70 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 75 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { 79 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { 84 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { 93 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr [all...] |
atomicops_internals_tsan.h | 46 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr, 55 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr, 61 inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr, 67 inline Atomic32 Release_AtomicExchange(volatile Atomic32 *ptr, 73 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr, 79 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr, 85 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr, 94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr, 103 inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) { 107 inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) [all...] |
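The three protobuf stubs above declare the same atomicops surface — NoBarrier_/Acquire_/Release_ variants of load, store, exchange, compare-and-swap, and increment over volatile Atomic32*/Atomic64* — with a different backend per platform (OSAtomic on macOS, MSVC intrinsics, __tsan_atomic* under TSan). The sketch below is not any of those backends; it only restates the ordering contract of a few entry points in portable C++11 atomics, plus a tiny usage example:

```cpp
#include <atomic>
#include <cstdint>

using Atomic32 = std::int32_t;

// Illustrative re-statement over std::atomic<Atomic32> instead of
// volatile Atomic32* (the real stubs operate on raw volatile memory).
inline Atomic32 Acquire_Load(const std::atomic<Atomic32>* ptr) {
    return ptr->load(std::memory_order_acquire);
}

inline void Release_Store(std::atomic<Atomic32>* ptr, Atomic32 value) {
    ptr->store(value, std::memory_order_release);
}

// Returns the value observed at *ptr, matching the atomicops convention
// that CompareAndSwap reports the previous contents.
inline Atomic32 Acquire_CompareAndSwap(std::atomic<Atomic32>* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
    Atomic32 observed = old_value;
    ptr->compare_exchange_strong(observed, new_value,
                                 std::memory_order_acquire,
                                 std::memory_order_acquire);
    return observed;
}

// Usage: a minimal spinlock built from only these operations.
struct SpinLock {
    std::atomic<Atomic32> state{0};
    void lock()   { while (Acquire_CompareAndSwap(&state, 0, 1) != 0) { /* spin */ } }
    void unlock() { Release_Store(&state, 0); }
};
```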
/prebuilts/misc/darwin-x86_64/protobuf2.5/include/google/protobuf/stubs/ |
atomicops_internals_macosx.h | 42 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, 56 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, 66 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 71 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, 80 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 100 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 104 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 109 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 114 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) [all...] |
atomicops_internals_x86_msvc.h | 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 70 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 75 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { 79 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { 84 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { 93 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr [all...] |
/prebuilts/misc/linux-x86_64/protobuf2.5/include/google/protobuf/stubs/ |
atomicops_internals_macosx.h | 42 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, 56 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, 66 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 71 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, 80 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 100 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 104 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 109 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 114 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) [all...] |
atomicops_internals_x86_msvc.h | 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 70 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 75 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { 79 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { 84 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { 93 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr [all...] |
/prebuilts/misc/windows/protobuf2.5/include/google/protobuf/stubs/ |
atomicops_internals_macosx.h | 42 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, 56 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, 66 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 71 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, 80 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 100 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 104 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 109 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 114 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) [all...] |
atomicops_internals_x86_msvc.h | 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 70 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 75 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { 79 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { 84 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { 93 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr [all...] |
/prebuilts/tools/darwin-x86_64/protoc/include/google/protobuf/stubs/ |
atomicops_internals_macosx.h | 42 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, 56 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, 66 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 71 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, 80 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 100 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 104 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 109 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 114 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) [all...] |
atomicops_internals_x86_msvc.h | 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 70 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 75 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { 79 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { 84 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { 93 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr [all...] |
/prebuilts/tools/linux-x86_64/protoc/include/google/protobuf/stubs/ |
atomicops_internals_macosx.h | 42 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, 56 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, 66 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 71 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, 80 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 100 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 104 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 109 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 114 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) [all...] |
atomicops_internals_x86_msvc.h | 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, 49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, 55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, 61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { 65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { 70 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { 75 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { 79 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { 84 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { 93 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr [all...] |
/external/compiler-rt/lib/sanitizer_common/ |
sanitizer_atomic_msvc.h | 25 char volatile *Addend, char Value); // NOLINT 28 short volatile *Addend, short Value); // NOLINT 31 long volatile *Addend, long Value); // NOLINT 34 long volatile * Addend, long Value); // NOLINT 37 short volatile *Destination, // NOLINT 42 long long volatile *Destination, // NOLINT 46 void *volatile *Destination, 51 long volatile *Destination, // NOLINT 57 long long volatile * Addend, long long Value); // NOLINT 78 const volatile T *a, memory_order mo) [all...] |
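sanitizer_atomic_msvc.h builds the sanitizer atomic operations from the MSVC _Interlocked* intrinsics, each of which takes a pointer to volatile storage. As a hedged illustration only (not the header's code), the fetch-add path amounts to a sequentially consistent read-modify-write that returns the previous value:

```cpp
#include <atomic>

// Roughly the contract of an _InterlockedExchangeAdd-style primitive:
// atomically add 'value' and return the value that was there before,
// using the strongest ordering (seq_cst) that standard C++ offers.
inline long fetch_add_full_barrier(std::atomic<long>& target, long value) {
    return target.fetch_add(value, std::memory_order_seq_cst);
}
```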
/external/llvm/test/CodeGen/SystemZ/ |
bswap-02.ll | 89 ; Check that volatile accesses do not use LRV, which might access the 96 %a = load volatile i32 , i32 *%src 107 %val0 = load volatile i32 , i32 *%ptr 108 %val1 = load volatile i32 , i32 *%ptr 109 %val2 = load volatile i32 , i32 *%ptr 110 %val3 = load volatile i32 , i32 *%ptr 111 %val4 = load volatile i32 , i32 *%ptr 112 %val5 = load volatile i32 , i32 *%ptr 113 %val6 = load volatile i32 , i32 *%ptr 114 %val7 = load volatile i32 , i32 *%pt [all...] |
bswap-03.ll | 89 ; Check that volatile accesses do not use LRVG, which might access the 96 %a = load volatile i64 , i64 *%src 107 %val0 = load volatile i64 , i64 *%ptr 108 %val1 = load volatile i64 , i64 *%ptr 109 %val2 = load volatile i64 , i64 *%ptr 110 %val3 = load volatile i64 , i64 *%ptr 111 %val4 = load volatile i64 , i64 *%ptr 112 %val5 = load volatile i64 , i64 *%ptr 113 %val6 = load volatile i64 , i64 *%ptr 114 %val7 = load volatile i64 , i64 *%pt [all...] |
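The two SystemZ bswap tests make sure the byte-reversing load instructions (LRV/LRVG) are not substituted for volatile accesses, and they exercise a long run of volatile loads from the same pointer. The C++ shape of the first check is roughly this (illustrative only):

```cpp
#include <cstdint>

volatile std::uint32_t src;

// The volatile read must remain a plain 32-bit load; the swap is a
// separate operation afterwards, so the backend is expected not to fold
// the pair into a load-reversed instruction (which is what the test checks).
std::uint32_t load_then_swap() {
    std::uint32_t v = src;          // volatile load
    return __builtin_bswap32(v);    // byte swap after the load
}
```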
/device/google/contexthub/firmware/src/platform/stm32f4xx/ |
pwr.c | 25 volatile uint32_t CR; 26 volatile uint32_t PLLCFGR; 27 volatile uint32_t CFGR; 28 volatile uint32_t CIR; 29 volatile uint32_t AHB1RSTR; 30 volatile uint32_t AHB2RSTR; 31 volatile uint32_t AHB3RSTR; 33 volatile uint32_t APB1RSTR; 34 volatile uint32_t APB2RSTR; 36 volatile uint32_t AHB1ENR [all...] |
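pwr.c models the STM32F4 clock/power register blocks as structs of volatile uint32_t fields, so every field access becomes a real bus read or write. A minimal sketch of that memory-mapped I/O idiom; the register names and base address below are placeholders, not the actual STM32 layout:

```cpp
#include <cstdint>

// Placeholder peripheral: each field is one 32-bit hardware register.
// 'volatile' stops the compiler from caching, reordering, or deleting
// accesses to these locations.
struct PeriphRegs {
    volatile std::uint32_t CR;    // control
    volatile std::uint32_t SR;    // status
    volatile std::uint32_t ENR;   // enable bits
};

// Hypothetical base address, for illustration only.
static PeriphRegs* const PERIPH = reinterpret_cast<PeriphRegs*>(0x40000000u);

inline void enable_and_wait(unsigned bit) {
    PERIPH->ENR = PERIPH->ENR | (1u << bit);   // volatile read, then volatile write
    while ((PERIPH->SR & (1u << bit)) == 0) {
        // Spin until the hardware sets the bit; the volatile read in the
        // condition cannot be hoisted out of the loop.
    }
}
```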
/external/clang/test/Sema/ |
warn-cast-align.c | 16 const volatile void *P2 = P; 25 const volatile char *j = (const volatile char*) P2; 26 const volatile short *k = (const volatile short*) P2; 27 const volatile int *l = (const volatile int*) P2; |
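warn-cast-align.c checks where -Wcast-align does and does not fire when a pointer is cast to a type with stricter alignment; casts that start from a void pointer, as with P2 above, are generally exempt because void* carries no alignment information. A C++ sketch of the case that does warn, starting from a char pointer (illustrative only; compile with clang++ -Wcast-align):

```cpp
// Compile with: clang++ -Wcast-align -c cast_align.cpp
void check_casts(const volatile char* p) {
    // No warning: char requires no stricter alignment than the source.
    const volatile char* j = (const volatile char*)p;

    // Expected to warn: short and int normally require stricter alignment
    // than a char pointer guarantees.
    const volatile short* k = (const volatile short*)p;
    const volatile int*   l = (const volatile int*)p;

    (void)j; (void)k; (void)l;
}
```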