    Searched refs: __volatile__ (Results 1 - 25 of 637)


  /external/clang/test/CodeGen/
mips-clobber-reg.c 13 __asm__ __volatile__ (".set noat \n\t addi $7,$at,77":::"at");
14 __asm__ __volatile__ ("addi $7,$v0,77":::"v0");
15 __asm__ __volatile__ ("addi $7,$v1,77":::"v1");
16 __asm__ __volatile__ ("addi $7,$a0,77":::"a0");
17 __asm__ __volatile__ ("addi $7,$a1,77":::"a1");
18 __asm__ __volatile__ ("addi $7,$a2,77":::"a2");
19 __asm__ __volatile__ ("addi $7,$a3,77":::"a3");
20 __asm__ __volatile__ ("addi $7,$t0,77":::"t0");
21 __asm__ __volatile__ ("addi $7,$t1,77":::"t1");
22 __asm__ __volatile__ ("addi $7,$t2,77":::"t2");
    [all...]
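
The mips-clobber-reg.c hits above exercise GCC extended asm clobber lists: each statement names the register it scribbles on in the third colon section so the compiler keeps nothing live there across the asm. A minimal hypothetical sketch of the same idea on x86 (function name and scratch register are illustrative, not taken from the test):

    #include <stdio.h>

    /* Hypothetical sketch of a clobber list: the asm uses %ecx as scratch,
     * so "ecx" is listed as clobbered and the compiler will neither allocate
     * operands to it nor rely on its old value afterwards. */
    static int add_five(int x) {
        int out;
        __asm__ __volatile__(
            "movl %1, %%ecx\n\t"
            "addl $5, %%ecx\n\t"
            "movl %%ecx, %0"
            : "=r"(out)      /* output            */
            : "r"(x)         /* input             */
            : "ecx");        /* clobbered scratch */
        return out;
    }

    int main(void) {
        printf("%d\n", add_five(37));   /* prints 42 */
        return 0;
    }
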
nvptx-inlineasm.c 7 asm __volatile__ ("{ \n\t"
2008-12-23-AsmIntPointerTie.c 7 __asm__ __volatile__ ("%0 %1 " : "=r" (a): "0" (b));
mips-constraint-regs.c 15 __asm__ __volatile__(
26 __asm__ __volatile__(
39 __asm__ __volatile__(
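
2008-12-23-AsmIntPointerTie.c uses the matching constraint "0", which forces an input into the same register chosen for output operand 0. A hypothetical sketch of that tie in isolation (names are mine, not from the test):

    /* Hypothetical sketch of a "0" matching constraint: input b must be
     * placed in the register picked for output a, so even with an empty
     * template a comes back holding b's value. */
    static long copy_via_tie(long b) {
        long a;
        __asm__ __volatile__("" : "=r"(a) : "0"(b));
        return a;   /* a == b */
    }
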
  /external/valgrind/main/memcheck/tests/x86/
bug152022.c 17 __asm__ __volatile__( "subw $0x28, %%sp\n"
fxsave.c 18 asm __volatile__("fxsave (%0)" : : "r" (p) : "memory" );
22 asm __volatile__("fxrstor (%0)" : : "r" (p) : "memory" );
27 asm __volatile__("finit");
28 asm __volatile__(
38 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm0");
39 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm1");
40 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm2");
41 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm3");
42 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm4");
    43 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm5");
    [all...]
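
The fxsave.c hits pass the save-area address in a register and add a "memory" clobber so the compiler knows the 512-byte buffer is read and written behind its back. A hypothetical sketch of that pattern (buffer name and wrappers are mine; fxsave requires a 16-byte-aligned, 512-byte area):

    /* Hypothetical sketch of the fxsave/fxrstor pattern (x86/x86-64 only). */
    static unsigned char fxarea[512] __attribute__((aligned(16)));

    static void fpu_save(void *p)
    {
        __asm__ __volatile__("fxsave (%0)" : : "r"(p) : "memory");
    }

    static void fpu_restore(void *p)
    {
        __asm__ __volatile__("fxrstor (%0)" : : "r"(p) : "memory");
    }
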
fprem.c 10 __asm__ __volatile__(
25 __asm__ __volatile__(
40 __asm__ __volatile__(
52 __asm__ __volatile__("finit");
xor-undef-x86.c 17 __asm__ __volatile__(
26 __asm__ __volatile__(
38 __asm__ __volatile__(
51 __asm__ __volatile__(
67 __asm__ __volatile__(
81 __asm__ __volatile__(
98 __asm__ __volatile__(
112 __asm__ __volatile__(
127 __asm__ __volatile__(
141 __asm__ __volatile__(
    [all...]
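
The xor-undef tests appear to probe Memcheck's handling of the xor-with-self idiom, whose result is a defined zero even if the register previously held undefined data. A hypothetical sketch of the idiom itself (x86; names are mine):

    /* Hypothetical sketch: xor-ing a register with itself always yields 0,
     * independent of the register's previous contents. */
    static int zero_via_xor(void) {
        int r;
        __asm__ __volatile__("xorl %0, %0" : "=r"(r));
        return r;   /* always 0 */
    }
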
  /external/valgrind/main/VEX/test/
fpconst.c 7 asm __volatile__("fninit");
8 asm __volatile__("fld1");
9 asm __volatile__("fstpl (%0)" : : "r" (p) : "memory" );
14 asm __volatile__("fninit");
15 asm __volatile__("fldl2t");
16 asm __volatile__("fstpl (%0)" : : "r" (p) : "memory" );
21 asm __volatile__("fninit");
22 asm __volatile__("fldl2e");
23 asm __volatile__("fstpl (%0)" : : "r" (p) : "memory" );
    28 asm __volatile__("fninit");
    [all...]
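
fpconst.c loads x87 constants (fld1, fldl2t, fldl2e, ...) and stores each to memory as a double, one volatile asm per instruction. A hypothetical single-block sketch of the same sequence for 1.0 (x86 only; function name is mine):

    #include <stdio.h>

    /* Hypothetical sketch: fninit resets the FPU, fld1 pushes the constant
     * 1.0 onto the x87 stack, fstpl stores it as a double and pops. */
    static double x87_one(void) {
        double out;
        __asm__ __volatile__(
            "fninit\n\t"
            "fld1\n\t"
            "fstpl %0"
            : "=m"(out));
        return out;
    }

    int main(void) {
        printf("%f\n", x87_one());   /* prints 1.000000 */
        return 0;
    }
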
fxsave.c 17 asm __volatile__("fxsave (%0)" : : "r" (p) : "memory" );
21 asm __volatile__("fxrstor (%0)" : : "r" (p) : "memory" );
26 asm __volatile__("finit");
27 asm __volatile__(
37 asm __volatile__("movups vecZ, %xmm0");
38 asm __volatile__("movups vecZ, %xmm1");
39 asm __volatile__("movups vecZ, %xmm2");
40 asm __volatile__("movups vecZ, %xmm3");
41 asm __volatile__("movups vecZ, %xmm4");
    42 asm __volatile__("movups vecZ, %xmm5");
    [all...]
  /external/valgrind/main/memcheck/tests/amd64/
fxsave-amd64.c 20 asm __volatile__("rex64/fxsave (%0)" : : "r" (p) : "memory" );
22 asm __volatile__("fxsave (%0)" : : "r" (p) : "memory" );
29 asm __volatile__("rex64/fxrstor (%0)" : : "r" (p) : "memory" );
31 asm __volatile__("fxrstor (%0)" : : "r" (p) : "memory" );
37 asm __volatile__("finit");
38 asm __volatile__(
49 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm0");
50 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm1");
51 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm2");
    52 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm3");
    [all...]
xor-undef-amd64.c 17 __asm__ __volatile__(
26 __asm__ __volatile__(
38 __asm__ __volatile__(
50 __asm__ __volatile__(
65 __asm__ __volatile__(
77 __asm__ __volatile__(
92 __asm__ __volatile__(
104 __asm__ __volatile__(
117 __asm__ __volatile__(
129 __asm__ __volatile__(
    [all...]
  /external/valgrind/main/none/tests/x86/
cse_fail.c 32 __asm__ __volatile__(
44 __asm__ __volatile__(
movx.c 7 __asm__ __volatile__(
20 __asm__ __volatile__(
33 __asm__ __volatile__(
46 __asm__ __volatile__(
59 __asm__ __volatile__(
72 __asm__ __volatile__(
85 __asm__ __volatile__(
98 __asm__ __volatile__(
111 __asm__ __volatile__(
124 __asm__ __volatile__(
    [all...]
  /external/valgrind/main/none/tests/ppc32/
bug129390-ppc32.c 7 __asm__ __volatile__(
  /external/valgrind/main/none/tests/ppc64/
twi_tdi.c 33 __asm__ __volatile__("twi 0, %0,-100"
37 __asm__ __volatile__("twi 1, %0,-100"
41 __asm__ __volatile__("twi 2, %0,-100"
45 __asm__ __volatile__("twi 3, %0,-100"
49 __asm__ __volatile__("twi 4, %0,-100"
53 __asm__ __volatile__("twi 5, %0,-100"
57 __asm__ __volatile__("twi 6, %0,-100"
61 __asm__ __volatile__("twi 7, %0,-100"
65 __asm__ __volatile__("twi 8, %0,-100"
    69 __asm__ __volatile__("twi 9, %0,-100"
    [all...]
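
twi_tdi.c walks the PowerPC trap-word-immediate instruction through its TO condition codes. A hypothetical single-case sketch (PowerPC only; TO=4 means "equal", so this traps only when v == -100):

    /* Hypothetical sketch (PowerPC): twi TO,RA,SI raises a trap when the
     * comparison of RA with SI matches the TO bit mask.  With TO=4
     * ("equal") this is a no-op unless v == -100. */
    static void trap_if_minus_100(int v) {
        __asm__ __volatile__("twi 4, %0, -100" : : "r"(v));
    }
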
tw_td.c 34 __asm__ __volatile__("tw 0, %0,%1"
38 __asm__ __volatile__("tw 1, %0,%1"
42 __asm__ __volatile__("tw 2, %0,%1"
46 __asm__ __volatile__("tw 3, %0,%1"
50 __asm__ __volatile__("tw 4, %0,%1"
54 __asm__ __volatile__("tw 5, %0,%1"
58 __asm__ __volatile__("tw 6, %0,%1"
62 __asm__ __volatile__("tw 7, %0,%1"
66 __asm__ __volatile__("tw 8, %0,%1"
    70 __asm__ __volatile__("tw 9, %0,%1"
    [all...]
  /external/valgrind/main/helgrind/tests/
tc19_shadowmem.c 287 __asm__ __volatile__("");
289 __asm__ __volatile__("");
291 __asm__ __volatile__("");
293 __asm__ __volatile__("");
295 __asm__ __volatile__("");
297 __asm__ __volatile__("");
299 __asm__ __volatile__("");
301 __asm__ __volatile__("");
303 __asm__ __volatile__("");
305 __asm__ __volatile__("");
    [all...]
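
tc19_shadowmem.c places empty volatile asm statements between memory accesses; they emit no instructions but keep the optimizer from merging or reordering the surrounding code. The common general-purpose form adds a "memory" clobber (which these hits omit) to make it a full compiler barrier; a hypothetical sketch:

    /* Hypothetical sketch: an empty volatile asm as a pure compiler barrier.
     * No machine code is emitted, but the "memory" clobber forbids caching
     * memory values across it or moving loads/stores past it. */
    #define compiler_barrier() __asm__ __volatile__("" : : : "memory")
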
  /external/chromium_org/third_party/re2/util/
atomicops.h 12 __asm__ __volatile__("xchgl (%0),%0" // The lock prefix is implicit for xchg.
21 __asm__ __volatile__("sfence" : : : "memory");
27 __asm__ __volatile__("eieio" : : : "memory");
33 __asm__ __volatile__("wmb" : : : "memory");
70 __asm__ __volatile__("mb" : : : "memory");
  /external/regex-re2/util/
atomicops.h 12 __asm__ __volatile__("xchgl (%0),%0" // The lock prefix is implicit for xchg.
21 __asm__ __volatile__("sfence" : : : "memory");
27 __asm__ __volatile__("eieio" : : : "memory");
33 __asm__ __volatile__("wmb" : : : "memory");
70 __asm__ __volatile__("mb" : : : "memory");
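
Both copies of atomicops.h pick a write-barrier instruction per architecture (sfence on x86, eieio on PowerPC, and wmb/mb on what appear to be the Alpha paths) and wrap it in a volatile asm with a "memory" clobber so it doubles as a compiler barrier. A hypothetical sketch of that dispatch (macro name and exact #ifdef set are mine):

    #if defined(__i386__) || defined(__x86_64__)
    # define write_barrier() __asm__ __volatile__("sfence" : : : "memory")
    #elif defined(__powerpc__) || defined(__ppc__)
    # define write_barrier() __asm__ __volatile__("eieio" : : : "memory")
    #elif defined(__alpha__)
    # define write_barrier() __asm__ __volatile__("wmb" : : : "memory")
    #else
    # define write_barrier() __sync_synchronize()   /* generic GCC fallback */
    #endif
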
  /external/valgrind/main/VEX/switchback/
test_simple.c 5 __asm__ __volatile__ ("addi 17, 14, 5");
  /bionic/libc/kernel/arch-arm/asm/
locks.h 22 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
24 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
25 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
29 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
30 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); })
32 #define __up_op_read(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_read\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " moveq ip, %0\n" " bleq " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
35 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%0]\n" " subs lr, lr, %1\n" " str lr, [%0]\n" " msr cpsr_c, ip\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
36 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%1]\n" " subs lr, lr, %2\n" " str lr, [%1]\n" " msr cpsr_c, ip\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
37 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%0]\n" " adds lr, lr, %1\n" " str lr, [%0]\n" " msr cpsr_c, ip\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
41 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [ (…)
    [all...]
  /development/ndk/platforms/android-3/arch-arm/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); })
32 #define __up_op_read(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_read\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " moveq ip, %0\n" " bleq " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
36 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%0]\n" " subs lr, lr, %1\n" " str lr, [%0]\n" " msr cpsr_c, ip\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
38 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%1]\n" " subs lr, lr, %2\n" " str lr, [%1]\n" " msr cpsr_c, ip\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
40 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%0]\n" " adds lr, lr, %1\n" " str lr, [%0]\n" " msr cpsr_c, ip\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
45 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [ (…)
    [all...]
  /external/kernel-headers/original/asm-arm/
locks.h 19 __asm__ __volatile__( \
38 __asm__ __volatile__( \
60 __asm__ __volatile__( \
86 __asm__ __volatile__( \
105 __asm__ __volatile__( \
125 __asm__ __volatile__( \
144 __asm__ __volatile__( \
164 __asm__ __volatile__( \
187 __asm__ __volatile__( \
214 __asm__ __volatile__( \
    [all...]
  /prebuilts/ndk/4/platforms/android-3/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); })
32 #define __up_op_read(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_read\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " moveq ip, %0\n" " bleq " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
36 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%0]\n" " subs lr, lr, %1\n" " str lr, [%0]\n" " msr cpsr_c, ip\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
38 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%1]\n" " subs lr, lr, %2\n" " str lr, [%1]\n" " msr cpsr_c, ip\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
40 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [%0]\n" " adds lr, lr, %1\n" " str lr, [%0]\n" " msr cpsr_c, ip\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
45 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" " mrs ip, cpsr\n" " orr lr, ip, #128\n" " msr cpsr_c, lr\n" " ldr lr, [ (…)
    [all...]
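
The locks.h hits are copies of the same Linux-derived ARM semaphore helpers: an ldrex/strex retry loop adjusts the counter atomically and then conditionally branches to the fail or wake helper. A reduced hypothetical sketch of just the ldrex/strex core (ARMv6+ only; the function name is mine, and the real macros hard-code ip/lr instead of letting the compiler pick registers):

    /* Hypothetical sketch of the ldrex/strex retry loop the locks.h macros
     * are built around (ARMv6+).  Atomically decrements *ptr and returns
     * the new value. */
    static inline int atomic_dec_return(int *ptr) {
        int newval, tmp;
        __asm__ __volatile__(
            "1: ldrex   %0, [%2]\n\t"      /* load-exclusive the counter       */
            "   sub     %0, %0, #1\n\t"    /* decrement                        */
            "   strex   %1, %0, [%2]\n\t"  /* try to store; %1 == 0 on success */
            "   teq     %1, #0\n\t"
            "   bne     1b"                /* reservation lost: retry          */
            : "=&r"(newval), "=&r"(tmp)
            : "r"(ptr)
            : "cc", "memory");
        return newval;
    }
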
