/prebuilts/ndk/9/platforms/android-12/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
/prebuilts/ndk/9/platforms/android-13/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
/prebuilts/ndk/9/platforms/android-14/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
/prebuilts/ndk/9/platforms/android-15/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
/prebuilts/ndk/9/platforms/android-16/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
/prebuilts/ndk/9/platforms/android-17/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
/prebuilts/ndk/9/platforms/android-18/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
/prebuilts/ndk/9/platforms/android-19/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
/prebuilts/ndk/9/platforms/android-9/arch-x86/usr/include/asm/ |
tlbflush_32.h | 27 #define __native_flush_tlb() do { unsigned int tmpreg; __asm__ __volatile__( "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" : "=r" (tmpreg) :: "memory"); } while (0) 29 #define __native_flush_tlb_global() do { unsigned int tmpreg, cr4, cr4_orig; __asm__ __volatile__( "movl %%cr4, %2; # turn off PGE \n" "movl %2, %1; \n" "andl %3, %1; \n" "movl %1, %%cr4; \n" "movl %%cr3, %0; \n" "movl %0, %%cr3; # flush TLB \n" "movl %2, %%cr4; # turn PGE back on \n" : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) : "i" (~X86_CR4_PGE) : "memory"); } while (0) 30 #define __native_flush_tlb_single(addr) __asm__ __volatile__("invlpg (%0)" ::"r" (addr) : "memory")
|
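All of the android-9 through android-19 x86 NDK copies of tlbflush_32.h carry the same three 32-bit TLB-flush primitives: reloading CR3 drops every non-global TLB entry, temporarily clearing CR4.PGE around a CR3 reload drops the global entries as well, and invlpg evicts the translation for a single linear address. A minimal sketch of the same idea as inline functions, assuming ring-0 execution (CR3/CR4 writes fault in user mode) and a locally defined X86_CR4_PGE constant:

    #define X86_CR4_PGE 0x00000080u   /* CR4 bit 7: global-page enable (defined here for the sketch) */

    /* Flush non-global TLB entries by writing CR3 back to itself. */
    static inline void flush_tlb_local(void)
    {
        unsigned int tmp;
        __asm__ __volatile__("movl %%cr3, %0\n\t"
                             "movl %0, %%cr3"          /* CR3 write invalidates non-global entries */
                             : "=r"(tmp) : : "memory");
    }

    /* Flush everything, including global pages, by toggling CR4.PGE around the CR3 reload. */
    static inline void flush_tlb_global(void)
    {
        unsigned int tmp, cr4_new, cr4_orig;
        __asm__ __volatile__("movl %%cr4, %2\n\t"      /* save original CR4 */
                             "movl %2, %1\n\t"
                             "andl %3, %1\n\t"         /* clear PGE */
                             "movl %1, %%cr4\n\t"      /* global entries now participate in the flush */
                             "movl %%cr3, %0\n\t"
                             "movl %0, %%cr3\n\t"      /* reload CR3: flush TLB */
                             "movl %2, %%cr4"          /* restore CR4, re-enabling PGE if it was on */
                             : "=&r"(tmp), "=&r"(cr4_new), "=&r"(cr4_orig)
                             : "i"(~X86_CR4_PGE)
                             : "memory");
    }

    /* Invalidate the cached translation for one linear address. */
    static inline void flush_tlb_one(unsigned long addr)
    {
        __asm__ __volatile__("invlpg (%0)" : : "r"(addr) : "memory");
    }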
/external/valgrind/helgrind/tests/ |
tc07_hbl1.c | 48 __asm__ __volatile__ ( \ 52 __asm__ __volatile__( \ 63 __asm__ __volatile__( \ 75 __asm__ __volatile__( \ 87 __asm__ __volatile__( \ 97 __asm__ __volatile__ ( \
|
/external/valgrind/memcheck/tests/amd64/ |
bt_everything.c | 21 __asm__ 35 __asm__ 46 __asm__ 57 __asm__ 72 __asm__ 86 __asm__ 97 __asm__ 108 __asm__ 124 __asm__ 138 __asm__ [all...] |
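Only the statement markers survive in this listing; going by the file name alone, the memcheck test exercises the x86-64 BT family (bt/bts/btr/btc) on registers and memory. As a hedged illustration of that instruction family rather than the test's actual code, a bts on a memory bit array looks like this:

    /* Set bit 'bit' in the array at 'bits' and return the bit's previous value (taken from CF).
       The bit index may address memory beyond the first word, hence the "memory" clobber. */
    static int set_bit_and_test(unsigned long *bits, unsigned long bit)
    {
        unsigned char old;
        __asm__ __volatile__("btsq %2, %0\n\t"
                             "setc %1"
                             : "+m"(*bits), "=q"(old)
                             : "r"(bit)
                             : "cc", "memory");
        return old;
    }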
/external/valgrind/none/tests/mips64/ |
macro_int.h | 4 __asm__ __volatile__( \ 22 __asm__ __volatile__( \ 38 __asm__ __volatile__( \ 55 __asm__ __volatile__( \ 75 __asm__ __volatile__( \ 94 __asm__ __volatile__( \
|
unaligned_load_store.c | 18 __asm__ volatile("move $a0, %0" "\n\t" 47 __asm__ volatile("move $a0, %0" "\n\t"
|
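The visible fragment only shows the pointer being staged into $a0 before the interesting instructions. As an assumption-labeled sketch of what an unaligned 32-bit load looks like on pre-R6 MIPS (letting the compiler pick registers instead of pinning $a0/$t0 the way the test does), the classic lwl/lwr pair is:

    /* Unaligned 32-bit load via lwl/lwr. Offsets shown for little-endian;
       a big-endian build swaps the 0 and 3 displacements between the two instructions. */
    static int load32_unaligned(const char *p)
    {
        int v;
        __asm__ volatile("lwr %0, 0(%1)\n\t"    /* low-order bytes */
                         "lwl %0, 3(%1)\n\t"    /* high-order bytes */
                         : "=&r"(v)
                         : "r"(p)
                         : "memory");
        return v;
    }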
/hardware/intel/common/libmix/mix_vbp/viddec_fw/fw/parser/include/ |
fw_pvt.h | 14 #define TRAPS_ENABLE __asm__ volatile ("mov %%psr, %%l0; or %%l0, 0x20, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0") 15 #define TRAPS_DISABLE __asm__ volatile ("mov %%psr, %%l0; and %%l0, ~0x20, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0") 17 #define TRAPS_INT_ENABLE __asm__ volatile ("mov %%psr, %%l0; and %%l0, ~0xF00, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0") 18 #define TRAPS_INT_DISABLE __asm__ volatile ("mov %%psr, %%l0; or %%l0, 0xF00, %%l0; mov %%l0, %%psr; nop; nop; nop;":::"l0") 20 #define TRAPS_ENABLED(enabled) __asm__ volatile ("mov %%psr, %0; and %0, 0x20, %0": "=r" (enabled):) 22 #define TRAPS_INT_DISABLED(enabled) __asm__ volatile ("mov %%psr, %0; and %0, 0xF00, %0": "=r" (enabled):)
|
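These firmware macros gate traps and interrupts by read-modify-writing the processor status register: bit 0x20 is the trap-enable bit, bits 0xF00 form the interrupt mask, and %l0 serves as a scratch register. Re-expressing just the query macros as C helpers (same asm as TRAPS_ENABLED and TRAPS_INT_DISABLED above, and only meaningful on the firmware core this header targets):

    /* Non-zero when traps are enabled (PSR bit 0x20), mirroring TRAPS_ENABLED. */
    static inline int traps_enabled(void)
    {
        int enabled;
        __asm__ volatile("mov %%psr, %0; and %0, 0x20, %0" : "=r"(enabled));
        return enabled != 0;
    }

    /* Non-zero when interrupts are masked (PSR bits 0xF00), mirroring TRAPS_INT_DISABLED. */
    static inline int ints_disabled(void)
    {
        int masked;
        __asm__ volatile("mov %%psr, %0; and %0, 0xF00, %0" : "=r"(masked));
        return masked != 0;
    }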
/art/runtime/arch/x86_64/ |
thread_x86_64.cc | 52 __asm__ __volatile__("movq %%gs:(%1), %0" 62 __asm__ __volatile__("movq %%gs:(%1), %0"
|
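Both hits are the same pattern: ART reads a 64-bit thread-local value through the GS segment, where "movq %%gs:(%1), %0" loads the quadword at byte offset %1 from the GS base that the runtime has pointed at per-thread data. A standalone sketch of the addressing mode; the offset and its meaning here are placeholders, not ART's real Thread layout:

    #include <stdint.h>

    /* Load the 64-bit value stored 'offset' bytes past the GS segment base. */
    static inline uintptr_t read_gs_qword(uintptr_t offset)
    {
        uintptr_t value;
        __asm__ __volatile__("movq %%gs:(%1), %0"
                             : "=r"(value)
                             : "r"(offset));
        return value;
    }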
/development/ndk/platforms/android-21/arch-arm/include/machine/ |
endian.h | 58 __asm__ __volatile__("rev16 %0, %0" : "+l" (_x)); \ 64 __asm__ __volatile__("rev %0, %0" : "+l" (_x)); \
|
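Bionic's ARM byte-swap macros lean on the rev16 and rev instructions instead of shift-and-mask sequences: rev16 reverses the bytes inside each 16-bit half of the register, rev reverses all four bytes. The same idea as plain functions rather than the header's macros:

    #include <stdint.h>

    static inline uint16_t swap16_arm(uint16_t x)
    {
        uint32_t v = x;
        __asm__ __volatile__("rev16 %0, %0" : "+l"(v));   /* byte-swap each halfword */
        return (uint16_t)v;                               /* low halfword now holds the swapped value */
    }

    static inline uint32_t swap32_arm(uint32_t x)
    {
        __asm__ __volatile__("rev %0, %0" : "+l"(x));     /* reverse all four bytes */
        return x;
    }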
/external/bison/lib/ |
fpucw.h | 75 __asm__ __volatile__ ("fnstcw %0" : "=m" (*&_cw)); \ 80 __asm__ __volatile__ ("fldcw %0" : : "m" (*&_ncw)); \
|
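gnulib's fpucw.h pairs fnstcw, which stores the 16-bit x87 control word to memory, with fldcw, which loads a new one; that is how precision and rounding control get switched on x86. Stripped of the header's macro plumbing (the "*&_cw" spelling there only forces a memory operand), the pair reduces to:

    typedef unsigned short fpucw_t;

    /* Read the current x87 FPU control word. */
    static inline fpucw_t get_fpu_control_word(void)
    {
        fpucw_t cw;
        __asm__ __volatile__("fnstcw %0" : "=m"(cw));
        return cw;
    }

    /* Install a new x87 control word (rounding mode, precision, exception masks). */
    static inline void set_fpu_control_word(fpucw_t cw)
    {
        __asm__ __volatile__("fldcw %0" : : "m"(cw));
    }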
/external/libopus/silk/arm/ |
SigProc_FIX_armv5e.h | 36 __asm__( 51 __asm__(
|
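Only the __asm__( markers made it into the listing; the header's name says it supplies ARMv5E DSP versions of SILK's fixed-point macros. As an assumption-labeled illustration of the kind of operation such a header wraps (not the library's exact macro), smulwb performs a 32x16 fractional multiply in one instruction:

    /* Computes (a32 * (int16_t)b32) >> 16 using ARMv5E's smulwb. */
    static inline int smulwb_sketch(int a32, int b32)
    {
        int result;
        __asm__("smulwb %0, %1, %2"
                : "=r"(result)
                : "r"(a32), "r"(b32));
        return result;
    }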
/external/speex/libspeex/ |
cb_search_bfin.h | 42 __asm__ __volatile__ ( 92 __asm__ __volatile__
|
filters_arm4.h | 42 __asm__ __volatile__ ( 72 __asm__ __volatile__ (
|
/external/valgrind/none/tests/arm/ |
ldrt.c | 14 __asm__ __volatile__( 23 __asm__ __volatile__( 31 __asm__ __volatile__( 40 __asm__ __volatile__( 49 __asm__ __volatile__( 57 __asm__ __volatile__( 66 __asm__ __volatile__( 76 __asm__ __volatile__(
|
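The listing shows only statement markers, but the file name points at ldrt, the ARM "load register unprivileged" instruction, which performs the load with user-mode permission checks even when executed in a privileged mode. A hedged sketch (ARM/A32 state assumed, not the test's code) of issuing it from C; the post-indexed form nominally writes the base register back, so the pointer is passed as a read-write operand:

    static unsigned int load_via_ldrt(const unsigned int *p)
    {
        unsigned int v;
        const unsigned int *base = p;            /* base register is formally updated (by zero) */
        __asm__ __volatile__("ldrt %0, [%1]"
                             : "=&r"(v), "+r"(base)
                             :
                             : "memory");
        return v;
    }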
/external/valgrind/none/tests/mips32/ |
unaligned_load_store.c | 18 __asm__ volatile("move $a0, %0" "\n\t" 47 __asm__ volatile("move $a0, %0" "\n\t"
|
/external/valgrind/none/tests/s390x/ |
srnm.c | 8 __asm__ volatile ( "lghi 8," #b "\n\t" \ 17 __asm__ volatile ("stfpc %0\n\t" : "=m"(fpc));
|
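The fragments show an immediate being loaded with lghi and the floating-point control register being stored with stfpc; the test name refers to srnm, which sets the FPC rounding-mode bits. Mirroring just the readout side shown in the second hit:

    /* Read the s390x floating-point control (FPC) register; its low-order bits hold the rounding mode. */
    static unsigned int read_fpc(void)
    {
        unsigned int fpc;
        __asm__ volatile("stfpc %0" : "=m"(fpc));
        return fpc;
    }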
/external/valgrind/none/tests/x86/ |
jcxz.c | 11 __asm__ __volatile__( 32 __asm__ __volatile__(
|
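Going by the file name, the test exercises jcxz/jecxz, which branch only when CX or ECX is zero and leave the flags untouched. A hedged illustration of wrapping jecxz in inline asm, not the test's own code:

    /* Returns 1 when x == 0, deciding with jecxz rather than a compare. */
    static int is_zero_via_jecxz(unsigned int x)
    {
        int r;
        __asm__ __volatile__("movl %1, %%ecx\n\t"
                             "movl $1, %0\n\t"
                             "jecxz 1f\n\t"            /* taken only when ECX is zero */
                             "movl $0, %0\n\t"
                             "1:"
                             : "=&r"(r)
                             : "r"(x)
                             : "ecx");
        return r;
    }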
/external/webrtc/src/system_wrappers/source/ |
cpu_features.cc | 34 __asm__ volatile ( 43 __asm__ volatile (
|
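Only the statement markers survive, but given the file's job (CPU feature detection) the asm blocks are almost certainly CPUID wrappers. A hedged sketch of the usual pattern for x86-64, not WebRTC's exact code, and ignoring the EBX/PIC complication that 32-bit builds have to handle:

    /* Query CPUID leaf 'op' into out[EAX, EBX, ECX, EDX]. */
    static void cpuid_sketch(unsigned int op, unsigned int out[4])
    {
        __asm__ volatile("cpuid"
                         : "=a"(out[0]), "=b"(out[1]), "=c"(out[2]), "=d"(out[3])
                         : "a"(op), "c"(0));
    }

A caller would, for example, run cpuid_sketch(1, out) and test (out[3] >> 26) & 1 to detect SSE2.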