/external/linux-tools-perf/ |
perf.h |
  16 #define rmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory") macro
  22 #define rmb() asm volatile("lfence" ::: "memory") macro
  28 #define rmb() asm volatile ("sync" ::: "memory") macro
  34 #define rmb() asm volatile("bcr 15,0" ::: "memory") macro
  41 # define rmb() asm volatile("synco" ::: "memory") macro
  43 # define rmb() asm volatile("" ::: "memory") macro
  50 #define rmb() asm volatile("" ::: "memory") macro
  56 #define rmb() asm volatile("":::"memory") macro
  62 #define rmb() asm volatile("mb" ::: "memory") macro
  68 #define rmb() asm volatile ("mf" ::: "memory") macro
  84 #define rmb macro
  90 #define rmb macro
  [all...] |
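The perf tool carries these per-architecture rmb() definitions because it consumes the mmap'ed ring buffer that the kernel fills concurrently: user space reads data_head, issues a read barrier, and only then walks the records. A minimal sketch of that consumer loop follows; field names are those of struct perf_event_mmap_page from <linux/perf_event.h>, drain_ring() and handle() are hypothetical helpers, and the x86 lfence variant of rmb() stands in for whichever definition the build selects.

    #include <stddef.h>
    #include <linux/perf_event.h>

    #define rmb() asm volatile("lfence" ::: "memory")   /* x86 definition from perf.h */

    /* Walk every record between our consumer index and the kernel's producer
     * index. 'mask' is the ring size minus one (the size is a power of two). */
    static void drain_ring(struct perf_event_mmap_page *pg, unsigned char *data,
                           size_t mask, void (*handle)(struct perf_event_header *))
    {
            __u64 head = pg->data_head;   /* producer index, written by the kernel */
            __u64 tail = pg->data_tail;   /* consumer index, written by us */

            rmb();  /* order the data_head load before the loads of the records */

            while (tail < head) {
                    struct perf_event_header *rec =
                            (struct perf_event_header *)(data + (tail & mask));
                    handle(rec);
                    tail += rec->size;
            }

            pg->data_tail = tail;         /* tell the kernel the space is free again */
    }

Real perf additionally copes with records that wrap around the end of the buffer and orders the final data_tail store; both are omitted from this sketch.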
/bionic/libc/kernel/arch-x86/asm/ |
system_32.h | 29 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
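The alternative() form that keeps reappearing below is the kernel's boot-time code patching: the generic "lock; addl $0,0(%%esp)" dummy locked add is overwritten in place with "lfence" once the CPU is known to advertise X86_FEATURE_XMM2 (SSE2). A rough user-space approximation, as a sketch only: rmb_runtime() is a made-up name, a branch replaces the in-place patching, and 32-bit x86 is assumed because of %esp.

    static inline void rmb_runtime(void)
    {
            if (__builtin_cpu_supports("sse2"))
                    /* SSE2 CPUs have a real load fence */
                    asm volatile("lfence" ::: "memory");
            else
                    /* pre-SSE2 fallback: a locked RMW on the stack also orders loads */
                    asm volatile("lock; addl $0,0(%%esp)" ::: "memory");
    }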
/development/ndk/platforms/android-9/arch-x86/include/asm/ |
system_32.h | 29 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/prebuilts/ndk/android-ndk-r4/platforms/android-5/arch-x86/usr/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/prebuilts/ndk/android-ndk-r4/platforms/android-8/arch-x86/usr/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/prebuilts/ndk/android-ndk-r6/platforms/android-9/arch-x86/usr/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/prebuilts/ndk/android-ndk-r7/platforms/android-14/arch-x86/usr/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/prebuilts/ndk/android-ndk-r7/platforms/android-9/arch-x86/usr/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/external/kernel-headers/original/asm-mips/ |
barrier.h |
  20 * rmb() on most CPUs, and is never heavier weight than is
  21 * rmb().
  59 * as Alpha, "y" could be set to 3 and "x" to 0. Use rmb()
  120 #define rmb() fast_rmb() macro
  127 #define rmb() fast_rmb() macro
|
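The "y could be set to 3 and x to 0" hit at line 59 is the classic weakly ordered reader/writer example: there is no data dependency between loading the flag and loading the payload, so without a read barrier the reader may observe the new flag together with the stale payload. A sketch of the pairing the comment prescribes; the variable names are illustrative and __sync_synchronize() stands in for the header's barriers so the snippet compiles in user space.

    #define wmb() __sync_synchronize()   /* stand-in for the header's write barrier */
    #define rmb() __sync_synchronize()   /* stand-in for the header's read barrier */

    int payload;            /* "a"/"x" in the header comment */
    int flag;               /* "b"/"y" in the header comment */

    void producer(void)     /* CPU 0 */
    {
            payload = 2;
            wmb();          /* publish the payload before the flag */
            flag = 3;
    }

    void consumer(void)     /* CPU 1 */
    {
            int y = flag;
            rmb();          /* no data dependency between the two loads, so force order */
            int x = payload;
            /* with both barriers, observing y == 3 guarantees x == 2 */
            (void)x;
            (void)y;
    }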
/bionic/libc/kernel/arch-mips/asm/ |
barrier.h | 32 #define rmb() fast_rmb() macro
|
/development/ndk/platforms/android-9/arch-mips/include/asm/ |
barrier.h | 32 #define rmb() fast_rmb() macro
|
/prebuilts/gcc/darwin-x86/mips/mipsel-linux-android-4.4.3/sysroot/usr/include/asm/ |
barrier.h | 32 #define rmb() fast_rmb() macro
|
/prebuilts/gcc/linux-x86/mips/mipsel-linux-android-4.4.3/sysroot/usr/include/asm/ |
barrier.h | 32 #define rmb() fast_rmb() macro
|
/prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.4.3/sysroot/usr/include/alsa/ |
iatomic.h |
  211 #define rmb() mb() macro
  215 #define rmb() asm volatile("lfence":::"memory") macro
  357 * rmb(): Like wmb(), but for reads.
  358 * mb(): wmb()/rmb() combo, i.e., all previous memory
  370 #define rmb() mb() macro
  487 #define rmb() \ macro
  670 * rmb() prevents loads being reordered across this point.
  675 * stronger but slower sync instruction for mb and rmb.
  678 #define rmb() __asm__ __volatile__ ("sync" : : : "memory") macro
  886 #define rmb() mb() macro
  1006 #define rmb macro
  1031 #define rmb macro
  [all...] |
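iatomic.h spells the contract out in its comments: wmb() orders stores, rmb() orders loads, mb() does both, and PowerPC pays for that with the stronger but slower sync instruction. A typical user-space consumer of those barriers is a seqlock-style reader; the sketch below is not ALSA code, just an illustration, and seq, value and read_stable() are made-up names.

    #include <alsa/iatomic.h>       /* provides mb(), rmb(), wmb() */

    volatile unsigned int seq;      /* writer makes this odd while updating */
    volatile int value;             /* datum protected by the sequence counter */

    static int read_stable(void)
    {
            unsigned int s;
            int v;

            do {
                    s = seq;
                    rmb();                  /* read seq before the datum it guards */
                    v = value;
                    rmb();                  /* and re-check seq only after that load */
            } while ((s & 1) || s != seq);  /* retry if a write was in progress */

            return v;
    }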
/prebuilts/gcc/linux-x86/host/i686-linux-glibc2.7-4.6/sysroot/usr/include/alsa/ |
iatomic.h |
  211 #define rmb() mb() macro
  215 #define rmb() asm volatile("lfence":::"memory") macro
  357 * rmb(): Like wmb(), but for reads.
  358 * mb(): wmb()/rmb() combo, i.e., all previous memory
  370 #define rmb() mb() macro
  487 #define rmb() \ macro
  670 * rmb() prevents loads being reordered across this point.
  675 * stronger but slower sync instruction for mb and rmb.
  678 #define rmb() __asm__ __volatile__ ("sync" : : : "memory") macro
  886 #define rmb() mb() macro
  1006 #define rmb macro
  1031 #define rmb macro
  [all...] |
/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.7-4.6/sysroot/usr/include/alsa/ |
iatomic.h |
  211 #define rmb() mb() macro
  215 #define rmb() asm volatile("lfence":::"memory") macro
  357 * rmb(): Like wmb(), but for reads.
  358 * mb(): wmb()/rmb() combo, i.e., all previous memory
  370 #define rmb() mb() macro
  487 #define rmb() \ macro
  670 * rmb() prevents loads being reordered across this point.
  675 * stronger but slower sync instruction for mb and rmb.
  678 #define rmb() __asm__ __volatile__ ("sync" : : : "memory") macro
  886 #define rmb() mb() macro
  1006 #define rmb macro
  1031 #define rmb macro
  [all...] |
/external/kernel-headers/original/asm-x86/ |
system_32.h |
  223 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
  235 * rmb() on most CPUs, and is never heavier weight than is
  236 * rmb().
  274 * as Alpha, "y" could be set to 3 and "x" to 0. Use rmb()
  283 # define smp_rmb() rmb()
|
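The hit at line 283 is the SMP half of a conditional: with CONFIG_SMP, smp_rmb() has to be the real rmb(), while uniprocessor builds can fall back to a plain compiler barrier, since only accesses to device/DMA memory still need the hardware fence and those keep using rmb() directly. The shape of that conditional, reconstructed from the common kernel pattern rather than copied verbatim from this header:

    #define barrier()       asm volatile("" ::: "memory")  /* compiler-only fence */

    #ifdef CONFIG_SMP
    # define smp_rmb()      rmb()           /* other CPUs can reorder: need the hardware fence */
    #else
    # define smp_rmb()      barrier()       /* UP: only the compiler can reorder the loads */
    #endif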
/external/kernel-headers/original/asm-arm/ |
system.h |
  175 #define rmb() mb() macro
  338 #define smp_rmb() rmb()
|
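ARM has no cheaper read-only fence here, so rmb() is simply mb(); driver-style code still calls rmb() between a status load and the dependent data load because, unlike smp_rmb(), it also orders accesses to device memory. A usage sketch: the register layout and names are invented, and __sync_synchronize() stands in for the header's mb()-based rmb().

    #include <stdint.h>

    #define rmb()           __sync_synchronize()  /* stand-in for the header's rmb() */
    #define STATUS_READY    0x1u

    static int read_sample(volatile uint32_t *status_reg,
                           volatile uint32_t *data_reg, uint32_t *out)
    {
            if (!(*status_reg & STATUS_READY))
                    return -1;      /* device has not produced a sample yet */

            rmb();                  /* keep the data load from passing the status load */
            *out = *data_reg;
            return 0;
    }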