/bionic/libc/kernel/arch-x86/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/development/ndk/platforms/android-9/arch-x86/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/prebuilt/ndk/android-ndk-r4/platforms/android-5/arch-x86/usr/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/prebuilt/ndk/android-ndk-r4/platforms/android-8/arch-x86/usr/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
/prebuilt/ndk/android-ndk-r6/platforms/android-9/arch-x86/usr/include/asm/ |
system_32.h | 22 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
|
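Note: the five hits above are the same 32-bit x86 definition, copied from the kernel headers into bionic and the NDK sysroots. alternative() lets the kernel patch the barrier instruction in place at boot: lfence where the CPU advertises SSE2 (X86_FEATURE_XMM2), otherwise a locked add of zero to the top of the stack, which acts as a full barrier on every x86. A hypothetical user-space analogue of that dispatch, assuming GCC on 32-bit x86 (the rmb_* names and the function-pointer indirection are illustrative, not from these headers; the kernel rewrites the instruction itself instead):

    /* Hypothetical sketch: pick the barrier implementation once,
     * the way alternative() does at boot. */
    #include <cpuid.h>

    static void rmb_lock_add(void)
    {
        /* Full-barrier fallback for pre-SSE2 CPUs: a locked add of
         * zero to the top of the stack. */
        __asm__ __volatile__("lock; addl $0,0(%%esp)" ::: "memory");
    }

    static void rmb_lfence(void)
    {
        /* SSE2 load fence, cheaper than the locked add. */
        __asm__ __volatile__("lfence" ::: "memory");
    }

    static void (*rmb_impl)(void) = rmb_lock_add;

    static void rmb_init(void)
    {
        unsigned int eax, ebx, ecx, edx;

        /* CPUID leaf 1, EDX bit 26 is the SSE2 flag that
         * X86_FEATURE_XMM2 refers to. */
        if (__get_cpuid(1, &eax, &ebx, &ecx, &edx) && (edx & bit_SSE2))
            rmb_impl = rmb_lfence;
    }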
/prebuilt/linux-x86/toolchain/i686-linux-glibc2.7-4.4.3/sysroot/usr/include/alsa/ |
iatomic.h |  211 #define rmb() mb() macro
             215 #define rmb() asm volatile("lfence":::"memory") macro
             357  * rmb(): Like wmb(), but for reads.
             358  * mb(): wmb()/rmb() combo, i.e., all previous memory
             370 #define rmb() mb() macro
             487 #define rmb() \ macro
             670  * rmb() prevents loads being reordered across this point.
             675  * stronger but slower sync instruction for mb and rmb.
             678 #define rmb() __asm__ __volatile__ ("sync" : : : "memory") macro
             886 #define rmb() mb() macro
            1006 #define rmb macro
            1031 #define rmb macro
            [all...]
|
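The ALSA header collects per-architecture definitions (x86 lfence, PowerPC sync, plain mb() fallbacks), and its comments state the contract: wmb() orders a writer's stores, rmb() orders a reader's loads. A minimal sketch of that pairing, assuming a GCC build; the producer/consumer names and the ready flag are illustrative, and the fallback defines only stand in for the per-arch macros above:

    /* Fallbacks so the sketch is self-contained: on GCC,
     * __sync_synchronize() is a full barrier, stronger than needed. */
    #ifndef rmb
    #define rmb() __sync_synchronize()
    #endif
    #ifndef wmb
    #define wmb() __sync_synchronize()
    #endif

    static int data;
    static volatile int ready;

    void producer(int value)
    {
        data = value;
        wmb();      /* make the store to data visible before the flag */
        ready = 1;
    }

    int consumer(void)
    {
        while (!ready)
            ;       /* spin until the producer publishes */
        rmb();      /* keep the load of data after the load of ready */
        return data;
    }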
/external/kernel-headers/original/asm-x86/ |
system_32.h |  223 #define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2) macro
               235  * rmb() on most CPUs, and is never heavier weight than is
               236  * rmb().
               274  * as Alpha, "y" could be set to 3 and "x" to 0. Use rmb()
               283 # define smp_rmb() rmb()
|
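The comment at lines 274-283 describes the case where rmb() is mandatory: two loads with no data dependency between them. A sketch of that two-CPU scenario, with hypothetical globals a and b (both initially zero); without the rmb(), a weakly ordered CPU such as Alpha may observe y == 3 together with x == 0:

    /* Stand-ins so the sketch compiles on its own; the real macros
     * come from the headers above. */
    #ifndef mb
    #define mb() __sync_synchronize()
    #endif
    #ifndef rmb
    #define rmb() __sync_synchronize()
    #endif

    static int a, b;

    void cpu0(void)
    {
        a = 2;
        mb();       /* order the store to a before the store to b */
        b = 3;
    }

    void cpu1(int *x, int *y)
    {
        *y = b;
        rmb();      /* the loads have no data dependency, so an
                     * explicit read barrier must order them */
        *x = a;     /* if *y == 3, the barrier pair guarantees *x == 2 */
    }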
/bionic/libc/kernel/arch-sh/asm/ |
system.h | 26 #define rmb() mb() macro
|
/external/kernel-headers/original/asm-arm/ |
system.h |  175 #define rmb() mb() macro
            338 #define smp_rmb() rmb()
|
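Here, as in the x86 header above, smp_rmb() maps straight to rmb(). In the kernel that mapping is normally configuration-dependent; a sketch of the usual split, assuming the standard CONFIG_SMP convention (the excerpt does not show the UP branch):

    /* A UP kernel only needs to stop the compiler from reordering;
     * an SMP kernel needs the real hardware barrier. */
    #ifdef CONFIG_SMP
    #define smp_rmb()   rmb()                                   /* hardware barrier */
    #else
    #define smp_rmb()   __asm__ __volatile__("" ::: "memory")   /* compiler-only */
    #endif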