    Searched refs: ptr (Results 51 - 75 of 5300)


  /prebuilts/ndk/current/platforms/android-14/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
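
  Note: the __down_op/__up_op macros above are the classic ARMv6 semaphore fast paths: a ldrex/strex retry loop decrements (or increments) the count, and a slow-path helper is branched to when the result indicates contention. Below is a minimal, hedged C++ sketch of the same logic; sem_down, sem_up, and the two slow-path stubs are hypothetical names used only for illustration, not anything defined in locks.h.

    #include <atomic>
    #include <cstdio>

    // Hypothetical slow paths, standing in for the "fail"/"wake" targets
    // that the macros reach with blmi/blle.
    static void block_slow_path() { std::puts("contended: would block"); }
    static void wake_slow_path()  { std::puts("contended: would wake a waiter"); }

    // __down_op: atomically decrement; a negative result (MI condition)
    // means the semaphore was unavailable, so take the slow path.
    static void sem_down(std::atomic<int>& count) {
        if (count.fetch_sub(1, std::memory_order_acquire) - 1 < 0)
            block_slow_path();
    }

    // __up_op: barrier first (smp_mb), then increment; a result <= 0 (LE)
    // means a waiter is queued, so call the wake handler.
    static void sem_up(std::atomic<int>& count) {
        if (count.fetch_add(1, std::memory_order_release) + 1 <= 0)
            wake_slow_path();
    }

    int main() {
        std::atomic<int> sem{1};
        sem_down(sem);   // takes the semaphore, no contention
        sem_up(sem);     // releases it, nobody to wake
        return 0;
    }

  The acquire/release orders mirror where the macros place smp_mb(): after the decrement on the down path, before the increment on the up path.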
  /prebuilts/ndk/current/platforms/android-14/arch-x86/usr/include/asm/
cmpxchg_32.h 22 #define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
29 #define set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit_constant(ptr, value) : __set_64bit_var(ptr, value) )
30 #define _set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : __set_64bit(ptr, ll_low(value), ll_high(value))
    [all...]
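
  Note: xchg() above atomically swaps a new value into *ptr and yields the old contents, with __typeof__ preserving the pointed-to type, while set_64bit() performs a single atomic 64-bit store on 32-bit x86 (historically via cmpxchg8b). A hedged sketch of the equivalent behaviour written with std::atomic, purely for illustration:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    int main() {
        std::atomic<uint32_t> word{1};
        // xchg(ptr, v): atomically store v and return the previous contents.
        uint32_t prev = word.exchange(7);
        assert(prev == 1 && word.load() == 7);

        std::atomic<uint64_t> wide{0};
        // set_64bit(ptr, value): one atomic 64-bit store, even on a 32-bit
        // CPU where two separate 32-bit stores would not be atomic.
        wide.store(0x1122334455667788ULL);
        assert(wide.load() == 0x1122334455667788ULL);
        return 0;
    }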
  /prebuilts/ndk/current/platforms/android-15/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-15/arch-x86/usr/include/asm/
cmpxchg_32.h 22 #define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
29 #define set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit_constant(ptr, value) : __set_64bit_var(ptr, value) )
30 #define _set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : __set_64bit(ptr, ll_low(value), ll_high(value))
    [all...]
  /prebuilts/ndk/current/platforms/android-16/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-16/arch-x86/usr/include/asm/
cmpxchg_32.h 22 #define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
29 #define set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit_constant(ptr, value) : __set_64bit_var(ptr, value) )
30 #define _set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : __set_64bit(ptr, ll_low(value), ll_high(value))
    [all...]
  /prebuilts/ndk/current/platforms/android-17/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-17/arch-x86/usr/include/asm/
cmpxchg_32.h 22 #define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
29 #define set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit_constant(ptr, value) : __set_64bit_var(ptr, value) )
30 #define _set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : __set_64bit(ptr, ll_low(value), ll_high(value))
    [all...]
  /prebuilts/ndk/current/platforms/android-18/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-18/arch-x86/usr/include/asm/
cmpxchg_32.h 22 #define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
29 #define set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit_constant(ptr, value) : __set_64bit_var(ptr, value) )
30 #define _set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : __set_64bit(ptr, ll_low(value), ll_high(value))
    [all...]
  /prebuilts/ndk/current/platforms/android-19/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-19/arch-x86/usr/include/asm/
cmpxchg_32.h 22 #define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
29 #define set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit_constant(ptr, value) : __set_64bit_var(ptr, value) )
30 #define _set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : __set_64bit(ptr, ll_low(value), ll_high(value))
    [all...]
  /prebuilts/ndk/current/platforms/android-3/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-4/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-5/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-8/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-9/arch-arm/usr/include/asm/
locks.h 17 #define __down_op(ptr,fail) ({ __asm__ __volatile__( "@ down_op\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %0\n" " blmi " #fail : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); })
19 #define __down_op_ret(ptr,fail) ({ unsigned int ret; __asm__ __volatile__( "@ down_op_ret\n" "1: ldrex lr, [%1]\n" " sub lr, lr, %2\n" " strex ip, lr, [%1]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movmi ip, %1\n" " movpl ip, #0\n" " blmi " #fail "\n" " mov %0, ip" : "=&r" (ret) : "r" (ptr), "I" (1) : "ip", "lr", "cc"); smp_mb(); ret; })
21 #define __up_op(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op\n" "1: ldrex lr, [%0]\n" " add lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " cmp lr, #0\n" " movle ip, %0\n" " blle " #wake : : "r" (ptr), "I" (1) : "ip", "lr", "cc"); })
26 #define __down_op_write(ptr,fail) ({ __asm__ __volatile__( "@ down_op_write\n" "1: ldrex lr, [%0]\n" " sub lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " teq lr, #0\n" " movne ip, %0\n" " blne " #fail : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); smp_mb(); })
28 #define __up_op_write(ptr,wake) ({ smp_mb(); __asm__ __volatile__( "@ up_op_write\n" "1: ldrex lr, [%0]\n" " adds lr, lr, %1\n" " strex ip, lr, [%0]\n" " teq ip, #0\n" " bne 1b\n" " movcs ip, %0\n" " blcs " #wake : : "r" (ptr), "I" (RW_LOCK_BIAS) : "ip", "lr", "cc"); }
    [all...]
  /prebuilts/ndk/current/platforms/android-9/arch-x86/usr/include/asm/
cmpxchg_32.h 22 #define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
29 #define set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit_constant(ptr, value) : __set_64bit_var(ptr, value) )
30 #define _set_64bit(ptr,value) (__builtin_constant_p(value) ? __set_64bit(ptr, (unsigned int)(value), (unsigned int)((value)>>32ULL) ) : __set_64bit(ptr, ll_low(value), ll_high(value))
    [all...]
  /external/protobuf/src/google/protobuf/stubs/
atomicops_internals_x86_msvc.h 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
42 return Barrier_AtomicIncrement(ptr, increment);
49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
52 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
58 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
62 *ptr = value;
65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
66 NoBarrier_AtomicExchange(ptr, value)
    [all...]
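
  Note: in this MSVC/x86 port the Acquire_ and Release_CompareAndSwap variants simply forward to NoBarrier_CompareAndSwap, because the underlying interlocked operation is already a full barrier on x86. A hedged sketch of the same contract expressed with std::atomic (the function name is illustrative, not protobuf API):

    #include <atomic>
    #include <cstdint>

    using Atomic32 = int32_t;

    // Returns the value observed in the cell: equal to old_value on success,
    // the current contents on failure, matching the stub's contract.
    Atomic32 acquire_compare_and_swap(std::atomic<Atomic32>& cell,
                                      Atomic32 old_value, Atomic32 new_value) {
        Atomic32 expected = old_value;
        cell.compare_exchange_strong(expected, new_value,
                                     std::memory_order_acquire,
                                     std::memory_order_acquire);
        return expected;
    }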
  /prebuilts/misc/darwin-x86_64/protobuf2.5/include/google/protobuf/stubs/
atomicops_internals_x86_msvc.h 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
42 return Barrier_AtomicIncrement(ptr, increment);
49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
52 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
58 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
62 *ptr = value;
65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
66 NoBarrier_AtomicExchange(ptr, value)
    [all...]
  /prebuilts/misc/linux-x86_64/protobuf2.5/include/google/protobuf/stubs/
atomicops_internals_x86_msvc.h 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
42 return Barrier_AtomicIncrement(ptr, increment);
49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
52 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
58 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
62 *ptr = value;
65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
66 NoBarrier_AtomicExchange(ptr, value)
    [all...]
  /prebuilts/misc/windows/protobuf2.5/include/google/protobuf/stubs/
atomicops_internals_x86_msvc.h 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
42 return Barrier_AtomicIncrement(ptr, increment);
49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
52 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
58 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
62 *ptr = value;
65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
66 NoBarrier_AtomicExchange(ptr, value)
    [all...]
  /prebuilts/tools/darwin-x86_64/protoc/include/google/protobuf/stubs/
atomicops_internals_x86_msvc.h 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
42 return Barrier_AtomicIncrement(ptr, increment);
49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
52 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
58 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
62 *ptr = value;
65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
66 NoBarrier_AtomicExchange(ptr, value)
    [all...]
  /prebuilts/tools/linux-x86_64/protoc/include/google/protobuf/stubs/
atomicops_internals_x86_msvc.h 40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
42 return Barrier_AtomicIncrement(ptr, increment);
49 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
52 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
55 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
58 return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
61 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
62 *ptr = value;
65 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
66 NoBarrier_AtomicExchange(ptr, value)
    [all...]
  /external/clang/test/CXX/expr/expr.prim/expr.prim.lambda/
p20.cpp 5 void destroy(T* ptr) {
6 ptr->~T();
7 (*ptr).~T();
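
  Note: the clang test above exercises the two spellings of an explicit destructor call, ptr->~T() and (*ptr).~T(). Such calls are normally paired with placement new, as in this hedged sketch (Widget and the local buffer are illustrative, not from the test file):

    #include <new>
    #include <cstdio>

    struct Widget {
        ~Widget() { std::puts("Widget destroyed"); }
    };

    template <typename T>
    void destroy(T* ptr) {
        ptr->~T();      // explicit destructor call through a pointer
    }

    int main() {
        alignas(Widget) unsigned char buf[sizeof(Widget)];
        Widget* w = new (buf) Widget;   // construct in raw storage
        destroy(w);                     // must destroy manually; no delete
        return 0;
    }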
