/****************************************************************************
 ****************************************************************************
 ***
 ***   This header was automatically generated from a Linux kernel header
 ***   of the same name, to make information necessary for userspace to
 ***   call into the kernel available to libc.  It contains only constants,
 ***   structures, and macros generated from the original header, and thus
 ***   contains no copyrightable information.
 ***
 ***   To edit the content of this header, modify the corresponding
 ***   source file (e.g. under external/kernel-headers/original/), then
 ***   run bionic/libc/kernel/tools/update_all.py
 ***
 ***   Any manual change here will be lost the next time this script is
 ***   run. You've been warned!
 ***
 ****************************************************************************
 ****************************************************************************/
#ifndef __ASM_PROC_LOCKS_H
#define __ASM_PROC_LOCKS_H
#if __LINUX_ARM_ARCH__ >= 6
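/*
 * On ARMv6 and later the helpers below are built from LDREX/STREX
 * exclusive-monitor loops: the counter is loaded exclusively, modified,
 * and stored back with STREX, and the whole sequence is retried (bne 1b)
 * until the store-exclusive succeeds without interference from another
 * observer.  The pre-ARMv6 fallback further down has no exclusives and
 * masks IRQs instead.
 */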
#define __down_op(ptr,fail)   ({   __asm__ __volatile__(   "@ down_op\n"  "1:	ldrex	lr, [%0]\n"  "	sub	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	teq	lr, #0\n"  "	movmi	ip, %0\n"  "	blmi	" #fail   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   smp_mb();   })
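/*
 * __down_op atomically decrements the counter at ptr by 1 and, if the
 * result went negative (N flag after "teq lr, #0"), loads ptr into ip and
 * branches to the stringized `fail` routine; smp_mb() then orders the
 * acquire.  Illustrative use only (the count field and down_failed handler
 * are assumed here, not defined by this header):
 *
 *     __down_op(&sem->count, down_failed);
 *
 * Because `fail` is pasted into the asm with the # operator, it must name
 * an assembler-visible symbol, not a C function pointer.
 */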
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define __down_op_ret(ptr,fail)   ({   unsigned int ret;   __asm__ __volatile__(   "@ down_op_ret\n"  "1:	ldrex	lr, [%1]\n"  "	sub	lr, lr, %2\n"  "	strex	ip, lr, [%1]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	teq	lr, #0\n"  "	movmi	ip, %1\n"  "	movpl	ip, #0\n"  "	blmi	" #fail "\n"  "	mov	%0, ip"   : "=&r" (ret)   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   smp_mb();   ret;   })
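/*
 * __down_op_ret is the same decrement, but it yields a value: ip is set to
 * ptr and `fail` is called when the count went negative, ip is forced to 0
 * on the uncontended path, and whatever ends up in ip (the fail routine's
 * return value, or 0) is copied into `ret` and becomes the result of the
 * statement expression.
 */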
#define __up_op(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op\n"  "1:	ldrex	lr, [%0]\n"  "	add	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	cmp	lr, #0\n"  "	movle	ip, %0\n"  "	blle	" #wake   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   })
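/*
 * __up_op increments the counter; a result that is still <= 0 ("cmp lr, #0"
 * followed by movle/blle) means sleepers have pushed it below zero, so the
 * `wake` routine is called with ptr in ip.  The smp_mb() placed before the
 * asm gives the release ordering for the critical section being exited.
 */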
#define RW_LOCK_BIAS 0x01000000
#define RW_LOCK_BIAS_STR "0x01000000"
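/*
 * RW_LOCK_BIAS is the usual biased reader/writer counter scheme: the count
 * starts at 0x01000000, each reader takes 1 from it and a writer takes the
 * whole bias.  __down_op_write below treats any non-zero result as
 * contention, while __down_op_read (a plain __down_op) only fails when the
 * result goes negative, i.e. when a writer has already claimed the bias.
 */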
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define __down_op_write(ptr,fail)   ({   __asm__ __volatile__(   "@ down_op_write\n"  "1:	ldrex	lr, [%0]\n"  "	sub	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	teq	lr, #0\n"  "	movne	ip, %0\n"  "	blne	" #fail   :   : "r" (ptr), "I" (RW_LOCK_BIAS)   : "ip", "lr", "cc");   smp_mb();   })
#define __up_op_write(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op_write\n"  "1:	ldrex	lr, [%0]\n"  "	adds	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	movcs	ip, %0\n"  "	blcs	" #wake   :   : "r" (ptr), "I" (RW_LOCK_BIAS)   : "ip", "lr", "cc");   })
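/*
 * __down_op_write subtracts the full bias and calls `fail` whenever the
 * result is non-zero (someone else already held a read or write lock).
 * __up_op_write adds the bias back with "adds"; a carry out of that
 * addition indicates the count had gone negative (waiters arrived) while
 * the write lock was held, so `wake` is called to run them.
 */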
#define __down_op_read(ptr,fail)   __down_op(ptr, fail)
#define __up_op_read(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op_read\n"  "1:	ldrex	lr, [%0]\n"  "	add	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	teq	lr, #0\n"  "	moveq	ip, %0\n"  "	bleq	" #wake   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   })
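/*
 * __up_op_read gives a reader's unit back.  The result is exactly zero
 * only when this was the last reader out and a writer has already
 * subtracted the bias, which is precisely the case where the writer needs
 * waking (moveq/bleq).
 */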
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#else
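/*
 * Pre-ARMv6 cores have no LDREX/STREX, so the fallback versions make the
 * read-modify-write pseudo-atomic by masking IRQs: CPSR is saved in ip,
 * the I bit (#128) is set, the counter is updated with plain ldr/str, and
 * the original CPSR is restored before the fail/wake call.  This only
 * guards against interrupts on the local CPU, which matches the
 * uniprocessor systems these cores appear in.
 */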
#define __down_op(ptr,fail)   ({   __asm__ __volatile__(   "@ down_op\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	subs	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	movmi	ip, %0\n"  "	blmi	" #fail   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   smp_mb();   })
#define __down_op_ret(ptr,fail)   ({   unsigned int ret;   __asm__ __volatile__(   "@ down_op_ret\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%1]\n"  "	subs	lr, lr, %2\n"  "	str	lr, [%1]\n"  "	msr	cpsr_c, ip\n"  "	movmi	ip, %1\n"  "	movpl	ip, #0\n"  "	blmi	" #fail "\n"  "	mov	%0, ip"   : "=&r" (ret)   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   smp_mb();   ret;   })
#define __up_op(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	adds	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	movle	ip, %0\n"  "	blle	" #wake   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   })
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define RW_LOCK_BIAS 0x01000000
#define RW_LOCK_BIAS_STR "0x01000000"
#define __down_op_write(ptr,fail)   ({   __asm__ __volatile__(   "@ down_op_write\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	subs	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	movne	ip, %0\n"  "	blne	" #fail   :   : "r" (ptr), "I" (RW_LOCK_BIAS)   : "ip", "lr", "cc");   smp_mb();   })
#define __up_op_write(ptr,wake)   ({   __asm__ __volatile__(   "@ up_op_write\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	adds	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	movcs	ip, %0\n"  "	blcs	" #wake   :   : "r" (ptr), "I" (RW_LOCK_BIAS)   : "ip", "lr", "cc");   smp_mb();   })
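/*
 * The reader/writer helpers in this fallback follow the same bias
 * arithmetic as the ARMv6 versions above; only the atomicity mechanism
 * (IRQ masking instead of exclusives) differs.  In both variants ip and lr
 * serve as scratch registers and are declared as clobbers together with
 * the condition codes.
 */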
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define __down_op_read(ptr,fail)   __down_op(ptr, fail)
#define __up_op_read(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op_read\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	adds	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	moveq	ip, %0\n"  "	bleq	" #wake   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   })
#endif
#endif
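/*
 * Illustrative caller, for reference only; the semaphore type and the
 * out-of-line __down_failed/__up_wakeup handlers are assumed here and are
 * not provided by this header:
 *
 *     struct semaphore { volatile int count; };
 *
 *     static inline void down(struct semaphore *sem)
 *     {
 *             __down_op(&sem->count, __down_failed);
 *     }
 *
 *     static inline void up(struct semaphore *sem)
 *     {
 *             __up_op(&sem->count, __up_wakeup);
 *     }
 *
 * Since the second macro argument is stringized into a bl instruction, the
 * handlers must be assembler-level symbols, and because the asm declares
 * only ip, lr and the condition codes as clobbered, the handlers have to
 * preserve every other register themselves.
 */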
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */