/*
 * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if ARM_ARCH_AT_LEAST(8, 1)

/*
 * When compiled for ARMv8.1 or later, implement spin locks using the Compare
 * and Swap instruction.
 */
# define USE_CAS	1

/*
 * Lock contenders using CAS, upon failing to acquire the lock, wait with the
 * monitor in the open state. Therefore, a normal store upon unlocking won't
 * generate an SEV. Use an explicit SEV instruction with the CAS unlock.
 */
# define COND_SEV()	sev

#else

# define USE_CAS	0

/*
 * Lock contenders using load-/store-exclusive pairs, upon failing to acquire
 * the lock, wait with the monitor in the exclusive state. A normal store upon
 * unlocking will implicitly generate an event, so no explicit SEV is required
 * on unlock.
 */
# define COND_SEV()

#endif

#if USE_CAS

	.arch	armv8.1-a

/*
 * Acquire lock using the Compare and Swap instruction.
 *
 * Compare for 0 with acquire semantics, and swap 1. Wait until CAS returns
 * 0.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1		/* Value to store once the lock is acquired */
	sevl			/* Set local event so the first WFE falls through */
1:
	wfe			/* Wait for an event before retrying */
	mov	w1, wzr		/* Expect the lock word to read as 0 (free) */
	casa	w1, w2, [x0]	/* If the lock was free, claim it; w1 = observed value */
	cbnz	w1, 1b		/* Observed value non-zero: lock was held, retry */
	ret
endfunc spin_lock
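
/*
 * For illustration only: a rough C-level sketch of the CAS-based acquisition
 * above. It assumes spinlock_t wraps a single 32-bit word that reads as 0
 * when the lock is free; the member name "lock" and the GCC/Clang __atomic
 * builtins are assumptions of this sketch, not part of this file.
 *
 *	void spin_lock(spinlock_t *l)
 *	{
 *		uint32_t expected;
 *
 *		do {
 *			expected = 0;
 *		} while (!__atomic_compare_exchange_n(&l->lock, &expected, 1,
 *				false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));
 *	}
 *
 * A strong compare-exchange with acquire ordering on success corresponds to
 * CASA; the SEVL/WFE pairing in the assembly only reduces power while the
 * lock is contended and does not change the semantics.
 */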

	.arch	armv8-a

#else /* !USE_CAS */

/*
 * Acquire lock using a load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1		/* Value to store once the lock is acquired */
	sevl			/* Set local event so the first WFE falls through */
l1:	wfe			/* Wait for an event before re-reading the lock */
l2:	ldaxr	w1, [x0]	/* Load-acquire exclusive the lock word */
	cbnz	w1, l1		/* Lock held: wait with monitor in exclusive state */
	stxr	w1, w2, [x0]	/* Try to store 1; w1 is 0 on success */
	cbnz	w1, l2		/* Exclusive store failed: re-read and retry */
	ret
endfunc spin_lock
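
/*
 * For illustration only: the same sketch for the exclusive-pair path, under
 * the same assumptions about spinlock_t. A weak compare-exchange is used
 * here because, like STXR, it may fail spuriously and the loop simply
 * retries; compilers targeting ARMv8.0 typically lower such a loop to an
 * LDAXR/STXR pair of the shape hand-coded above.
 *
 *	void spin_lock(spinlock_t *l)
 *	{
 *		uint32_t expected;
 *
 *		do {
 *			expected = 0;
 *		} while (!__atomic_compare_exchange_n(&l->lock, &expected, 1,
 *				true, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));
 *	}
 */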

#endif /* USE_CAS */

/*
 * Release a lock previously acquired by spin_lock.
 *
 * Unconditionally write 0, and conditionally generate an event.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]	/* Store-release 0 to mark the lock free */
	COND_SEV()		/* Wake CAS-based waiters; no-op otherwise */
	ret
endfunc spin_unlock
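
/*
 * Illustrative C-level use of this pair. The caller, the lock instance and
 * their names are invented for the example; only the spin_lock/spin_unlock
 * prototypes above come from this file.
 *
 *	static spinlock_t console_lock;
 *
 *	void log_line(const char *msg)
 *	{
 *		spin_lock(&console_lock);
 *		// critical section: at most one CPU prints at a time
 *		spin_unlock(&console_lock);
 *	}
 *
 * The unlock itself is equivalent to a store with release ordering, e.g.
 * __atomic_store_n(&l->lock, 0, __ATOMIC_RELEASE), followed by an SEV only
 * in the CAS build.
 */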
     97