
Lines matching full:lock (builtins/atomic.c, compiler-rt's lock-based atomic fallbacks)

20  *  To avoid needing a per-object lock, this code allocates an array of
21  *  locks and hashes object pointers into it to select one.
22  *  For operations that must be atomic on two locations, the lower lock is
23  *  always acquired first, to avoid deadlock.
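The two-location rule above can be sketched as follows, using the Lock, lock(), and lock_for_pointer() names that appear later in this listing; the helper name lock_both is hypothetical. Acquiring the lower-addressed lock first imposes a global order, so two threads can never each hold one lock while waiting for the other:

    // Hypothetical helper: take both guarding locks in address order.
    static void lock_both(void *a, void *b) {
      Lock *la = lock_for_pointer(a);
      Lock *lb = lock_for_pointer(b);
      if (la < lb) {
        lock(la);
        lock(lb);
      } else if (lb < la) {
        lock(lb);
        lock(la);
      } else {
        lock(la); // both pointers hashed to the same lock; take it once
      }
    }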
49 // Platform-specific lock implementation. Falls back to spinlocks if none is
50 // defined. Each platform should define the Lock type, and corresponding
51 // lock() and unlock() functions.
58 typedef struct _usem Lock;
59 inline static void unlock(Lock *l) {
65 inline static void lock(Lock *l) {
74 static Lock locks[SPINLOCK_COUNT] = { [0 ... SPINLOCK_COUNT-1] = {0,1,0} };
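The FreeBSD function bodies are elided by the match filter. A plausible completion drives the struct _usem semaphore through the _umtx_op system call (UMTX_OP_SEM_WAIT / UMTX_OP_SEM_WAKE); treat the exact field names (_count, _has_waiters) as assumptions about this snapshot:

    #include <stdint.h>
    #include <sys/types.h>
    #include <machine/atomic.h>
    #include <sys/umtx.h>

    typedef struct _usem Lock;

    inline static void unlock(Lock *l) {
      // Release the semaphore, then wake a sleeper if one is recorded.
      __c11_atomic_store((_Atomic(uint32_t) *)&l->_count, 1, __ATOMIC_RELEASE);
      __c11_atomic_thread_fence(__ATOMIC_SEQ_CST);
      if (l->_has_waiters)
        _umtx_op(l, UMTX_OP_SEM_WAKE, 1, 0, 0);
    }

    inline static void lock(Lock *l) {
      // Try to take the semaphore; sleep in the kernel while it is held.
      uint32_t old = 1;
      while (!__c11_atomic_compare_exchange_weak(
          (_Atomic(uint32_t) *)&l->_count, &old, 0, __ATOMIC_ACQUIRE,
          __ATOMIC_RELAXED)) {
        _umtx_op(l, UMTX_OP_SEM_WAIT, 0, 0, 0);
        old = 1;
      }
    }

The {0,1,0} initializer below then reads as: no waiters, a count of one (unlocked), no flags.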
78 typedef OSSpinLock Lock;
79 inline static void unlock(Lock *l) {
82 /// Locks a lock. In the current implementation, this is potentially
83 /// unbounded in the contended case.
84 inline static void lock(Lock *l) {
87 static Lock locks[SPINLOCK_COUNT]; // initialized to OS_SPINLOCK_INIT, which is 0
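On Apple platforms the elided bodies presumably reduce to the libkern spinlock calls (OSSpinLock is deprecated on current SDKs in favour of os_unfair_lock, but it matches the typedef shown here):

    #include <libkern/OSAtomic.h>

    typedef OSSpinLock Lock;

    inline static void unlock(Lock *l) {
      OSSpinLockUnlock(l);
    }

    // Potentially unbounded spinning in the contended case.
    inline static void lock(Lock *l) {
      OSSpinLockLock(l);
    }

    static Lock locks[SPINLOCK_COUNT]; // zero-initialized == OS_SPINLOCK_INIT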
90 typedef _Atomic(uintptr_t) Lock;
91 /// Unlock a lock. This is a release operation.
92 inline static void unlock(Lock *l) {
95 /// Locks a lock. In the current implementation, this is potentially
96 /// unbounded in the contended case.
97 inline static void lock(Lock *l) {
104 static Lock locks[SPINLOCK_COUNT];
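For the generic fallback, a minimal spinlock over _Atomic(uintptr_t) built on the Clang __c11_atomic_* builtins might look like this (a sketch, not necessarily the exact upstream bodies):

    #include <stdint.h>

    typedef _Atomic(uintptr_t) Lock;

    /// Unlock a lock. This is a release operation.
    inline static void unlock(Lock *l) {
      __c11_atomic_store(l, 0, __ATOMIC_RELEASE);
    }

    /// Spin until the 0 -> 1 transition succeeds; the acquire ordering
    /// pairs with the release store in unlock().
    inline static void lock(Lock *l) {
      uintptr_t old = 0;
      while (!__c11_atomic_compare_exchange_weak(l, &old, 1, __ATOMIC_ACQUIRE,
                                                 __ATOMIC_RELAXED)) {
        old = 0; // a failed CAS overwrites 'old'; reset it before retrying
      }
    }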
108 /// Returns a lock to use for a given pointer.
109 static inline Lock *lock_for_pointer(void *ptr) {
113 // lock.
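The body around the comment fragment above is elided. A sketch of the pointer-to-lock hash; the shift amount and the xor-fold are illustrative, and SPINLOCK_COUNT is assumed to be a power of two so masking selects a slot:

    static inline Lock *lock_for_pointer(void *ptr) {
      intptr_t hash = (intptr_t)ptr;
      // Drop the low bits: they are mostly alignment and carry little
      // entropy, yet nearby objects should still land on different locks.
      hash >>= 4;
      // Fold the high bits down so they also influence the chosen lock.
      hash ^= (hash >> 16) | (hash << (sizeof(hash) * 8 - 16));
      return locks + (hash & (SPINLOCK_COUNT - 1));
    }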
125 /// Macros for determining whether a size is lock free. Clang cannot yet
126 /// codegen __atomic_is_lock_free(16), so for now we assume that 16-byte
127 /// values are not lock free.
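Presumably each width is tested with the Clang builtin, with the 16-byte case hard-coded to 0 per the comment above (a sketch):

    #define IS_LOCK_FREE_1  __c11_atomic_is_lock_free(1)
    #define IS_LOCK_FREE_2  __c11_atomic_is_lock_free(2)
    #define IS_LOCK_FREE_4  __c11_atomic_is_lock_free(4)
    #define IS_LOCK_FREE_8  __c11_atomic_is_lock_free(8)
    #define IS_LOCK_FREE_16 0 // assumed not lock free, per the comment above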
134 /// Macro that calls the compiler-generated lock-free versions of functions
135 /// when they exist, falling back to the lock-based implementation otherwise.
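The dispatch macro plausibly switches on the runtime size argument and expands a caller-supplied LOCK_FREE_ACTION(type) for widths that are lock free; the names beyond those visible in this listing are assumptions:

    #include <stdint.h>

    #define LOCK_FREE_CASES() \
      do { \
        switch (size) { \
        case 1: if (IS_LOCK_FREE_1) { LOCK_FREE_ACTION(uint8_t);  } break; \
        case 2: if (IS_LOCK_FREE_2) { LOCK_FREE_ACTION(uint16_t); } break; \
        case 4: if (IS_LOCK_FREE_4) { LOCK_FREE_ACTION(uint32_t); } break; \
        case 8: if (IS_LOCK_FREE_8) { LOCK_FREE_ACTION(uint64_t); } break; \
        } \
      } while (0)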
168 Lock *l = lock_for_pointer(src);
169 lock(l);
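The src parameter suggests this first generic entry point is the load; under the lock it degrades to a plain memcpy. A sketch of the shape, reusing the LOCK_FREE_CASES sketch above and string.h's memcpy:

    void __atomic_load_c(int size, void *src, void *dest, int model) {
    #define LOCK_FREE_ACTION(type) \
      *((type *)dest) = __c11_atomic_load((_Atomic(type) *)src, model); \
      return;
      LOCK_FREE_CASES();
    #undef LOCK_FREE_ACTION
      // Not lock free at this size: serialize through the pointer's lock.
      Lock *l = lock_for_pointer(src);
      lock(l);
      memcpy(dest, src, size);
      unlock(l);
    }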
182 Lock *l = lock_for_pointer(dest);
183 lock(l);
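The dest-keyed locking here fits the generic store; its locked path is symmetric to the load (lock-free fast path elided for brevity):

    void __atomic_store_c(int size, void *dest, void *src, int model) {
      // Lock-free fast path elided; see the load sketch above.
      Lock *l = lock_for_pointer(dest);
      lock(l);
      memcpy(dest, src, size);
      unlock(l);
    }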
200 Lock *l = lock_for_pointer(ptr);
201 lock(l);
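In upstream compiler-rt this position corresponds to the generic compare-and-swap, whose locked path is a bytewise compare plus copy, returning success as an int (a sketch under that assumption):

    int __atomic_compare_exchange_c(int size, void *ptr, void *expected,
                                    void *desired, int success, int failure) {
      // Lock-free fast path elided; see the load sketch above.
      Lock *l = lock_for_pointer(ptr);
      lock(l);
      if (memcmp(ptr, expected, size) == 0) {
        memcpy(ptr, desired, size); // match: install the desired value
        unlock(l);
        return 1;
      }
      memcpy(expected, ptr, size);  // mismatch: report the current value
      unlock(l);
      return 0;
    }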
221 Lock *l = lock_for_pointer(ptr);
222 lock(l);
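The last generic entry point matches the exchange: copy out the old bytes and copy in the new ones, both under the same lock (sketch):

    void __atomic_exchange_c(int size, void *ptr, void *val, void *old,
                             int model) {
      // Lock-free fast path elided; see the load sketch above.
      Lock *l = lock_for_pointer(ptr);
      lock(l);
      memcpy(old, ptr, size); // hand back the previous contents...
      memcpy(ptr, val, size); // ...and install the new ones under the lock
      unlock(l);
    }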
244 Lock *l = lock_for_pointer(src);\
245 lock(l);\
259 Lock *l = lock_for_pointer(dest);\
260 lock(l);\
272 Lock *l = lock_for_pointer(dest);\
273 lock(l);\
288 Lock *l = lock_for_pointer(ptr);\
289 lock(l);\
309 Lock *l = lock_for_pointer(ptr);\
310 lock(l);\
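The backslash-continued matches above come from macros that stamp out the fixed-size entry points (__atomic_load_1 through __atomic_exchange_8 and friends). A sketch of one such generator for the loads; the OPTIMISED_CASE/OPTIMISED_CASES names and bodies are assumptions modeled on the generic versions:

    #define OPTIMISED_CASES \
      OPTIMISED_CASE(1, IS_LOCK_FREE_1, uint8_t) \
      OPTIMISED_CASE(2, IS_LOCK_FREE_2, uint16_t) \
      OPTIMISED_CASE(4, IS_LOCK_FREE_4, uint32_t) \
      OPTIMISED_CASE(8, IS_LOCK_FREE_8, uint64_t)

    // Each expansion defines one sized load, e.g. __atomic_load_4.
    #define OPTIMISED_CASE(n, lockfree, type) \
      type __atomic_load_##n(type *src, int model) { \
        if (lockfree) \
          return __c11_atomic_load((_Atomic(type) *)src, model); \
        Lock *l = lock_for_pointer(src); \
        lock(l); \
        type val = *src; \
        unlock(l); \
        return val; \
      }
    OPTIMISED_CASES
    #undef OPTIMISED_CASE

The stores, exchanges, compare-exchanges, and fetch-op families would redefine OPTIMISED_CASE and re-expand OPTIMISED_CASES in the same way, which is consistent with the repeated lock_for_pointer/lock pairs in the matches above.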