
Lines Matching refs:seq

44 //   seq records the state of the slot.
46 // pthread key in the slot, we increase the seq by 1 (which inverts bit 0). The reason to use
51 atomic_uintptr_t seq;
57 static inline bool SeqOfKeyInUse(uintptr_t seq) {
58 return seq & (1 << SEQ_KEY_IN_USE_BIT);
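
For context, here is a minimal self-contained sketch of the slot layout these matches come from. SEQ_KEY_IN_USE_BIT, key_map, key_data, and SeqOfKeyInUse appear in the matched lines; the value of SEQ_INCREMENT_STEP, the slot count KEY_COUNT, and the key_destructor field are assumptions consistent with the excerpt, not taken from it.

#include <stdatomic.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Bit 0 of seq flags whether a slot holds a live key. SEQ_INCREMENT_STEP is
 * assumed to be 1 << SEQ_KEY_IN_USE_BIT, so adding it flips the in-use bit. */
#define SEQ_KEY_IN_USE_BIT 0
#define SEQ_INCREMENT_STEP (1 << SEQ_KEY_IN_USE_BIT)
#define KEY_COUNT 128  /* Hypothetical slot count; not given in the excerpt. */

typedef struct {
  atomic_uintptr_t seq;             /* Even: slot free. Odd: key in use. */
  atomic_uintptr_t key_destructor;  /* Assumed: destructor fn, stored as uintptr_t. */
} pthread_key_internal_t;

typedef struct {
  uintptr_t seq;  /* seq of the key at the time this thread stored data. */
  void* data;     /* The thread-specific value itself. */
} pthread_key_data_t;

static pthread_key_internal_t key_map[KEY_COUNT];             /* Process-global. */
static _Thread_local pthread_key_data_t key_data[KEY_COUNT];  /* Per-thread. */

static inline bool SeqOfKeyInUse(uintptr_t seq) {
  return seq & (1 << SEQ_KEY_IN_USE_BIT);
}
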
81 uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);
82 if (SeqOfKeyInUse(seq) && seq == key_data[i].seq && key_data[i].data != NULL) {
96 if (atomic_load_explicit(&key_map[i].seq, memory_order_relaxed) != seq) {
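
Lines 81 and 96 come from the thread-exit cleanup pass. Continuing the sketch above, a hedged reconstruction of that loop: a destructor runs only if the key is live, the thread's cached seq matches, and the value is non-NULL; after the destructor pointer is read, seq is re-checked (line 96) so a key deleted and possibly reused mid-flight never gets the wrong destructor.

/* Sketch of the cleanup loop. A real implementation repeats the whole pass
 * several times, since destructors may set other keys. */
static void clean_all_keys(void) {
  for (size_t i = 0; i < KEY_COUNT; ++i) {
    uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);
    if (SeqOfKeyInUse(seq) && seq == key_data[i].seq && key_data[i].data != NULL) {
      void* value = key_data[i].data;
      key_data[i].data = NULL;  /* Clear before calling the destructor. */
      void (*dtor)(void*) = (void (*)(void*))atomic_load_explicit(
          &key_map[i].key_destructor, memory_order_relaxed);
      if (dtor != NULL) {
        /* Re-check seq after reading the destructor: if it changed, the key
         * was deleted concurrently, so skip the call. */
        if (atomic_load_explicit(&key_map[i].seq, memory_order_relaxed) != seq) {
          continue;
        }
        dtor(value);
      }
    }
  }
}
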
122 uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);
123 while (!SeqOfKeyInUse(seq)) {
124 if (atomic_compare_exchange_weak(&key_map[i].seq, &seq, seq + SEQ_INCREMENT_STEP)) {
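
Lines 122-124 are the allocation scan in key creation. A sketch under the same assumptions: the even (free) seq is CAS'd to seq + SEQ_INCREMENT_STEP, which sets the in-use bit; the weak CAS refreshes seq on failure, so the inner loop retries only while the slot still looks free.

/* Returns the allocated key index, or -1 if every slot is taken
 * (the real function returns EAGAIN). */
static int key_create(uintptr_t destructor) {
  for (size_t i = 0; i < KEY_COUNT; ++i) {
    uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);
    while (!SeqOfKeyInUse(seq)) {
      if (atomic_compare_exchange_weak(&key_map[i].seq, &seq,
                                       seq + SEQ_INCREMENT_STEP)) {
        atomic_store_explicit(&key_map[i].key_destructor, destructor,
                              memory_order_relaxed);
        return (int)i;
      }
      /* CAS failure reloaded seq; loop again only if the slot is still free. */
    }
  }
  return -1;
}
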
144 // Increase seq to invalidate values in all threads.
145 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
146 if (SeqOfKeyInUse(seq)) {
147 if (atomic_compare_exchange_strong(&key_map[key].seq, &seq, seq + SEQ_INCREMENT_STEP)) {
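
Lines 144-147 are key deletion: nothing is freed. Bumping seq once more (odd back to even) simultaneously marks the slot free and invalidates every thread's cached value for the old key, since their stored seq can no longer match. A sketch:

/* Returns 0 on success, -1 if the key was not live (EINVAL in the real API). */
static int key_delete(int key) {
  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
  if (SeqOfKeyInUse(seq) &&
      atomic_compare_exchange_strong(&key_map[key].seq, &seq,
                                     seq + SEQ_INCREMENT_STEP)) {
    return 0;
  }
  return -1;
}
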
160 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
164 if (__predict_true(SeqOfKeyInUse(seq) && data->seq == seq)) {
167 // We arrive here when the current thread holds the seq of a deleted pthread key. So the
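
Lines 160-167 are the getspecific fast path: one relaxed load plus two compares. A sketch of both paths: if the key is live and the thread's cached seq matches, the stored value is returned; a stale seq means the key was deleted since the store, so the entry is cleared and NULL is returned.

static void* get_specific(int key) {
  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
  pthread_key_data_t* data = &key_data[key];
  if (SeqOfKeyInUse(seq) && data->seq == seq) {
    return data->data;  /* Fast path: key live, value stored under this seq. */
  }
  /* The cached seq belongs to a deleted key: drop the stale value. */
  data->data = NULL;
  return NULL;
}
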
179 uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
180 if (__predict_true(SeqOfKeyInUse(seq))) {
182 data->seq = seq;
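
Lines 179-182 are setspecific: the value is stored together with the seq observed at store time, which is what makes the lazy invalidation in the delete and get paths above work without any cross-thread bookkeeping. A sketch:

/* Returns 0 on success, -1 if the key is not live (EINVAL in the real API). */
static int set_specific(int key, const void* value) {
  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);
  if (SeqOfKeyInUse(seq)) {
    key_data[key].seq = seq;  /* Remember which incarnation of the key. */
    key_data[key].data = (void*)value;
    return 0;
  }
  return -1;
}
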