
Lines Matching refs:__m

52 #define _ATOMIC_STORE_(__a, __m, __x)					   \
54 __typeof__(__m) __v = (__m); \
61 #define _ATOMIC_MODIFY_(__a, __o, __m, __x) \
63 __typeof__(__m) __v = (__m); \
71 #define _ATOMIC_CMPEXCHNG_(__a, __e, __m, __x) \
74 __typeof__(__m) __v = (__m); \
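
The three macros above are only partially visible here (the listing shows just the lines mentioning __m), but the visible pieces give the shape: each takes its operand as __m, copies it once into a local __v, and performs the access while a per-address flag is held. A minimal sketch of that scheme, written as ordinary functions rather than GNU statement-expression macros, with a single illustrative guard flag standing in for the header's __atomic_flag_for_address lookup:

    #include <atomic>

    namespace sketch
    {
      // One illustrative guard; the header looks a flag up per address.
      static std::atomic_flag guard = ATOMIC_FLAG_INIT;

      // Rough equivalent of _ATOMIC_STORE_(__a, __m, __x): evaluate __m once
      // (the "__typeof__(__m) __v = (__m);" line above), write it while the
      // flag is held, and yield the stored value.
      template<typename Tp>
      Tp locked_store(Tp* p, Tp m)
      {
        Tp v = m;                                        // __v
        while (guard.test_and_set(std::memory_order_acquire))
          { }                                            // wait on the flag
        *p = v;
        guard.clear(std::memory_order_release);          // release the flag
        return v;
      }

      // Rough equivalent of _ATOMIC_MODIFY_(__a, __o, __m, __x) with __o
      // spliced in as +=: the previous value is saved and returned.
      template<typename Tp>
      Tp locked_fetch_add(Tp* p, Tp m)
      {
        Tp v = m;                                        // __v
        while (guard.test_and_set(std::memory_order_acquire))
          { }
        Tp r = *p;                                       // old value
        *p += v;                                         // the "__o" operator
        guard.clear(std::memory_order_release);
        return r;
      }
    }
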
96 test_and_set(memory_order __m = memory_order_seq_cst) volatile;
99 clear(memory_order __m = memory_order_seq_cst) volatile;
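
test_and_set and clear are the primitive pair the flag-based fallback builds on; both take the memory order as __m, defaulting to memory_order_seq_cst. A short usage sketch of that pair as a spinlock, using the standard std::atomic_flag interface rather than the header's internals:

    #include <atomic>

    // A guard flag in the cleared state.
    std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;

    void lock()
    {
      // Spin until test_and_set() reports the flag was previously clear,
      // i.e. until this thread is the one that set it.
      while (lock_flag.test_and_set(std::memory_order_acquire))
        { }
    }

    void unlock()
    { lock_flag.clear(std::memory_order_release); }
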
121 store(void* __v, memory_order __m = memory_order_seq_cst) volatile
123 __glibcxx_assert(__m != memory_order_acquire);
124 __glibcxx_assert(__m != memory_order_acq_rel);
125 __glibcxx_assert(__m != memory_order_consume);
126 _ATOMIC_STORE_(this, __v, __m);
130 load(memory_order __m = memory_order_seq_cst) const volatile
132 __glibcxx_assert(__m != memory_order_release);
133 __glibcxx_assert(__m != memory_order_acq_rel);
134 return _ATOMIC_LOAD_(this, __m);
138 exchange(void* __v, memory_order __m = memory_order_seq_cst) volatile
139 { return _ATOMIC_MODIFY_(this, =, __v, __m); }
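
The assertions above encode which memory orders each operation may take: store rejects acquire, acq_rel and consume, load rejects release and acq_rel, and exchange accepts any order and simply forwards __m into _ATOMIC_MODIFY_. A small sketch of calls that satisfy, and calls that would trip, those checks, using std::atomic<void*> for illustration:

    #include <atomic>

    std::atomic<void*> ap{nullptr};
    int value;

    void ordering_examples()
    {
      ap.store(&value, std::memory_order_release);     // allowed: a store order
      void* p = ap.load(std::memory_order_acquire);    // allowed: a load order
      void* q = ap.exchange(nullptr);                  // defaulted memory_order_seq_cst

      // ap.store(&value, std::memory_order_acquire);  // rejected by the assertions above
      // ap.load(std::memory_order_release);           // likewise rejected for load
      (void)p; (void)q;
    }
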
153 memory_order __m = memory_order_seq_cst) volatile
155 return compare_exchange_weak(__v1, __v2, __m,
156 __calculate_memory_order(__m));
171 memory_order __m = memory_order_seq_cst) volatile
173 return compare_exchange_strong(__v1, __v2, __m,
174 __calculate_memory_order(__m));
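
Both single-order compare_exchange overloads forward to their two-order forms, passing __m as the success order and __calculate_memory_order(__m) as the failure order. The sketch below shows the mapping that helper presumably applies: the failure order tracks the success order, weakened where a failed exchange performs no store. The name failure_order_for is illustrative, not from the header:

    #include <atomic>

    // Presumed behaviour of __calculate_memory_order: keep the success order,
    // except release degrades to relaxed and acq_rel to acquire.
    std::memory_order failure_order_for(std::memory_order m)
    {
      if (m == std::memory_order_release) return std::memory_order_relaxed;
      if (m == std::memory_order_acq_rel) return std::memory_order_acquire;
      return m;
    }

    bool try_update(std::atomic<int>& a, int& expected, int desired)
    {
      // What the single-order overload expands to for __m == memory_order_acq_rel.
      return a.compare_exchange_weak(expected, desired,
                                     std::memory_order_acq_rel,
                                     failure_order_for(std::memory_order_acq_rel));
    }
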
178 fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile
182 __atomic_flag_wait_explicit(__g, __m);
185 atomic_flag_clear_explicit(__g, __m);
190 fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile
194 __atomic_flag_wait_explicit(__g, __m);
197 atomic_flag_clear_explicit(__g, __m);
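
fetch_add and fetch_sub on the address type wrap the pointer adjustment in the same flag protocol: wait on the per-address flag with __atomic_flag_wait_explicit(__g, __m), adjust the stored pointer by __d, then release it with atomic_flag_clear_explicit(__g, __m). A sketch of that guarded update, with a single guard flag standing in for the per-address table and byte-wise arithmetic assumed for the void* offset:

    #include <atomic>
    #include <cstddef>

    // Stand-in for the per-address flag table used by the header.
    static std::atomic_flag addr_guard = ATOMIC_FLAG_INIT;

    void* locked_fetch_add(void** slot, std::ptrdiff_t d)
    {
      while (addr_guard.test_and_set(std::memory_order_acquire))  // wait on the flag
        { }
      void* old = *slot;
      *slot = static_cast<char*>(old) + d;   // presumed byte-wise pointer arithmetic
      addr_guard.clear(std::memory_order_release);                // clear the flag
      return old;
    }
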
312 memory_order __m = memory_order_seq_cst) volatile
314 __glibcxx_assert(__m != memory_order_acquire);
315 __glibcxx_assert(__m != memory_order_acq_rel);
316 __glibcxx_assert(__m != memory_order_consume);
317 _ATOMIC_STORE_(this, __i, __m);
321 load(memory_order __m = memory_order_seq_cst) const volatile
323 __glibcxx_assert(__m != memory_order_release);
324 __glibcxx_assert(__m != memory_order_acq_rel);
325 return _ATOMIC_LOAD_(this, __m);
330 memory_order __m = memory_order_seq_cst) volatile
331 { return _ATOMIC_MODIFY_(this, =, __i, __m); }
345 memory_order __m = memory_order_seq_cst) volatile
347 return compare_exchange_weak(__i1, __i2, __m,
348 __calculate_memory_order(__m));
363 memory_order __m = memory_order_seq_cst) volatile
365 return compare_exchange_strong(__i1, __i2, __m,
366 __calculate_memory_order(__m));
371 memory_order __m = memory_order_seq_cst) volatile
372 { return _ATOMIC_MODIFY_(this, +=, __i, __m); }
376 memory_order __m = memory_order_seq_cst) volatile
377 { return _ATOMIC_MODIFY_(this, -=, __i, __m); }
381 memory_order __m = memory_order_seq_cst) volatile
382 { return _ATOMIC_MODIFY_(this, &=, __i, __m); }
386 memory_order __m = memory_order_seq_cst) volatile
387 { return _ATOMIC_MODIFY_(this, |=, __i, __m); }
391 memory_order __m = memory_order_seq_cst) volatile
392 { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
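
The integral fetch_add, fetch_sub, fetch_and, fetch_or and fetch_xor above are all the same _ATOMIC_MODIFY_ expansion, differing only in the compound operator spliced in (+=, -=, &=, |=, ^=), and each returns the value held before the modification. A short usage sketch of the bitwise trio with explicit orders, written against the standard std::atomic interface:

    #include <atomic>
    #include <cassert>

    void bitmask_example()
    {
      std::atomic<unsigned> flags{0x0F};

      unsigned before_or  = flags.fetch_or(0xF0, std::memory_order_acq_rel);   // |=
      unsigned before_and = flags.fetch_and(0x3C, std::memory_order_acq_rel);  // &=
      unsigned before_xor = flags.fetch_xor(0xFF, std::memory_order_acq_rel);  // ^=

      // Each call returned the old value; the new value reflects the operator.
      assert(before_or == 0x0F);
      assert(before_and == 0xFF);
      assert(before_xor == 0x3C);
      assert(flags.load() == 0xC3);
    }
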
423 store(bool __i, memory_order __m = memory_order_seq_cst) volatile
424 { _M_base.store(__i, __m); }
427 load(memory_order __m = memory_order_seq_cst) const volatile
428 { return _M_base.load(__m); }
431 exchange(bool __i, memory_order __m = memory_order_seq_cst) volatile
432 { return _M_base.exchange(__i, __m); }
441 memory_order __m = memory_order_seq_cst) volatile
442 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
452 memory_order __m = memory_order_seq_cst) volatile
453 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
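
The atomic<bool> members above are pure delegation: every operation, including its memory_order argument __m, is forwarded unchanged to the _M_base member. A compact sketch of that wrapper pattern, with plain names in place of the reserved __i/__m/_M_base identifiers and the volatile qualifiers omitted for brevity:

    #include <atomic>

    struct atomic_bool_wrapper
    {
      std::atomic<bool> base;   // plays the role of _M_base in the listing

      void store(bool i, std::memory_order m = std::memory_order_seq_cst)
      { base.store(i, m); }

      bool load(std::memory_order m = std::memory_order_seq_cst) const
      { return base.load(m); }

      bool exchange(bool i, std::memory_order m = std::memory_order_seq_cst)
      { return base.exchange(i, m); }

      bool compare_exchange_strong(bool& expected, bool desired,
                                   std::memory_order m = std::memory_order_seq_cst)
      { return base.compare_exchange_strong(expected, desired, m); }
    };
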