
Lines Matching full:threadstate

223 static T AtomicLoad(ThreadState *thr, uptr pc, const volatile T *a, morder mo) {
253 static void AtomicStore(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
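
This listing appears to come from TSan's tsan_interface_atomic.cc. AtomicLoad and AtomicStore carry the core happens-before modeling for atomics: an acquire load pulls in the vector clock published on the address, and a release store publishes the calling thread's clock there. A minimal sketch of that shape; ThreadState, morder, and the Acquire/Release hooks below are simplified stand-ins, not the real runtime definitions:

    typedef unsigned long uptr;   // stand-in for sanitizer_common's uptr
    struct ThreadState {};        // placeholder; the real struct is far richer
    enum morder { mo_relaxed, mo_consume, mo_acquire, mo_release,
                  mo_acq_rel, mo_seq_cst };

    // Hypothetical hooks standing in for TSan's vector-clock machinery.
    static void Acquire(ThreadState *thr, uptr pc, uptr addr) {}
    static void Release(ThreadState *thr, uptr pc, uptr addr) {}

    template <typename T>
    static T AtomicLoad(ThreadState *thr, uptr pc, const volatile T *a,
                        morder mo) {
      if (mo != mo_relaxed)
        Acquire(thr, pc, (uptr)a);   // pull in the clock published on *a
      return __atomic_load_n(a, __ATOMIC_ACQUIRE);
    }

    template <typename T>
    static void AtomicStore(ThreadState *thr, uptr pc, volatile T *a, T v,
                            morder mo) {
      if (mo != mo_relaxed)
        Release(thr, pc, (uptr)a);   // publish this thread's clock on *a
      __atomic_store_n(a, v, __ATOMIC_RELEASE);
    }

    // Tiny smoke test so the sketch runs standalone.
    int main() {
      ThreadState thr;
      volatile int x = 0;
      AtomicStore(&thr, 0, &x, 42, mo_release);
      return AtomicLoad(&thr, 0, &x, mo_acquire) == 42 ? 0 : 1;
    }
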
276 static T AtomicRMW(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
333 static T AtomicExchange(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
339 static T AtomicFetchAdd(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
345 static T AtomicFetchSub(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
351 static T AtomicFetchAnd(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
357 static T AtomicFetchOr(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
363 static T AtomicFetchXor(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
369 static T AtomicFetchNand(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
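
The six fetch handlers above are thin wrappers that funnel into AtomicRMW (line 276), which does the synchronization modeling once. A sketch of that funneling pattern, reusing the stand-ins from the previous sketch; passing the primitive as a lambda is illustrative (the runtime itself uses small functor templates such as func_add):

    // Central RMW handler: one place models the sync edge, each wrapper
    // only supplies the primitive operation.
    template <typename T, typename F>
    static T AtomicRMW(ThreadState *thr, uptr pc, volatile T *a, T v,
                       morder mo, F op) {
      if (mo != mo_relaxed) {
        Acquire(thr, pc, (uptr)a);   // an RMW is both a read and a write,
        Release(thr, pc, (uptr)a);   // so model both sides of the edge
      }
      return op(a, v);
    }

    template <typename T>
    static T AtomicFetchAdd(ThreadState *thr, uptr pc, volatile T *a, T v,
                            morder mo) {
      return AtomicRMW(thr, pc, a, v, mo, [](volatile T *p, T x) {
        return __atomic_fetch_add(p, x, __ATOMIC_SEQ_CST);
      });
    }
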
398 static bool AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T *c, T v, morder mo, morder fmo) {
431 static T AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T c, T v, morder mo, morder fmo) {
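
The two AtomicCAS overloads differ only in calling convention: the bool overload at line 398 is the workhorse, and the value-returning overload at line 431 wraps it so callers that want the old value get it back through the expected operand. A sketch on the same stand-in types, with the bool overload reduced to the bare hardware CAS (the real handler also models acquire/release on the address):

    template <typename T>
    static bool AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T *c, T v,
                          morder mo, morder fmo) {
      // On failure the observed value is written back through c.
      return __atomic_compare_exchange_n(a, c, v, /*weak=*/false,
                                         __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    }

    template <typename T>
    static T AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T c, T v,
                       morder mo, morder fmo) {
      AtomicCAS(thr, pc, a, &c, v, mo, fmo);  // c is updated on failure
      return c;                               // the old value either way
    }
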
442 static void AtomicFence(ThreadState *thr, uptr pc, morder mo) {
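
AtomicFence is the shallow spot in this interface: in this vintage of the runtime the handler issues a real hardware fence but does not model the fence in the happens-before graph, and the source marks that gap with a FIXME. The body is essentially:

    static void AtomicFence(ThreadState *thr, uptr pc, morder mo) {
      // FIXME(dvyukov): not implemented.
      __sync_synchronize();
    }
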
457 ThreadState *const thr = cur_thread(); \
467 ScopedAtomic(ThreadState *thr, uptr pc, const volatile void *a, morder mo, const char *func)
478 ThreadState *thr_;
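
Lines 457 through 478 are fragments of the dispatch plumbing: each public __tsan_atomic* entry point grabs cur_thread(), arms a ScopedAtomic RAII guard whose constructor and destructor bracket the call with shadow-stack enter/exit, and forwards to the matching handler above. A condensed sketch of the pattern; the real SCOPED_ATOMIC macro also handles stat counting, force_seq_cst_atomics, and interceptor-ignore checks, and the stubs below exist only so the sketch compiles standalone:

    // Stubs for runtime facilities declared in tsan_rtl.h.
    static void FuncEntry(ThreadState *thr, uptr pc) {}
    static void FuncExit(ThreadState *thr) {}
    static ThreadState *cur_thread() { static ThreadState t; return &t; }

    class ScopedAtomic {
     public:
      ScopedAtomic(ThreadState *thr, uptr pc, const volatile void *a,
                   morder mo, const char *func)
          : thr_(thr) {
        FuncEntry(thr_, pc);   // push the caller onto the shadow stack
      }
      ~ScopedAtomic() {
        FuncExit(thr_);        // pop on every return path
      }
     private:
      ThreadState *thr_;
    };

    // Shared prologue of the public entry points (condensed).
    #define SCOPED_ATOMIC(func, ...) \
        const uptr callpc = (uptr)__builtin_return_address(0); \
        ThreadState *const thr = cur_thread(); \
        ScopedAtomic sa(thr, callpc, a, mo, __func__); \
        return Atomic##func(thr, callpc, __VA_ARGS__); \
    /**/

    // Example expansion, reusing the AtomicLoad sketch from above.
    extern "C" int __tsan_atomic32_load(const volatile int *a, morder mo) {
      SCOPED_ATOMIC(Load, a, mo);
    }
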
481 static void AtomicStatInc(ThreadState *thr, uptr size, morder mo, StatType t) {
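
AtomicStatInc feeds TSan's statistics machinery: every atomic op is counted three ways, by operation kind, by access size, and by memory order. A sketch of that triple bump; the enum values mirror tsan_stat.h, but StatInc here is a stub:

    enum StatType { StatAtomic, StatAtomicLoad, StatAtomic1, StatAtomic2,
                    StatAtomic4, StatAtomic8, StatAtomicRelaxed,
                    StatAtomicConsume, StatAtomicAcquire, StatAtomicRelease,
                    StatAtomicAcq_Rel, StatAtomicSeq_Cst };
    static void StatInc(ThreadState *thr, StatType t) { /* counter bump */ }

    static void AtomicStatInc(ThreadState *thr, uptr size, morder mo,
                              StatType t) {
      StatInc(thr, StatAtomic);                     // total atomic ops
      StatInc(thr, t);                              // per operation kind
      StatInc(thr, size == 1 ? StatAtomic1          // per access size
                 : size == 2 ? StatAtomic2
                 : size == 4 ? StatAtomic4
                 :             StatAtomic8);
      StatInc(thr, mo == mo_relaxed ? StatAtomicRelaxed   // per memory order
                 : mo == mo_consume ? StatAtomicConsume
                 : mo == mo_acquire ? StatAtomicAcquire
                 : mo == mo_release ? StatAtomicRelease
                 : mo == mo_acq_rel ? StatAtomicAcq_Rel
                 :                    StatAtomicSeq_Cst);
    }
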
874 void __tsan_go_atomic32_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
879 void __tsan_go_atomic64_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
884 void __tsan_go_atomic32_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
889 void __tsan_go_atomic64_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
894 void __tsan_go_atomic32_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
899 void __tsan_go_atomic64_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
904 void __tsan_go_atomic32_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
909 void __tsan_go_atomic64_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
915 void __tsan_go_atomic32_compare_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
924 void __tsan_go_atomic64_compare_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
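
The __tsan_go_* entry points exist because the Go runtime passes arguments through memory rather than the C ABI: each wrapper receives two PCs (cpc for the Go caller, pc for the atomic op itself) plus a single pointer a into a packed argument/result frame, unpacks the frame at fixed offsets, and forwards to the same handlers as the C path; Go atomics always get acquire/release semantics. A sketch of the 32-bit load on the stand-ins from above, with the 8-byte slot layout stated as an assumption:

    typedef unsigned char u8;
    typedef unsigned int a32;   // 32-bit atomic cell

    void __tsan_go_atomic32_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
      // Assumed frame layout: slot 0 (offset 0) holds the target address,
      // slot 1 (offset 8) receives the loaded value.
      const volatile a32 *addr = *(const volatile a32 **)a;
      a32 *res = (a32 *)(a + 8);
      FuncEntry(thr, cpc);                            // attribute to Go caller
      *res = AtomicLoad(thr, pc, addr, mo_acquire);   // Go loads are acquire
      FuncExit(thr);
    }
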