
Lines Matching refs:thr

282 void Initialize(ThreadState *thr) {
326 int tid = ThreadCreate(thr, 0, 0, true);
328 ThreadStart(thr, tid, internal_getpid());
341 int Finalize(ThreadState *thr) {
345 if (flags()->atexit_sleep_ms > 0 && ThreadCount(thr) > 1)
359 ThreadFinalize(thr);
385 StatAggregate(ctx->stat, thr->stat);
391 void ForkBefore(ThreadState *thr, uptr pc) {
396 void ForkParentAfter(ThreadState *thr, uptr pc) {
401 void ForkChildAfter(ThreadState *thr, uptr pc) {
416 thr->ignore_interceptors++;
417 ThreadIgnoreBegin(thr, pc);
418 ThreadIgnoreSyncBegin(thr, pc);
425 void GrowShadowStack(ThreadState *thr) {
426 const int sz = thr->shadow_stack_end - thr->shadow_stack;
430 internal_memcpy(newstack, thr->shadow_stack, sz * sizeof(uptr));
431 internal_free(thr->shadow_stack);
432 thr->shadow_stack = newstack;
433 thr->shadow_stack_pos = newstack + sz;
434 thr->shadow_stack_end = newstack + newsz;
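
The GrowShadowStack matches show the per-thread shadow stack being reallocated and its three cursor pointers rebased onto the new storage. A minimal self-contained sketch of the same grow-on-demand pattern, using plain new[]/delete[] and a doubling policy (the line computing newsz is not among the matches, so the factor of two is an assumption, and ShadowStack/Grow/Push are stand-in names):

#include <cstring>
#include <cstdint>

typedef uintptr_t uptr;

struct ShadowStack {        // stand-in for the per-thread shadow stack fields
  uptr *begin, *pos, *end;  // storage start, next free slot, storage end
};

static void Grow(ShadowStack *ss) {
  const uptr sz = ss->end - ss->begin;      // current capacity
  const uptr newsz = sz * 2;                // assumed doubling policy
  uptr *newstack = new uptr[newsz];
  std::memcpy(newstack, ss->begin, sz * sizeof(uptr));
  delete[] ss->begin;
  ss->begin = newstack;
  ss->pos = newstack + sz;                  // cursor keeps its logical offset
  ss->end = newstack + newsz;
}

static void Push(ShadowStack *ss, uptr pc) {
  if (ss->pos == ss->end)                   // full: grow before storing
    Grow(ss);
  *ss->pos++ = pc;
}
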
438 u32 CurrentStackId(ThreadState *thr, uptr pc) {
439 if (thr->shadow_stack_pos == 0) // May happen during bootstrap.
443 DCHECK_LT(thr->shadow_stack_pos, thr->shadow_stack_end);
445 if (thr->shadow_stack_pos == thr->shadow_stack_end)
446 GrowShadowStack(thr);
448 thr->shadow_stack_pos[0] = pc;
449 thr->shadow_stack_pos++;
451 u32 id = StackDepotPut(thr->shadow_stack,
452 thr->shadow_stack_pos - thr->shadow_stack);
454 thr->shadow_stack_pos--;
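
CurrentStackId temporarily pushes the caller's pc, hands the whole shadow stack to the stack depot for an id, and pops again. A sketch of that push/deposit/pop shape; the FNV hash below is only a stand-in for StackDepotPut, and the pc != 0 guard is an assumption since it is not among the matched lines:

#include <cstdint>
#include <vector>

typedef uintptr_t uptr;
typedef uint32_t u32;

// Stand-in for StackDepotPut: reduces the frame array to a 32-bit id.
static u32 DepotPut(const uptr *frames, uptr n) {
  u32 h = 2166136261u;                      // FNV-1a, purely illustrative
  for (uptr i = 0; i < n; i++)
    h = (h ^ (u32)frames[i]) * 16777619u;
  return h;
}

static u32 CurrentStackId(std::vector<uptr> &shadow_stack, uptr pc) {
  if (shadow_stack.empty())                 // may happen during bootstrap
    return 0;
  if (pc != 0)
    shadow_stack.push_back(pc);             // temporarily include the caller pc
  u32 id = DepotPut(shadow_stack.data(), shadow_stack.size());
  if (pc != 0)
    shadow_stack.pop_back();                // restore the stack as it was
  return id;
}
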
458 void TraceSwitch(ThreadState *thr) {
459 thr->nomalloc++;
460 Trace *thr_trace = ThreadTrace(thr->tid);
462 unsigned trace = (thr->fast_state.epoch() / kTracePartSize) % TraceParts();
464 hdr->epoch0 = thr->fast_state.epoch();
465 hdr->stack0.ObtainCurrent(thr, 0);
466 hdr->mset0 = thr->mset;
467 thr->nomalloc--;
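
TraceSwitch picks which part of the circular trace buffer the current epoch falls into and stamps that part's header with the starting epoch. A sketch of just the index arithmetic; the sizes below are illustrative constants, not the real kTracePartSize or TraceParts() values:

#include <cstdint>

typedef uint64_t u64;

static const u64 kTracePartSize = 16 * 1024;  // events per trace part (assumed)
static const unsigned kTraceParts = 8;        // parts in the ring (assumed)

// The trace is a ring of fixed-size parts: divide the epoch by the part
// size and wrap around to find the part that records this epoch's events.
static unsigned TracePartFor(u64 epoch) {
  return (unsigned)((epoch / kTracePartSize) % kTraceParts);
}

struct TraceHeader { u64 epoch0; };           // first epoch recorded in the part

static void SwitchTo(TraceHeader *parts, u64 epoch) {
  parts[TracePartFor(epoch)].epoch0 = epoch;  // header remembers where the part starts
}
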
474 uptr TraceTopPC(ThreadState *thr) {
475 Event *events = (Event*)GetThreadTrace(thr->tid);
476 uptr pc = events[thr->fast_state.GetTracePos()];
516 void HandleRace(ThreadState *thr, u64 *shadow_mem,
518 thr->racy_state[0] = cur.raw();
519 thr->racy_state[1] = old.raw();
520 thr->racy_shadow_addr = shadow_mem;
524 ReportRace(thr);
528 static inline bool HappensBefore(Shadow old, ThreadState *thr) {
529 return thr->clock.get(old.TidWithIgnore()) >= old.epoch();
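
HappensBefore asks whether the current thread's vector clock has already observed the old access: its entry for the old access's thread must be at least that access's epoch. A self-contained sketch with a plain array standing in for the thread's clock:

#include <cstdint>

typedef uint64_t u64;

static const int kMaxTid = 64;                 // illustrative bound

struct OldAccess { int tid; u64 epoch; };      // who made the old access, and when

// True if the current thread has synchronized with the old access:
// its clock entry for that thread is at or past the access's epoch.
static bool HappensBefore(const u64 (&clock)[kMaxTid], const OldAccess &old) {
  return clock[old.tid] >= old.epoch;
}
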
533 void MemoryAccessImpl1(ThreadState *thr, uptr addr,
536 StatInc(thr, StatMop);
537 StatInc(thr, kAccessIsWrite ? StatMopWrite : StatMopRead);
538 StatInc(thr, (StatType)(StatMop1 + kAccessSizeLog));
599 StatInc(thr, StatShadowReplace);
602 HandleRace(thr, shadow_mem, cur, old);
606 void UnalignedMemoryAccess(ThreadState *thr, uptr pc, uptr addr,
621 MemoryAccess(thr, pc, addr, kAccessSizeLog, kAccessIsWrite, kIsAtomic);
712 void MemoryAccess(ThreadState *thr, uptr pc, uptr addr,
717 (int)thr->fast_state.tid(), (void*)pc, (void*)addr,
735 StatInc(thr, StatMop);
736 StatInc(thr, kAccessIsWrite ? StatMopWrite : StatMopRead);
737 StatInc(thr, (StatType)(StatMop1 + kAccessSizeLog));
738 StatInc(thr, StatMopRodata);
742 FastState fast_state = thr->fast_state;
744 StatInc(thr, StatMop);
745 StatInc(thr, kAccessIsWrite ? StatMopWrite : StatMopRead);
746 StatInc(thr, (StatType)(StatMop1 + kAccessSizeLog));
747 StatInc(thr, StatMopIgnored);
757 thr->fast_synch_epoch, kAccessIsWrite))) {
758 StatInc(thr, StatMop);
759 StatInc(thr, kAccessIsWrite ? StatMopWrite : StatMopRead);
760 StatInc(thr, (StatType)(StatMop1 + kAccessSizeLog));
761 StatInc(thr, StatMopSame);
767 thr->fast_state = fast_state;
768 TraceAddEvent(thr, fast_state, EventTypeMop, pc);
772 MemoryAccessImpl1(thr, addr, kAccessSizeLog, kAccessIsWrite, kIsAtomic,
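
The MemoryAccess matches outline a cascade of fast paths taken before the full check: reads of read-only data are skipped, accesses under an active ignore are skipped, and an access identical to one already recorded in the shadow cell since the last synchronization is skipped; only then does the slow path (MemoryAccessImpl1 in the source) run. A simplified sketch of that ordering, where the predicates are stand-ins rather than the real shadow encoding:

#include <cstdint>

typedef uint64_t u64;

struct AccessCtx {
  bool in_rodata;        // target lies in read-only data (assumed precomputed)
  bool ignore_bit;       // thread currently ignores reads/writes
  bool same_as_shadow;   // a shadow cell already holds this exact access
  u64 last_sync_epoch;   // thr->fast_synch_epoch in the real code
  u64 shadow_epoch;      // epoch stored in the matching shadow cell
};

enum Outcome { kSkipRodata, kSkipIgnored, kSkipSame, kFullCheck };

// Order of the fast paths as suggested by the matched lines; the full
// race check only runs when none of them fires.
static Outcome ClassifyAccess(const AccessCtx &c) {
  if (c.in_rodata)
    return kSkipRodata;                        // .rodata reads cannot race
  if (c.ignore_bit)
    return kSkipIgnored;
  if (c.same_as_shadow && c.shadow_epoch > c.last_sync_epoch)
    return kSkipSame;                          // already recorded since last sync
  return kFullCheck;
}
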
778 void MemoryAccessImpl(ThreadState *thr, uptr addr,
782 thr->fast_synch_epoch, kAccessIsWrite))) {
783 StatInc(thr, StatMop);
784 StatInc(thr, kAccessIsWrite ? StatMopWrite : StatMopRead);
785 StatInc(thr, (StatType)(StatMop1 + kAccessSizeLog));
786 StatInc(thr, StatMopSame);
790 MemoryAccessImpl1(thr, addr, kAccessSizeLog, kAccessIsWrite, kIsAtomic,
794 static void MemoryRangeSet(ThreadState *thr, uptr pc, uptr addr, uptr size,
796 (void)thr;
855 void MemoryResetRange(ThreadState *thr, uptr pc, uptr addr, uptr size) {
856 MemoryRangeSet(thr, pc, addr, size, 0);
859 void MemoryRangeFreed(ThreadState *thr, uptr pc, uptr addr, uptr size) {
865 CHECK_EQ(thr->is_freeing, false);
866 thr->is_freeing = true;
867 MemoryAccessRange(thr, pc, addr, size, true);
868 thr->is_freeing = false;
870 thr->fast_state.IncrementEpoch();
871 TraceAddEvent(thr, thr->fast_state, EventTypeMop, pc);
873 Shadow s(thr->fast_state);
878 MemoryRangeSet(thr, pc, addr, size, s.raw());
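
MemoryRangeFreed first replays the range as an ordinary write access (with is_freeing set, so racing accesses are caught and attributed to the free), then stamps the whole range's shadow with a value derived from the current thread state. A sketch of the stamping half; the shadow encoding below is deliberately simplified and the "freed" bit placement is an assumption:

#include <cstdint>
#include <vector>

typedef uint64_t u64;
typedef uintptr_t uptr;

// Simplified shadow value: a "freed" marker bit, a thread id, and an epoch.
static u64 MakeFreedShadow(unsigned tid, u64 epoch) {
  const u64 kFreedBit = 1ull << 63;            // illustrative encoding only
  return kFreedBit | ((u64)tid << 48) | (epoch & ((1ull << 48) - 1));
}

// Stamp every shadow word covering [addr, addr+size) with the same value,
// the way MemoryRangeSet is used in the matches above.
static void MemoryRangeSet(std::vector<u64> &shadow, uptr addr, uptr size,
                           u64 val, uptr kShadowCell = 8) {
  for (uptr a = addr; a < addr + size; a += kShadowCell)
    shadow[a / kShadowCell] = val;
}
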
881 void MemoryRangeImitateWrite(ThreadState *thr, uptr pc, uptr addr, uptr size) {
883 thr->fast_state.IncrementEpoch();
884 TraceAddEvent(thr, thr->fast_state, EventTypeMop, pc);
886 Shadow s(thr->fast_state);
890 MemoryRangeSet(thr, pc, addr, size, s.raw());
894 void FuncEntry(ThreadState *thr, uptr pc) {
895 StatInc(thr, StatFuncEnter);
896 DPrintf2("#%d: FuncEntry %p\n", (int)thr->fast_state.tid(), (void*)pc);
898 thr->fast_state.IncrementEpoch();
899 TraceAddEvent(thr, thr->fast_state, EventTypeFuncEnter, pc);
904 DCHECK_GE(thr->shadow_stack_pos, thr->shadow_stack);
906 DCHECK_LT(thr->shadow_stack_pos, thr->shadow_stack_end);
908 if (thr->shadow_stack_pos == thr->shadow_stack_end)
909 GrowShadowStack(thr);
911 thr->shadow_stack_pos[0] = pc;
912 thr->shadow_stack_pos++;
916 void FuncExit(ThreadState *thr) {
917 StatInc(thr, StatFuncExit);
918 DPrintf2("#%d: FuncExit\n", (int)thr->fast_state.tid());
920 thr->fast_state.IncrementEpoch();
921 TraceAddEvent(thr, thr->fast_state, EventTypeFuncExit, 0);
924 DCHECK_GT(thr->shadow_stack_pos, thr->shadow_stack);
926 DCHECK_LT(thr->shadow_stack_pos, thr->shadow_stack_end);
928 thr->shadow_stack_pos--;
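
FuncEntry and FuncExit are symmetric: each bumps the epoch, records a trace event, and then pushes or pops the shadow stack. A compact sketch of that pairing, with event recording reduced to a counter so the example stays self-contained (Thread and its fields are stand-ins, not the real ThreadState):

#include <cstdint>
#include <vector>

typedef uint64_t u64;
typedef uintptr_t uptr;

struct Thread {
  u64 epoch = 0;                 // stand-in for fast_state.epoch()
  std::vector<uptr> shadow_stack;
  u64 traced_events = 0;         // stand-in for TraceAddEvent
};

static void FuncEntry(Thread &thr, uptr pc) {
  thr.epoch++;                   // every traced event advances the epoch
  thr.traced_events++;           // would record EventTypeFuncEnter with pc
  thr.shadow_stack.push_back(pc);
}

static void FuncExit(Thread &thr) {
  thr.epoch++;
  thr.traced_events++;           // would record EventTypeFuncExit
  thr.shadow_stack.pop_back();
}
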
931 void ThreadIgnoreBegin(ThreadState *thr, uptr pc) {
932 DPrintf("#%d: ThreadIgnoreBegin\n", thr->tid);
933 thr->ignore_reads_and_writes++;
934 CHECK_GT(thr->ignore_reads_and_writes, 0);
935 thr->fast_state.SetIgnoreBit();
938 thr->mop_ignore_set.Add(CurrentStackId(thr, pc));
942 void ThreadIgnoreEnd(ThreadState *thr, uptr pc) {
943 DPrintf("#%d: ThreadIgnoreEnd\n", thr->tid);
944 thr->ignore_reads_and_writes--;
945 CHECK_GE(thr->ignore_reads_and_writes, 0);
946 if (thr->ignore_reads_and_writes == 0) {
947 thr->fast_state.ClearIgnoreBit();
949 thr->mop_ignore_set.Reset();
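
ThreadIgnoreBegin/End maintain a nesting counter: every Begin sets the ignore bit and records the caller's stack id, and only the outermost End clears the bit and resets the recorded set. A sketch of that reference-count pattern with stand-in names:

#include <cassert>
#include <cstdint>
#include <vector>

typedef uint32_t u32;

struct IgnoreState {
  int depth = 0;                  // ignore_reads_and_writes in the source
  bool ignore_bit = false;        // mirrors the fast-state ignore bit
  std::vector<u32> ignore_set;    // stack ids of the Begin callers
};

static void IgnoreBegin(IgnoreState &s, u32 stack_id) {
  s.depth++;
  s.ignore_bit = true;            // accesses are skipped while the bit is set
  s.ignore_set.push_back(stack_id);
}

static void IgnoreEnd(IgnoreState &s) {
  s.depth--;
  assert(s.depth >= 0);
  if (s.depth == 0) {             // only the outermost End re-enables checking
    s.ignore_bit = false;
    s.ignore_set.clear();
  }
}
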
954 void ThreadIgnoreSyncBegin(ThreadState *thr, uptr pc) {
955 DPrintf("#%d: ThreadIgnoreSyncBegin\n", thr->tid);
956 thr->ignore_sync++;
957 CHECK_GT(thr->ignore_sync, 0);
960 thr->sync_ignore_set.Add(CurrentStackId(thr, pc));
964 void ThreadIgnoreSyncEnd(ThreadState *thr, uptr pc) {
965 DPrintf("#%d: ThreadIgnoreSyncEnd\n", thr->tid);
966 thr->ignore_sync--;
967 CHECK_GE(thr->ignore_sync, 0);
969 if (thr->ignore_sync == 0)
970 thr->sync_ignore_set.Reset();