      1 //===-- tsan_update_shadow_word_inl.h ---------------------------*- C++ -*-===//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 // This file is a part of ThreadSanitizer (TSan), a race detector.
     11 //
     12 // Body of the hottest inner loop.
     13 // If we wrap this body into a function, compilers (both gcc and clang)
// produce slightly less efficient code.
     15 //===----------------------------------------------------------------------===//
// NOTE(review): this file is #included as the body of the shadow-update loop
// in the memory-access handler; everything it references -- thr, cur, old,
// idx, shadow_mem, store_word, kAccessSizeLog, kAccessIsWrite, kIsAtomic,
// kShadowCnt, and the RACE label -- is declared by the including function.
// The do { ... } while (0) wrapper exists so that `break` exits this one
// iteration's body cleanly.
do {
  StatInc(thr, StatShadowProcessed);
  // Size in bytes of the current access (kAccessSizeLog is its log2).
  const unsigned kAccessSize = 1 << kAccessSizeLog;
  // Select which of the kShadowCnt shadow slots for this granule to examine
  // on this iteration; the modulo makes the probe sequence wrap around.
  unsigned off = cur.ComputeSearchOffset();
  u64 *sp = &shadow_mem[(idx + off) % kShadowCnt];
  old = LoadShadow(sp);
  if (old.IsZero()) {
    // Empty slot: publish the current access here, if it has not already
    // been stored into another slot (store_word is passed by pointer --
    // presumably StoreIfNotYetStored clears it on first store; confirm in
    // the includer).
    StatInc(thr, StatShadowZero);
    if (store_word)
      StoreIfNotYetStored(sp, &store_word);
    // The above StoreIfNotYetStored could be done unconditionally
    // and it even shows 4% gain on synthetic benchmarks (r4307).
    break;
  }
  // is the memory access equal to the previous?
  // (same starting offset within the granule and same size)
  if (Shadow::Addr0AndSizeAreEqual(cur, old)) {
    StatInc(thr, StatShadowSameSize);
    // same thread?
    if (Shadow::TidsAreEqual(old, cur)) {
      StatInc(thr, StatShadowSameThread);
      if (OldIsInSameSynchEpoch(old, thr)) {
        // Same thread, same sync epoch, same address/size: the stored access
        // subsumes the current one iff it is at least as strong
        // (write vs. read / non-atomic vs. atomic).
        if (old.IsRWNotWeaker(kAccessIsWrite, kIsAtomic)) {
          // found a slot that holds effectively the same info
          // (that is, same tid, same sync epoch and same size)
          StatInc(thr, StatMopSame);
          // Fully done with this access -- return from the including
          // function, not just this slot's body.
          return;
        }
        // Current access is stronger: replace the stored one.
        StoreIfNotYetStored(sp, &store_word);
        break;
      }
      // Same thread but an older sync epoch: refresh the slot only if the
      // old access is not stronger than the current one.
      if (old.IsRWWeakerOrEqual(kAccessIsWrite, kIsAtomic))
        StoreIfNotYetStored(sp, &store_word);
      break;
    }
    StatInc(thr, StatShadowAnotherThread);
    // Different thread, same address/size. If the old access
    // happens-before the current thread's epoch, they are ordered:
    // overwrite the slot with the current access.
    if (HappensBefore(old, thr)) {
      StoreIfNotYetStored(sp, &store_word);
      break;
    }
    // Unordered, but two reads (or two atomics) never race.
    if (old.IsBothReadsOrAtomic(kAccessIsWrite, kIsAtomic))
      break;
    // Unordered conflicting accesses: report via the includer's RACE label.
    goto RACE;
  }
  // Do the memory accesses intersect?
  if (Shadow::TwoRangesIntersect(old, cur, kAccessSize)) {
    StatInc(thr, StatShadowIntersect);
    // Overlapping but unequal ranges. Same thread cannot race with itself.
    if (Shadow::TidsAreEqual(old, cur)) {
      StatInc(thr, StatShadowSameThread);
      break;
    }
    StatInc(thr, StatShadowAnotherThread);
    // Same no-race exits as above: both-read/atomic, or ordered by
    // happens-before; note the slot is NOT overwritten here since the
    // ranges differ.
    if (old.IsBothReadsOrAtomic(kAccessIsWrite, kIsAtomic))
      break;
    if (HappensBefore(old, thr))
      break;
    goto RACE;
  }
  // The accesses do not intersect.
  StatInc(thr, StatShadowNotIntersect);
  break;
} while (0);
     77