//===-- tsan_mman.cc ------------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_allocator_interface.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_placement_new.h"
#include "tsan_mman.h"
#include "tsan_rtl.h"
#include "tsan_report.h"
#include "tsan_flags.h"

// May be overridden by the front-end.
extern "C" void WEAK __tsan_malloc_hook(void *ptr, uptr size) {
  (void)ptr;
  (void)size;
}
extern "C" void WEAK __sanitizer_malloc_hook(void *ptr, uptr size) {
  (void)ptr;
  (void)size;
}

extern "C" void WEAK __tsan_free_hook(void *ptr) {
  (void)ptr;
}
extern "C" void WEAK __sanitizer_free_hook(void *ptr) {
  (void)ptr;
}
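
// A front-end observes allocations by providing a strong definition of one of
// these hooks. A minimal sketch of such user code (hypothetical; LogAlloc and
// LogFree are placeholders, and uptr corresponds to size_t in user headers):
//
//   extern "C" void __sanitizer_malloc_hook(void *ptr, size_t size) {
//     LogAlloc(ptr, size);  // must not call back into malloc/free
//   }
//   extern "C" void __sanitizer_free_hook(void *ptr) {
//     LogFree(ptr);
//   }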

namespace __tsan {

struct MapUnmapCallback {
  void OnMap(uptr p, uptr size) const { }
  void OnUnmap(uptr p, uptr size) const {
    // We are about to unmap a chunk of user memory.
    // Mark the corresponding shadow memory as not needed.
    DontNeedShadowFor(p, size);
  }
};
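
// This callback is plugged into the allocator so that shadow memory is
// released whenever the allocator returns pages to the OS. The Allocator type
// is defined in tsan_rtl.h, roughly as in the following sketch (the exact
// template parameters may differ by platform and version):
//
//   typedef SizeClassAllocator64<kHeapMemBeg, kHeapMemEnd - kHeapMemBeg, 0,
//       DefaultSizeClassMap, MapUnmapCallback> PrimaryAllocator;
//   typedef LargeMmapAllocator<MapUnmapCallback> SecondaryAllocator;
//   typedef CombinedAllocator<PrimaryAllocator,
//       SizeClassAllocatorLocalCache<PrimaryAllocator>,
//       SecondaryAllocator> Allocator;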

// The allocator lives in a static buffer and is initialized explicitly via
// InitializeAllocator(), so no C++ global constructor is required.
static char allocator_placeholder[sizeof(Allocator)] ALIGNED(64);
Allocator *allocator() {
  return reinterpret_cast<Allocator*>(&allocator_placeholder);
}

void InitializeAllocator() {
  allocator()->Init();
}

void AllocatorThreadStart(ThreadState *thr) {
  allocator()->InitCache(&thr->alloc_cache);
  internal_allocator()->InitCache(&thr->internal_alloc_cache);
}

void AllocatorThreadFinish(ThreadState *thr) {
  allocator()->DestroyCache(&thr->alloc_cache);
  internal_allocator()->DestroyCache(&thr->internal_alloc_cache);
}

void AllocatorPrintStats() {
  allocator()->PrintStats();
}

// Reports a malloc/free call made from within a signal handler when the
// report_signal_unsafe flag is set; malloc/free are not async-signal-safe
// and such calls can deadlock.
static void SignalUnsafeCall(ThreadState *thr, uptr pc) {
  if (!thr->in_signal_handler || !flags()->report_signal_unsafe)
    return;
  StackTrace stack;
  stack.ObtainCurrent(thr, pc);
  ThreadRegistryLock l(ctx->thread_registry);
  ScopedReport rep(ReportTypeSignalUnsafe);
  if (!IsFiredSuppression(ctx, rep, stack)) {
    rep.AddStack(&stack, true);
    OutputReport(thr, rep);
  }
}

void *user_alloc(ThreadState *thr, uptr pc, uptr sz, uptr align) {
  // Sanity-check the size and alignment; one terabyte is far beyond anything
  // the allocator can satisfy, so treat such requests as out-of-memory.
  if ((sz >= (1ull << 40)) || (align >= (1ull << 40)))
    return AllocatorReturnNull();
  void *p = allocator()->Allocate(&thr->alloc_cache, sz, align);
  if (p == 0)
    return 0;
  if (ctx && ctx->initialized)
    OnUserAlloc(thr, pc, (uptr)p, sz, true);
  SignalUnsafeCall(thr, pc);
  return p;
}

void user_free(ThreadState *thr, uptr pc, void *p) {
  if (ctx && ctx->initialized)
    OnUserFree(thr, pc, (uptr)p, true);
  allocator()->Deallocate(&thr->alloc_cache, p);
  SignalUnsafeCall(thr, pc);
}
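
// These two functions are the entry points behind the malloc/free
// interceptors. A simplified sketch of how an interceptor drives them
// (hypothetical; the real interceptors in tsan_interceptors.cc add more
// bookkeeping around this core):
//
//   TSAN_INTERCEPTOR(void*, malloc, uptr size) {
//     SCOPED_INTERCEPTOR_RAW(malloc, size);
//     void *p = user_alloc(thr, pc, size);
//     invoke_malloc_hook(p, size);
//     return p;
//   }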

void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write) {
  DPrintf("#%d: alloc(%zu) = %p\n", thr->tid, sz, (void*)p);
  // Register the block in the metadata map, then imitate a write to the whole
  // range so that unsynchronized accesses from other threads are reported.
  ctx->metamap.AllocBlock(thr, pc, p, sz);
  if (write && thr->ignore_reads_and_writes == 0)
    MemoryRangeImitateWrite(thr, pc, p, sz);
  else
    MemoryResetRange(thr, pc, p, sz);
}

void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write) {
  CHECK_NE(p, (uptr)0);
  uptr sz = ctx->metamap.FreeBlock(thr, pc, p);
  DPrintf("#%d: free(%p, %zu)\n", thr->tid, (void*)p, sz);
  // Mark the range as freed so that subsequent accesses are reported as
  // use-after-free.
  if (write && thr->ignore_reads_and_writes == 0)
    MemoryRangeFreed(thr, pc, p, sz);
}

void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz) {
  void *p2 = 0;
  // FIXME: Handle shrinking more efficiently; some software actually relies
  // on realloc to a smaller size.
  if (sz) {
    p2 = user_alloc(thr, pc, sz);
    if (p2 == 0)
      return 0;
    if (p) {
      uptr oldsz = user_alloc_usable_size(p);
      internal_memcpy(p2, p, min(oldsz, sz));
    }
  }
  if (p)
    user_free(thr, pc, p);
  return p2;
}
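
// One possible shape for the FIXME above (a hypothetical sketch, not the
// current behavior): when the request shrinks the block and the existing
// chunk is already big enough, keep the chunk instead of allocate+copy+free:
//
//   if (p && sz && sz <= user_alloc_usable_size(p)) {
//     return p;  // same chunk still fits; block metadata is left unchanged
//   }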

uptr user_alloc_usable_size(const void *p) {
  if (p == 0)
    return 0;
  MBlock *b = ctx->metamap.GetBlock((uptr)p);
  return b ? b->siz : 0;
}

void invoke_malloc_hook(void *ptr, uptr size) {
  ThreadState *thr = cur_thread();
  // Skip the hooks until the runtime is fully initialized and while
  // interceptors are being ignored.
  if (ctx == 0 || !ctx->initialized || thr->ignore_interceptors)
    return;
  __tsan_malloc_hook(ptr, size);
  __sanitizer_malloc_hook(ptr, size);
}

void invoke_free_hook(void *ptr) {
  ThreadState *thr = cur_thread();
  if (ctx == 0 || !ctx->initialized || thr->ignore_interceptors)
    return;
  __tsan_free_hook(ptr);
  __sanitizer_free_hook(ptr);
}

void *internal_alloc(MBlockType typ, uptr sz) {
  ThreadState *thr = cur_thread();
  CHECK_LE(sz, InternalSizeClassMap::kMaxSize);
  if (thr->nomalloc) {
    thr->nomalloc = 0;  // CHECK itself calls internal_alloc(); reset first to avoid recursion.
    CHECK(0);
  }
  return InternalAlloc(sz, &thr->internal_alloc_cache);
}

void internal_free(void *p) {
  ThreadState *thr = cur_thread();
  if (thr->nomalloc) {
    thr->nomalloc = 0;  // CHECK itself calls internal_alloc(); reset first to avoid recursion.
    CHECK(0);
  }
  InternalFree(p, &thr->internal_alloc_cache);
}
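
// internal_alloc/internal_free back the runtime's own data structures and are
// kept separate from the user heap. Typical use inside the runtime looks like
// the following sketch (the ReportStack allocation is just an illustration):
//
//   ReportStack *s = (ReportStack*)internal_alloc(MBlockReportStack,
//                                                 sizeof(ReportStack));
//   ...
//   internal_free(s);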

}  // namespace __tsan

using namespace __tsan;

extern "C" {
uptr __sanitizer_get_current_allocated_bytes() {
  uptr stats[AllocatorStatCount];
  allocator()->GetStats(stats);
  return stats[AllocatorStatAllocated];
}
uptr __tsan_get_current_allocated_bytes() {
  return __sanitizer_get_current_allocated_bytes();
}

uptr __sanitizer_get_heap_size() {
  uptr stats[AllocatorStatCount];
  allocator()->GetStats(stats);
  return stats[AllocatorStatMapped];
}
uptr __tsan_get_heap_size() {
  return __sanitizer_get_heap_size();
}

uptr __sanitizer_get_free_bytes() {
  return 1;  // Not implemented; returns a dummy value.
}
uptr __tsan_get_free_bytes() {
  return __sanitizer_get_free_bytes();
}

uptr __sanitizer_get_unmapped_bytes() {
  return 1;  // Not implemented; returns a dummy value.
}
uptr __tsan_get_unmapped_bytes() {
  return __sanitizer_get_unmapped_bytes();
}

uptr __sanitizer_get_estimated_allocated_size(uptr size) {
  return size;
}
uptr __tsan_get_estimated_allocated_size(uptr size) {
  return __sanitizer_get_estimated_allocated_size(size);
}

int __sanitizer_get_ownership(const void *p) {
  return allocator()->GetBlockBegin(p) != 0;
}
int __tsan_get_ownership(const void *p) {
  return __sanitizer_get_ownership(p);
}

uptr __sanitizer_get_allocated_size(const void *p) {
  return user_alloc_usable_size(p);
}
uptr __tsan_get_allocated_size(const void *p) {
  return __sanitizer_get_allocated_size(p);
}
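
// Client code can query these through the public sanitizer interface. A small
// usage sketch (hypothetical user code):
//
//   #include <sanitizer/allocator_interface.h>
//   #include <stdio.h>
//
//   void DumpHeapStats() {
//     fprintf(stderr, "allocated: %zu, heap: %zu\n",
//             __sanitizer_get_current_allocated_bytes(),
//             __sanitizer_get_heap_size());
//   }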

void __tsan_on_thread_idle() {
  // Return the thread-local allocator caches to the central allocators and
  // flush per-thread metadata caches, so the memory is not stranded on a
  // thread that is about to go idle.
  ThreadState *thr = cur_thread();
  allocator()->SwallowCache(&thr->alloc_cache);
  internal_allocator()->SwallowCache(&thr->internal_alloc_cache);
  ctx->metamap.OnThreadIdle(thr);
}
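
// A thread-pool worker might call this before blocking for a long time
// (hypothetical user code; TaskQueue and WaitForWork are placeholders):
//
//   extern "C" void __tsan_on_thread_idle();
//   void WorkerWait(TaskQueue *q) {
//     __tsan_on_thread_idle();
//     q->WaitForWork();  // blocks until new tasks arrive
//   }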
}  // extern "C"