Home | History | Annotate | Download | only in asan
      1 //===-- asan_fake_stack.cc ------------------------------------------------===//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 // This file is a part of AddressSanitizer, an address sanity checker.
     11 //
     12 // FakeStack is used to detect use-after-return bugs.
     13 //===----------------------------------------------------------------------===//
     14 
     15 #include "asan_allocator.h"
     16 #include "asan_poisoning.h"
     17 #include "asan_thread.h"
     18 
     19 namespace __asan {
     20 
// Magic byte marking returned fake-frame shadow, replicated into wider
// words so SetShadow can store 8 shadow bytes per u64 write.
static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;

// Size of the redzones placed around dynamic allocas; the mask is
// kAllocaRedzoneSize - 1 and is used to round addresses up to that alignment.
static const u64 kAllocaRedzoneSize = 32UL;
static const u64 kAllocaRedzoneMask = 31UL;
     28 
// For small size classes inline PoisonShadow for better performance.
// Writes `magic` over the shadow of the frame at `ptr`; magic == 0 unpoisons.
ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
  CHECK_EQ(SHADOW_SCALE, 3);  // This code expects SHADOW_SCALE=3.
  u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
  if (class_id <= 6) {
    // Small classes: store (1 << class_id) whole u64 shadow words directly.
    for (uptr i = 0; i < (1U << class_id); i++) {
      shadow[i] = magic;
      // Make sure this does not become memset.
      SanitizerBreakOptimization(nullptr);
    }
  } else {
    // The size class is too big, it's cheaper to poison only size bytes.
    PoisonShadow(ptr, size, static_cast<u8>(magic));
  }
}
     44 
     45 FakeStack *FakeStack::Create(uptr stack_size_log) {
     46   static uptr kMinStackSizeLog = 16;
     47   static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
     48   if (stack_size_log < kMinStackSizeLog)
     49     stack_size_log = kMinStackSizeLog;
     50   if (stack_size_log > kMaxStackSizeLog)
     51     stack_size_log = kMaxStackSizeLog;
     52   uptr size = RequiredSize(stack_size_log);
     53   FakeStack *res = reinterpret_cast<FakeStack *>(
     54       flags()->uar_noreserve ? MmapNoReserveOrDie(size, "FakeStack")
     55                              : MmapOrDie(size, "FakeStack"));
     56   res->stack_size_log_ = stack_size_log;
     57   u8 *p = reinterpret_cast<u8 *>(res);
     58   VReport(1, "T%d: FakeStack created: %p -- %p stack_size_log: %zd; "
     59           "mmapped %zdK, noreserve=%d \n",
     60           GetCurrentTidOrInvalid(), p,
     61           p + FakeStack::RequiredSize(stack_size_log), stack_size_log,
     62           size >> 10, flags()->uar_noreserve);
     63   return res;
     64 }
     65 
     66 void FakeStack::Destroy(int tid) {
     67   PoisonAll(0);
     68   if (Verbosity() >= 2) {
     69     InternalScopedString str(kNumberOfSizeClasses * 50);
     70     for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++)
     71       str.append("%zd: %zd/%zd; ", class_id, hint_position_[class_id],
     72                  NumberOfFrames(stack_size_log(), class_id));
     73     Report("T%d: FakeStack destroyed: %s\n", tid, str.data());
     74   }
     75   uptr size = RequiredSize(stack_size_log_);
     76   FlushUnneededASanShadowMemory(reinterpret_cast<uptr>(this), size);
     77   UnmapOrDie(this, size);
     78 }
     79 
     80 void FakeStack::PoisonAll(u8 magic) {
     81   PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
     82                magic);
     83 }
     84 
#if !defined(_MSC_VER) || defined(__clang__)
ALWAYS_INLINE USED
#endif
// Finds a free frame of the given size class, marks it as allocated, and
// records the caller's real stack pointer in it. Returns null when every
// frame of this class is in use.
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  // A pending no-return event (see HandleNoReturn) may have leaked frames;
  // collect them before searching.
  if (needs_gc_)
    GC(real_stack);
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    // Remember where this frame's allocation flag lives so it can be
    // cleared on deallocation (see FakeStack::Deallocate usage in OnFree).
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return nullptr; // We are out of fake stack.
}
    115 
    116 uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
    117   uptr stack_size_log = this->stack_size_log();
    118   uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
    119   uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
    120   if (ptr < beg || ptr >= end) return 0;
    121   uptr class_id = (ptr - beg) >> stack_size_log;
    122   uptr base = beg + (class_id << stack_size_log);
    123   CHECK_LE(base, ptr);
    124   CHECK_LT(ptr, base + (1UL << stack_size_log));
    125   uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
    126   uptr res = base + pos * BytesInSizeClass(class_id);
    127   *frame_end = res + BytesInSizeClass(class_id);
    128   *frame_beg = res + sizeof(FakeFrame);
    129   return res;
    130 }
    131 
// Called on no-return events (throw/longjmp and the like); defers the actual
// collection of leaked frames to the next Allocate() call (see GC below).
void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}
    135 
    136 // When throw, longjmp or some such happens we don't call OnFree() and
    137 // as the result may leak one or more fake frames, but the good news is that
    138 // we are notified about all such events by HandleNoReturn().
    139 // If we recently had such no-return event we need to collect garbage frames.
    140 // We do it based on their 'real_stack' values -- everything that is lower
    141 // than the current real_stack is garbage.
    142 NOINLINE void FakeStack::GC(uptr real_stack) {
    143   uptr collected = 0;
    144   for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    145     u8 *flags = GetFlags(stack_size_log(), class_id);
    146     for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
    147          i++) {
    148       if (flags[i] == 0) continue;  // not allocated.
    149       FakeFrame *ff = reinterpret_cast<FakeFrame *>(
    150           GetFrame(stack_size_log(), class_id, i));
    151       if (ff->real_stack < real_stack) {
    152         flags[i] = 0;
    153         collected++;
    154       }
    155     }
    156   }
    157   needs_gc_ = false;
    158 }
    159 
    160 void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
    161   for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    162     u8 *flags = GetFlags(stack_size_log(), class_id);
    163     for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
    164          i++) {
    165       if (flags[i] == 0) continue;  // not allocated.
    166       FakeFrame *ff = reinterpret_cast<FakeFrame *>(
    167           GetFrame(stack_size_log(), class_id, i));
    168       uptr begin = reinterpret_cast<uptr>(ff);
    169       callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
    170     }
    171   }
    172 }
    173 
    174 #if SANITIZER_LINUX && !SANITIZER_ANDROID
    175 static THREADLOCAL FakeStack *fake_stack_tls;
    176 
    177 FakeStack *GetTLSFakeStack() {
    178   return fake_stack_tls;
    179 }
    180 void SetTLSFakeStack(FakeStack *fs) {
    181   fake_stack_tls = fs;
    182 }
    183 #else
    184 FakeStack *GetTLSFakeStack() { return 0; }
    185 void SetTLSFakeStack(FakeStack *fs) { }
    186 #endif  // SANITIZER_LINUX && !SANITIZER_ANDROID
    187 
    188 static FakeStack *GetFakeStack() {
    189   AsanThread *t = GetCurrentThread();
    190   if (!t) return nullptr;
    191   return t->fake_stack();
    192 }
    193 
    194 static FakeStack *GetFakeStackFast() {
    195   if (FakeStack *fs = GetTLSFakeStack())
    196     return fs;
    197   if (!__asan_option_detect_stack_use_after_return)
    198     return nullptr;
    199   return GetFakeStack();
    200 }
    201 
    202 ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
    203   FakeStack *fs = GetFakeStackFast();
    204   if (!fs) return 0;
    205   uptr local_stack;
    206   uptr real_stack = reinterpret_cast<uptr>(&local_stack);
    207   FakeFrame *ff = fs->Allocate(fs->stack_size_log(), class_id, real_stack);
    208   if (!ff) return 0;  // Out of fake stack.
    209   uptr ptr = reinterpret_cast<uptr>(ff);
    210   SetShadow(ptr, size, class_id, 0);
    211   return ptr;
    212 }
    213 
// Releases a fake frame and re-poisons its shadow with the after-return
// magic (kMagic8) so later accesses are flagged as use-after-return.
ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}
    218 
    219 } // namespace __asan
    220 
    221 // ---------------------- Interface ---------------- {{{1
    222 using namespace __asan;
// Defines the exported __asan_stack_malloc_N / __asan_stack_free_N entry
// points for one size class N, thin wrappers over OnMalloc/OnFree above.
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                       \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
      __asan_stack_malloc_##class_id(uptr size) {                              \
    return OnMalloc(class_id, size);                                           \
  }                                                                            \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id(  \
      uptr ptr, uptr size) {                                                   \
    OnFree(ptr, class_id, size);                                               \
  }
    232 
// Instantiate the malloc/free interface for size classes 0 through 10.
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)
    244 extern "C" {
// Returns the current thread's fake stack, or null when none is available.
SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_get_current_fake_stack() { return GetFakeStackFast(); }
    247 
    248 SANITIZER_INTERFACE_ATTRIBUTE
    249 void *__asan_addr_is_in_fake_stack(void *fake_stack, void *addr, void **beg,
    250                                    void **end) {
    251   FakeStack *fs = reinterpret_cast<FakeStack*>(fake_stack);
    252   if (!fs) return nullptr;
    253   uptr frame_beg, frame_end;
    254   FakeFrame *frame = reinterpret_cast<FakeFrame *>(fs->AddrIsInFakeStack(
    255       reinterpret_cast<uptr>(addr), &frame_beg, &frame_end));
    256   if (!frame) return nullptr;
    257   if (frame->magic != kCurrentStackFrameMagic)
    258     return nullptr;
    259   if (beg) *beg = reinterpret_cast<void*>(frame_beg);
    260   if (end) *end = reinterpret_cast<void*>(frame_end);
    261   return reinterpret_cast<void*>(frame->real_stack);
    262 }
    263 
// Poisons the redzones around a dynamic alloca of `size` bytes at `addr`:
// a full left redzone, a partial right redzone covering the tail of the
// last shadow granule, and a full right redzone rounded up to
// kAllocaRedzoneSize alignment.
SANITIZER_INTERFACE_ATTRIBUTE
void __asan_alloca_poison(uptr addr, uptr size) {
  uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
  uptr PartialRzAddr = addr + size;
  // Round the end of the allocation up to the redzone alignment.
  uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
  // Align down to the start of the shadow granule containing the tail.
  uptr PartialRzAligned = PartialRzAddr & ~(SHADOW_GRANULARITY - 1);
  FastPoisonShadow(LeftRedzoneAddr, kAllocaRedzoneSize, kAsanAllocaLeftMagic);
  FastPoisonShadowPartialRightRedzone(
      PartialRzAligned, PartialRzAddr % SHADOW_GRANULARITY,
      RightRzAddr - PartialRzAligned, kAsanAllocaRightMagic);
  FastPoisonShadow(RightRzAddr, kAllocaRedzoneSize, kAsanAllocaRightMagic);
}
    276 
// Unpoisons the shadow of the alloca region between `top` and `bottom`
// (top <= bottom). No-op for a null top or an inverted range.
SANITIZER_INTERFACE_ATTRIBUTE
void __asan_allocas_unpoison(uptr top, uptr bottom) {
  if ((!top) || (top > bottom)) return;
  // Clear shadow bytes directly; REAL(memset) bypasses the intercepted memset.
  REAL(memset)(reinterpret_cast<void*>(MemToShadow(top)), 0,
               (bottom - top) / SHADOW_GRANULARITY);
}
    283 } // extern "C"
    284