//===-- asan_fake_stack.cc ------------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
//
// FakeStack is used to detect use-after-return bugs.
//===----------------------------------------------------------------------===//
#include "asan_allocator.h"
#include "asan_poisoning.h"
#include "asan_thread.h"

namespace __asan {

// Returns true if addr lies within the mmaped region of the given size class.
bool FakeStack::AddrIsInSizeClass(uptr addr, uptr size_class) {
  uptr mem = allocated_size_classes_[size_class];
  uptr size = ClassMmapSize(size_class);
  bool res = mem && addr >= mem && addr < mem + size;
  return res;
}

// If addr belongs to the fake stack, returns the beginning of the FakeFrame
// that contains it; otherwise returns 0.
uptr FakeStack::AddrIsInFakeStack(uptr addr) {
  for (uptr size_class = 0; size_class < kNumberOfSizeClasses; size_class++) {
    if (!AddrIsInSizeClass(addr, size_class)) continue;
    uptr size_class_first_ptr = allocated_size_classes_[size_class];
    uptr size = ClassSize(size_class);
    CHECK_LE(size_class_first_ptr, addr);
    CHECK_GT(size_class_first_ptr + ClassMmapSize(size_class), addr);
    return size_class_first_ptr + ((addr - size_class_first_ptr) / size) * size;
  }
  return 0;
}

// We may want to compute this during compilation.
ALWAYS_INLINE uptr FakeStack::ComputeSizeClass(uptr alloc_size) {
  uptr rounded_size = RoundUpToPowerOfTwo(alloc_size);
  uptr log = Log2(rounded_size);
  CHECK_LE(alloc_size, (1UL << log));
  CHECK_GT(alloc_size, (1UL << (log - 1)));
  uptr res = log < kMinStackFrameSizeLog ? 0 : log - kMinStackFrameSizeLog;
  CHECK_LT(res, kNumberOfSizeClasses);
  CHECK_GE(ClassSize(res), rounded_size);
  return res;
}

// Appends node to the tail of the free-frame queue.
void FakeFrameFifo::FifoPush(FakeFrame *node) {
  CHECK(node);
  node->next = 0;
  if (first_ == 0 && last_ == 0) {
    first_ = last_ = node;
  } else {
    CHECK(first_);
    CHECK(last_);
    last_->next = node;
    last_ = node;
  }
}

// Removes and returns the frame at the head of the queue.
FakeFrame *FakeFrameFifo::FifoPop() {
  CHECK(first_ && last_ && "Exhausted fake stack");
  FakeFrame *res = 0;
  if (first_ == last_) {
    res = first_;
    first_ = last_ = 0;
  } else {
    res = first_;
    first_ = first_->next;
  }
  return res;
}

void FakeStack::Init(uptr stack_size) {
  stack_size_ = stack_size;
  alive_ = true;
}

// Unpoisons and unmaps the memory of every allocated size class.
void FakeStack::Cleanup() {
  alive_ = false;
  for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
    uptr mem = allocated_size_classes_[i];
    if (mem) {
      PoisonShadow(mem, ClassMmapSize(i), 0);
      allocated_size_classes_[i] = 0;
      UnmapOrDie((void*)mem, ClassMmapSize(i));
    }
  }
}

uptr FakeStack::ClassMmapSize(uptr size_class) {
  // Limit the allocation size to ClassSize * kMaxRecursionDepth when running
  // with an unlimited stack.
  return RoundUpTo(Min(ClassSize(size_class) * kMaxRecursionDepth, stack_size_),
                   GetPageSizeCached());
}

// Mmaps the backing memory for one size class, carves it into equally sized
// frames and puts them all on this class's free queue.
void FakeStack::AllocateOneSizeClass(uptr size_class) {
  CHECK(ClassMmapSize(size_class) >= GetPageSizeCached());
  uptr new_mem = (uptr)MmapOrDie(ClassMmapSize(size_class), __FUNCTION__);
  // Printf("T%d new_mem[%zu]: %p-%p mmap %zu\n",
  //        GetCurrentThread()->tid(),
  //        size_class, new_mem, new_mem + ClassMmapSize(size_class),
  //        ClassMmapSize(size_class));
  uptr i;
  for (i = 0; i < ClassMmapSize(size_class);
       i += ClassSize(size_class)) {
    size_classes_[size_class].FifoPush((FakeFrame*)(new_mem + i));
  }
  CHECK(i == ClassMmapSize(size_class));
  allocated_size_classes_[size_class] = new_mem;
}

ALWAYS_INLINE uptr FakeStack::AllocateStack(uptr size, uptr real_stack) {
  if (!alive_) return real_stack;
  CHECK(size <= kMaxStackMallocSize && size > 1);
  uptr size_class = ComputeSizeClass(size);
  if (!allocated_size_classes_[size_class]) {
    AllocateOneSizeClass(size_class);
  }
  FakeFrame *fake_frame = size_classes_[size_class].FifoPop();
  CHECK(fake_frame);
  fake_frame->size_minus_one = size - 1;
  fake_frame->real_stack = real_stack;
  // The real stack grows down, so a fake frame whose real_stack is at or
  // below ours belongs to a function that has already returned; retire it.
  while (FakeFrame *top = call_stack_.top()) {
    if (top->real_stack > real_stack) break;
    call_stack_.LifoPop();
    DeallocateFrame(top);
  }
  call_stack_.LifoPush(fake_frame);
  uptr ptr = (uptr)fake_frame;
  // Unpoison the new frame before handing it to the instrumented code.
  PoisonShadow(ptr, size, 0);
  return ptr;
}

// Returns a retired frame to the free queue of its size class.
ALWAYS_INLINE void FakeStack::DeallocateFrame(FakeFrame *fake_frame) {
  CHECK(alive_);
  uptr size = static_cast<uptr>(fake_frame->size_minus_one + 1);
  uptr size_class = ComputeSizeClass(size);
  CHECK(allocated_size_classes_[size_class]);
  uptr ptr = (uptr)fake_frame;
  CHECK(AddrIsInSizeClass(ptr, size_class));
  CHECK(AddrIsInSizeClass(ptr + size - 1, size_class));
  size_classes_[size_class].FifoPush(fake_frame);
}

ALWAYS_INLINE void FakeStack::OnFree(uptr ptr, uptr size, uptr real_stack) {
  FakeFrame *fake_frame = (FakeFrame*)ptr;
  CHECK_EQ(fake_frame->magic, kRetiredStackFrameMagic);
  CHECK_NE(fake_frame->descr, 0);
  CHECK_EQ(fake_frame->size_minus_one, size - 1);
  // Poison the retired frame so that subsequent accesses to it are reported
  // as use-after-return.
  PoisonShadow(ptr, size, kAsanStackAfterReturnMagic);
}

}  // namespace __asan

// ---------------------- Interface ---------------- {{{1
using namespace __asan;  // NOLINT

uptr __asan_stack_malloc(uptr size, uptr real_stack) {
  if (!flags()->use_fake_stack) return real_stack;
  AsanThread *t = GetCurrentThread();
  if (!t) {
    // TSD is gone, use the real stack.
    return real_stack;
  }
  t->LazyInitFakeStack();
  uptr ptr = t->fake_stack()->AllocateStack(size, real_stack);
  // Printf("__asan_stack_malloc %p %zu %p\n", ptr, size, real_stack);
  return ptr;
}

void __asan_stack_free(uptr ptr, uptr size, uptr real_stack) {
  if (!flags()->use_fake_stack) return;
  if (ptr != real_stack) {
    FakeStack::OnFree(ptr, size, real_stack);
  }
}
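
// Below is a rough sketch, for illustration only, of how compiler-emitted
// instrumentation is expected to use the two interface functions above when
// use-after-return detection is enabled. The function 'foo' and the constant
// kFrameSize are hypothetical, not part of the runtime.
//
//   void foo() {
//     char frame[kFrameSize];  // the real stack frame
//     // Prologue: trade the real frame for a fake one (or get the real
//     // frame back if the fake stack is disabled or not yet alive).
//     uptr fake = __asan_stack_malloc(kFrameSize, (uptr)frame);
//     // ... the body addresses its locals relative to 'fake' ...
//     // Epilogue: retire the frame; a no-op if 'fake' is the real frame.
//     __asan_stack_free(fake, kFrameSize, (uptr)frame);
//   }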