// Home | History | Annotate | Download | only in rtl  (code-browser chrome, not part of the source)
//===-- tsan_stack_trace.cc -----------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
//#include "sanitizer_common/sanitizer_placement_new.h"
     14 #include "tsan_stack_trace.h"
     15 #include "tsan_rtl.h"
     16 #include "tsan_mman.h"
     17 
     18 namespace __tsan {
     19 
     20 StackTrace::StackTrace()
     21     : n_()
     22     , s_()
     23     , c_() {
     24 }
     25 
     26 StackTrace::StackTrace(uptr *buf, uptr cnt)
     27     : n_()
     28     , s_(buf)
     29     , c_(cnt) {
     30   CHECK_NE(buf, 0);
     31   CHECK_NE(cnt, 0);
     32 }
     33 
// Releases any heap-owned frame storage via Reset(); an external
// (preallocated) buffer is never freed here.
StackTrace::~StackTrace() {
  Reset();
}
     37 
     38 void StackTrace::Reset() {
     39   if (s_ && !c_) {
     40     CHECK_NE(n_, 0);
     41     internal_free(s_);
     42     s_ = 0;
     43   }
     44   n_ = 0;
     45 }
     46 
     47 void StackTrace::Init(const uptr *pcs, uptr cnt) {
     48   Reset();
     49   if (cnt == 0)
     50     return;
     51   if (c_) {
     52     CHECK_NE(s_, 0);
     53     CHECK_LE(cnt, c_);
     54   } else {
     55     s_ = (uptr*)internal_alloc(MBlockStackTrace, cnt * sizeof(s_[0]));
     56   }
     57   n_ = cnt;
     58   internal_memcpy(s_, pcs, cnt * sizeof(s_[0]));
     59 }
     60 
// Captures the current call stack of 'thr' from its shadow stack,
// optionally appending 'toppc' as the topmost frame (toppc == 0 means
// "no extra frame"). Oversized stacks are truncated by dropping the
// oldest frames.
void StackTrace::ObtainCurrent(ThreadState *thr, uptr toppc) {
  Reset();
  // Current shadow-stack depth.
  n_ = thr->shadow_stack_pos - thr->shadow_stack;
  // "!!toppc" is 1 when an extra top frame is requested, else 0.
  if (n_ + !!toppc == 0)
    return;
  // First shadow-stack index to copy; advanced below when the
  // destination cannot hold the whole stack (oldest frames dropped).
  uptr start = 0;
  if (c_) {
    // External buffer: frames plus the optional top pc must fit in c_.
    CHECK_NE(s_, 0);
    if (n_ + !!toppc > c_) {
      start = n_ - c_ + !!toppc;
      n_ = c_ - !!toppc;
    }
  } else {
    // Cap potentially huge stacks.
    if (n_ + !!toppc > kTraceStackSize) {
      start = n_ - kTraceStackSize + !!toppc;
      n_ = kTraceStackSize - !!toppc;
    }
    // Heap-allocate exactly enough room for the frames plus optional toppc.
    s_ = (uptr*)internal_alloc(MBlockStackTrace,
                               (n_ + !!toppc) * sizeof(s_[0]));
  }
  for (uptr i = 0; i < n_; i++)
    s_[i] = thr->shadow_stack[start + i];
  if (toppc) {
    // Append the caller-supplied topmost pc.
    s_[n_] = toppc;
    n_++;
  }
}
     89 
     90 void StackTrace::CopyFrom(const StackTrace& other) {
     91   Reset();
     92   Init(other.Begin(), other.Size());
     93 }
     94 
     95 bool StackTrace::IsEmpty() const {
     96   return n_ == 0;
     97 }
     98 
// Number of pcs currently stored in the trace.
uptr StackTrace::Size() const {
  return n_;
}
    102 
    103 uptr StackTrace::Get(uptr i) const {
    104   CHECK_LT(i, n_);
    105   return s_[i];
    106 }
    107 
// Pointer to the underlying pc array (Size() entries); may be 0 when the
// trace is empty and has no storage.
const uptr *StackTrace::Begin() const {
  return s_;
}
    111 
    112 }  // namespace __tsan
    113