// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_
#define BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_

#include <vector>

#include "base/atomicops.h"
#include "base/base_export.h"
#include "base/debug/stack_trace.h"
#include "base/macros.h"
#include "base/trace_event/heap_profiler_allocation_context.h"

namespace base {
namespace trace_event {

// The allocation context tracker keeps track of thread-local context for heap
// profiling. It includes a pseudo stack of trace events. On every allocation
// the tracker provides a snapshot of its context in the form of an
// |AllocationContext| that is to be stored together with the allocation
// details.
class BASE_EXPORT AllocationContextTracker {
 public:
  enum class CaptureMode: int32_t {
    DISABLED,       // Don't capture anything
    PSEUDO_STACK,   // GetContextSnapshot() returns pseudo stack trace
    NATIVE_STACK    // GetContextSnapshot() returns native (real) stack trace
  };

  // Stack frame constructed from trace events in the codebase.
  struct BASE_EXPORT PseudoStackFrame {
    const char* trace_event_category;
    const char* trace_event_name;

    bool operator==(const PseudoStackFrame& other) const {
      return trace_event_category == other.trace_event_category &&
             trace_event_name == other.trace_event_name;
    }
  };

  // Globally sets capturing mode.
  // TODO(primiano): How to guard against *_STACK -> DISABLED -> *_STACK?
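  //
  // Example (illustrative only, not a prescribed call site): enabling pseudo
  // stack capture before heap profiling starts could look like:
  //
  //   AllocationContextTracker::SetCaptureMode(
  //       AllocationContextTracker::CaptureMode::PSEUDO_STACK);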
  static void SetCaptureMode(CaptureMode mode);

  // Returns global capturing mode.
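  //
  // Illustrative fast-path check, as a hypothetical allocation hook might
  // perform it (the hook itself is not part of this header):
  //
  //   if (AllocationContextTracker::capture_mode() ==
  //       AllocationContextTracker::CaptureMode::DISABLED) {
  //     return;  // Skip all heap profiling bookkeeping.
  //   }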
  inline static CaptureMode capture_mode() {
    // A little lag after heap profiling is enabled or disabled is fine; it is
    // more important that the check is as cheap as possible when capturing is
    // not enabled, so do not issue a memory barrier in the fast path.
    if (subtle::NoBarrier_Load(&capture_mode_) ==
            static_cast<int32_t>(CaptureMode::DISABLED))
      return CaptureMode::DISABLED;

    // In the slow path, an acquire load is required to pair with the release
    // store in |SetCaptureMode|. This is to ensure that the TLS slot for
    // the thread-local allocation context tracker has been initialized if
    // |capture_mode| returns something other than DISABLED.
    return static_cast<CaptureMode>(subtle::Acquire_Load(&capture_mode_));
  }

  // Returns the thread-local instance, creating one if necessary. Always
  // returns a valid instance, unless it is called re-entrantly, in which case
  // it returns nullptr in the nested calls.
  static AllocationContextTracker* GetInstanceForCurrentThread();

  // Sets the thread name in the AllocationContextTracker of the current thread
  // if capture is enabled.
  static void SetCurrentThreadName(const char* name);

  // Starts and ends an ignore scope; allocations made between the two calls
  // are ignored by the heap profiler. GetContextSnapshot() returns false
  // while allocations are being ignored.
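  //
  // Example (illustrative sketch): suppressing reporting of the profiler's
  // own bookkeeping allocations.
  //
  //   AllocationContextTracker* tracker =
  //       AllocationContextTracker::GetInstanceForCurrentThread();
  //   if (tracker) {
  //     tracker->begin_ignore_scope();
  //     // Allocations made here are not recorded.
  //     tracker->end_ignore_scope();
  //   }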
  void begin_ignore_scope() { ignore_scope_depth_++; }
  void end_ignore_scope() {
    if (ignore_scope_depth_)
      ignore_scope_depth_--;
  }

  // Pushes a frame onto the thread-local pseudo stack.
  void PushPseudoStackFrame(PseudoStackFrame stack_frame);

  // Pops a frame from the thread-local pseudo stack.
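  //
  // Illustrative push/pop pairing (in practice this is driven by the trace
  // event machinery rather than written by hand; |tracker| is obtained via
  // GetInstanceForCurrentThread()):
  //
  //   PseudoStackFrame frame = {"category", "EventName"};
  //   tracker->PushPseudoStackFrame(frame);
  //   // Code attributed to "EventName" runs here.
  //   tracker->PopPseudoStackFrame(frame);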
  void PopPseudoStackFrame(PseudoStackFrame stack_frame);

  // Pushes and pops the current task's context. A stack is used to support
  // nested tasks; the top of the stack is used in the allocation context.
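  //
  // Illustrative nesting (sketch; |tracker| obtained as above, context names
  // are made up for the example):
  //
  //   tracker->PushCurrentTaskContext("outer_task");
  //   tracker->PushCurrentTaskContext("nested_task");
  //   // Allocations here are attributed to "nested_task".
  //   tracker->PopCurrentTaskContext("nested_task");
  //   tracker->PopCurrentTaskContext("outer_task");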
  void PushCurrentTaskContext(const char* context);
  void PopCurrentTaskContext(const char* context);

  // Fills |snapshot| with the current thread-local context. Does not fill it
  // and returns false if allocations are being ignored.
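  //
  // Illustrative use from a hypothetical allocation hook (|tracker| is
  // obtained as above; the hook itself is not part of this header):
  //
  //   AllocationContext context;
  //   if (tracker->GetContextSnapshot(&context)) {
  //     // Store |context| together with the allocation's address and size.
  //   }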
  bool GetContextSnapshot(AllocationContext* snapshot);

  ~AllocationContextTracker();

 private:
  AllocationContextTracker();

  static subtle::Atomic32 capture_mode_;

  // The pseudo stack where frames are |TRACE_EVENT| names.
  std::vector<PseudoStackFrame> pseudo_stack_;

  // The thread name is used as the first entry in the pseudo stack.
  const char* thread_name_;

  // Stack of task contexts. The context serves as a dimension, separate from
  // the pseudo stack, along which allocations are clustered.
  std::vector<const char*> task_contexts_;

  uint32_t ignore_scope_depth_;

  DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker);
};

}  // namespace trace_event
}  // namespace base

#endif  // BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_