// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_
#define BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_

#include <vector>

#include "base/atomicops.h"
#include "base/base_export.h"
#include "base/debug/stack_trace.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/trace_event/heap_profiler_allocation_context.h"

namespace base {
namespace trace_event {

// The allocation context tracker keeps track of thread-local context for heap
// profiling. It includes a pseudo stack of trace events. On every allocation
// the tracker provides a snapshot of its context in the form of an
// |AllocationContext| that is to be stored together with the allocation
// details.
class BASE_EXPORT AllocationContextTracker {
 public:
  enum class CaptureMode: int32_t {
    DISABLED,       // Don't capture anything
    PSEUDO_STACK,   // GetContextSnapshot() returns pseudo stack trace
    NATIVE_STACK    // GetContextSnapshot() returns native (real) stack trace
  };

  // Globally sets capturing mode.
  // TODO(primiano): How to guard against *_STACK -> DISABLED -> *_STACK?
  static void SetCaptureMode(CaptureMode mode);

  // Returns global capturing mode.
  inline static CaptureMode capture_mode() {
    // A little lag after heap profiling is enabled or disabled is fine; it is
    // more important that the check is as cheap as possible when capturing is
    // not enabled, so do not issue a memory barrier in the fast path.
    if (subtle::NoBarrier_Load(&capture_mode_) ==
            static_cast<int32_t>(CaptureMode::DISABLED))
      return CaptureMode::DISABLED;

    // In the slow path, an acquire load is required to pair with the release
    // store in |SetCaptureMode|. This is to ensure that the TLS slot for
    // the thread-local allocation context tracker has been initialized if
    // |capture_mode| returns something other than DISABLED.
    return static_cast<CaptureMode>(subtle::Acquire_Load(&capture_mode_));
  }
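
  // For example, a caller would typically gate on the cheap mode check above
  // before doing any per-allocation work (a sketch, not actual call sites):
  //
  //   if (AllocationContextTracker::capture_mode() ==
  //       AllocationContextTracker::CaptureMode::DISABLED) {
  //     return;  // Fast path: no acquire barrier, no TLS lookup.
  //   }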

  // Returns the thread-local instance, creating one if necessary. Always
  // returns a valid instance, unless called re-entrantly, in which case it
  // returns nullptr in the nested calls.
  static AllocationContextTracker* GetInstanceForCurrentThread();

  // Set the thread name in the AllocationContextTracker of the current thread
  // if capture is enabled.
  static void SetCurrentThreadName(const char* name);
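
  // A sketch of how these static entry points might be wired up (the call
  // sites and the thread name below are illustrative, not actual Chromium
  // code):
  //
  //   // Somewhere in tracing setup, enable pseudo stack capture globally.
  //   AllocationContextTracker::SetCaptureMode(
  //       AllocationContextTracker::CaptureMode::PSEUDO_STACK);
  //
  //   // On each thread, record the thread name; it is used as the first
  //   // entry in the captured pseudo stack.
  //   AllocationContextTracker::SetCurrentThreadName("HypotheticalWorker");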

  // Starts and ends a new ignore scope between which allocations are ignored
  // by the heap profiler. A dummy context that short-circuits to
  // "tracing_overhead" is returned for these allocations.
  void begin_ignore_scope() { ignore_scope_depth_++; }
  void end_ignore_scope() {
    if (ignore_scope_depth_)
      ignore_scope_depth_--;
  }
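
  // For example, profiler-internal bookkeeping could be excluded like this
  // (a sketch; |tracker| is assumed to be the result of
  // GetInstanceForCurrentThread()):
  //
  //   tracker->begin_ignore_scope();
  //   // Allocations made here are attributed to "tracing_overhead".
  //   tracker->end_ignore_scope();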

  // Pushes a frame onto the thread-local pseudo stack.
  void PushPseudoStackFrame(const char* trace_event_name);

  // Pops a frame from the thread-local pseudo stack.
  void PopPseudoStackFrame(const char* trace_event_name);
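
  // These are normally driven by the trace event machinery; a sketch of the
  // expected pairing (|tracker| and the event name are illustrative):
  //
  //   tracker->PushPseudoStackFrame("MessageLoop::RunTask");
  //   // Allocations made here carry this frame on the pseudo stack.
  //   tracker->PopPseudoStackFrame("MessageLoop::RunTask");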

  // Pushes and pops the current task's context. A stack is used to support
  // nested tasks; the top of the stack is used in the allocation context.
  void PushCurrentTaskContext(const char* context);
  void PopCurrentTaskContext(const char* context);
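
  // A sketch of the intended pairing around running a task (|tracker| and the
  // context string are illustrative):
  //
  //   tracker->PushCurrentTaskContext("example/task_context");
  //   // Allocations made while the task runs are clustered under this
  //   // context.
  //   tracker->PopCurrentTaskContext("example/task_context");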

  // Returns a snapshot of the current thread-local context.
  AllocationContext GetContextSnapshot();

  ~AllocationContextTracker();

 private:
  AllocationContextTracker();

  static subtle::Atomic32 capture_mode_;

  // The pseudo stack where frames are |TRACE_EVENT| names.
  std::vector<const char*> pseudo_stack_;

  // The thread name is used as the first entry in the pseudo stack.
  const char* thread_name_;

  // Stack of tasks' contexts. The context serves as a dimension separate from
  // the pseudo stack and is used to cluster allocations.
  std::vector<const char*> task_contexts_;

  uint32_t ignore_scope_depth_;

  DISALLOW_COPY_AND_ASSIGN(AllocationContextTracker);
};

}  // namespace trace_event
}  // namespace base

#endif  // BASE_TRACE_EVENT_HEAP_PROFILER_ALLOCATION_CONTEXT_TRACKER_H_