// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COUNTERS_H_
#define V8_COUNTERS_H_

#include "include/v8.h"
#include "src/allocation.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/time.h"
#include "src/globals.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

// StatsTable is an interface for plugging into external
// counters for monitoring.  Counters can be looked up and
// manipulated by name.

class StatsTable {
 public:
  // Register an application-defined function where
  // counters can be looked up.
  void SetCounterFunction(CounterLookupCallback f) {
    lookup_function_ = f;
  }

  // Register an application-defined function to create
  // a histogram for passing to the AddHistogramSample function.
  void SetCreateHistogramFunction(CreateHistogramCallback f) {
    create_histogram_function_ = f;
  }

  // Register an application-defined function to add a sample
  // to a histogram created with the CreateHistogram function.
  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
    add_histogram_sample_function_ = f;
  }

  bool HasCounterFunction() const {
    return lookup_function_ != NULL;
  }

  // Look up the location of a counter by name.  If the lookup
  // is successful, returns a non-NULL pointer for writing the
  // value of the counter.  Each thread calling this function
  // may receive a different location to store its counter.
  // The return value must not be cached and re-used across
  // threads, although a single thread is free to cache it.
  int* FindLocation(const char* name) {
    if (!lookup_function_) return NULL;
    return lookup_function_(name);
  }

  // Create a histogram by name. If creation is successful,
  // returns a non-NULL pointer for use with the AddHistogramSample
  // function. min and max define the expected minimum and maximum
  // sample values. buckets is the maximum number of buckets
  // that the samples will be grouped into.
  void* CreateHistogram(const char* name,
                        int min,
                        int max,
                        size_t buckets) {
    if (!create_histogram_function_) return NULL;
    return create_histogram_function_(name, min, max, buckets);
  }

  // Add a sample to a histogram created with the CreateHistogram
  // function.
  void AddHistogramSample(void* histogram, int sample) {
    if (!add_histogram_sample_function_) return;
    return add_histogram_sample_function_(histogram, sample);
  }

 private:
  StatsTable();

  CounterLookupCallback lookup_function_;
  CreateHistogramCallback create_histogram_function_;
  AddHistogramSampleCallback add_histogram_sample_function_;

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(StatsTable);
};
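
// Usage sketch (illustrative only; the callback and accessor names other
// than the StatsTable methods above are assumptions, not part of this
// header):
//
//   static int* CounterLookup(const char* name) {
//     // Embedder-owned storage; must stay valid for the isolate's lifetime.
//     return GetOrCreateSlotFor(name);  // hypothetical embedder helper
//   }
//   ...
//   StatsTable* table = isolate->stats_table();  // assumed Isolate accessor
//   table->SetCounterFunction(CounterLookup);
//   int* loc = table->FindLocation("c:V8.GlobalHandles");
//   if (loc != NULL) (*loc)++;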

// StatsCounters are dynamically created values which can be tracked in
// the StatsTable.  They are designed to be lightweight to create and
// easy to use.
//
// Internally, a counter represents a value in a row of a StatsTable.
// The row has a 32-bit value for each process/thread in the table and also
// a name (stored in the table metadata).  Since the storage location can be
// thread-specific, this class cannot be shared across threads.
class StatsCounter {
 public:
  StatsCounter() { }
  explicit StatsCounter(Isolate* isolate, const char* name)
      : isolate_(isolate), name_(name), ptr_(NULL), lookup_done_(false) { }

  // Sets the counter to a specific value.
  void Set(int value) {
    int* loc = GetPtr();
    if (loc) *loc = value;
  }

  // Increments the counter.
  void Increment() {
    int* loc = GetPtr();
    if (loc) (*loc)++;
  }

  void Increment(int value) {
    int* loc = GetPtr();
    if (loc)
      (*loc) += value;
  }

  // Decrements the counter.
  void Decrement() {
    int* loc = GetPtr();
    if (loc) (*loc)--;
  }

  void Decrement(int value) {
    int* loc = GetPtr();
    if (loc) (*loc) -= value;
  }

  // Is this counter enabled?
  // Returns false if the counter could not be looked up in the stats table
  // (e.g. no counter function is registered or the table is full).
  bool Enabled() {
    return GetPtr() != NULL;
  }

  // Get the internal pointer to the counter. This is used
  // by the code generator to emit code that manipulates a
  // given counter without calling the runtime system.
  int* GetInternalPointer() {
    int* loc = GetPtr();
    DCHECK(loc != NULL);
    return loc;
  }

  // Reset the cached internal pointer.
  void Reset() { lookup_done_ = false; }

 protected:
  // Returns the cached address of this counter location.
  int* GetPtr() {
    if (lookup_done_) return ptr_;
    lookup_done_ = true;
    ptr_ = FindLocationInStatsTable();
    return ptr_;
  }

 private:
  int* FindLocationInStatsTable() const;

  Isolate* isolate_;
  const char* name_;
  int* ptr_;
  bool lookup_done_;
};
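
// Usage sketch (illustrative; uses the counter accessors generated on the
// Counters class below, reached through an Isolate* named isolate):
//
//   StatsCounter* handles = isolate->counters()->global_handles();
//   if (handles->Enabled()) handles->Increment();
//   handles->Set(0);  // writes only if the embedder registered a lookup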

// A Histogram represents a dynamically created histogram in the StatsTable.
// It will be registered with the histogram system on first use.
class Histogram {
 public:
  Histogram() { }
  Histogram(const char* name,
            int min,
            int max,
            int num_buckets,
            Isolate* isolate)
      : name_(name),
        min_(min),
        max_(max),
        num_buckets_(num_buckets),
        histogram_(NULL),
        lookup_done_(false),
        isolate_(isolate) { }

  // Add a single sample to this histogram.
  void AddSample(int sample);

  // Returns true if this histogram is enabled.
  bool Enabled() {
    return GetHistogram() != NULL;
  }

  // Reset the cached internal pointer.
  void Reset() {
    lookup_done_ = false;
  }

 protected:
  // Returns the handle to the histogram.
  void* GetHistogram() {
    if (!lookup_done_) {
      lookup_done_ = true;
      histogram_ = CreateHistogram();
    }
    return histogram_;
  }

  const char* name() { return name_; }
  Isolate* isolate() const { return isolate_; }

 private:
  void* CreateHistogram() const;

  const char* name_;
  int min_;
  int max_;
  int num_buckets_;
  void* histogram_;
  bool lookup_done_;
  Isolate* isolate_;
};
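
// Usage sketch (illustrative; the accessor is generated from
// HISTOGRAM_RANGE_LIST below, and age_in_gc_cycles is a placeholder value):
//
//   Histogram* ages = isolate->counters()->detached_context_age_in_gc();
//   if (ages->Enabled()) ages->AddSample(age_in_gc_cycles);
//
// Samples are forwarded to the embedder's histogram via the callbacks
// registered on the StatsTable; without callbacks, AddSample is effectively
// a no-op.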

// A HistogramTimer allows distributions of results to be created.
class HistogramTimer : public Histogram {
 public:
  enum Resolution {
    MILLISECOND,
    MICROSECOND
  };

  HistogramTimer() {}
  HistogramTimer(const char* name, int min, int max, Resolution resolution,
                 int num_buckets, Isolate* isolate)
      : Histogram(name, min, max, num_buckets, isolate),
        resolution_(resolution) {}

  // Start the timer.
  void Start();

  // Stop the timer and record the results.
  void Stop();

  // Returns true if the timer is running.
  bool Running() {
    return Enabled() && timer_.IsStarted();
  }

  // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
#ifdef DEBUG
  base::ElapsedTimer* timer() { return &timer_; }
#endif

 private:
  base::ElapsedTimer timer_;
  Resolution resolution_;
};

// Helper class for scoping a HistogramTimer.
// TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
// Parser is currently reentrant (when it throws an error, we call back
// into JavaScript and all bets are off), but ElapsedTimer is not
// reentry-safe. Fix this properly and remove |allow_nesting|.
class HistogramTimerScope BASE_EMBEDDED {
 public:
  explicit HistogramTimerScope(HistogramTimer* timer,
                               bool allow_nesting = false)
#ifdef DEBUG
      : timer_(timer),
        skipped_timer_start_(false) {
    if (timer_->timer()->IsStarted() && allow_nesting) {
      skipped_timer_start_ = true;
    } else {
      timer_->Start();
    }
  }
#else
      : timer_(timer) {
    timer_->Start();
  }
#endif
  ~HistogramTimerScope() {
#ifdef DEBUG
    if (!skipped_timer_start_) {
      timer_->Stop();
    }
#else
    timer_->Stop();
#endif
  }

 private:
  HistogramTimer* timer_;
#ifdef DEBUG
  bool skipped_timer_start_;
#endif
};
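
// Usage sketch (illustrative; the compile() accessor comes from
// HISTOGRAM_TIMER_LIST below):
//
//   {
//     HistogramTimerScope timer_scope(isolate->counters()->compile());
//     // ... work to be timed ...
//   }  // the destructor stops the timer and records the elapsed time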


// A histogram timer that can aggregate events within a larger scope.
//
// Intended use of this timer is to have an outer (aggregating) and an inner
// (to be aggregated) scope, where the inner scope measures the time of events,
// and all those inner scope measurements will be summed up by the outer scope.
// An example use might be to aggregate the time spent in lazy compilation
// while running a script.
//
// Helpers:
// - AggregatingHistogramTimerScope, the "outer" scope within which
//     times will be summed up.
// - AggregatedHistogramTimerScope, the "inner" scope which defines the
//     events to be timed.
class AggregatableHistogramTimer : public Histogram {
 public:
  AggregatableHistogramTimer() {}
  AggregatableHistogramTimer(const char* name, int min, int max,
                             int num_buckets, Isolate* isolate)
      : Histogram(name, min, max, num_buckets, isolate) {}

  // Start/stop the "outer" scope.
  void Start() { time_ = base::TimeDelta(); }
  void Stop() { AddSample(static_cast<int>(time_.InMicroseconds())); }

  // Add a time value ("inner" scope).
  void Add(base::TimeDelta other) { time_ += other; }

 private:
  base::TimeDelta time_;
};


// A helper class for use with AggregatableHistogramTimer.
class AggregatingHistogramTimerScope {
 public:
  explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    histogram_->Start();
  }
  ~AggregatingHistogramTimerScope() { histogram_->Stop(); }

 private:
  AggregatableHistogramTimer* histogram_;
};


// A helper class for use with AggregatableHistogramTimer.
class AggregatedHistogramTimerScope {
 public:
  explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    timer_.Start();
  }
  ~AggregatedHistogramTimerScope() { histogram_->Add(timer_.Elapsed()); }

 private:
  base::ElapsedTimer timer_;
  AggregatableHistogramTimer* histogram_;
};
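
// Usage sketch (illustrative; compile_lazy() is generated from
// AGGREGATABLE_HISTOGRAM_TIMER_LIST below):
//
//   AggregatingHistogramTimerScope outer(isolate->counters()->compile_lazy());
//   while (...) {  // e.g. while running a script
//     AggregatedHistogramTimerScope inner(
//         isolate->counters()->compile_lazy());
//     // ... lazily compile one function; each inner scope adds its time ...
//   }
//   // Leaving the outer scope records the summed time as a single sample.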


// AggregatedMemoryHistogram collects (time, value) sample pairs and turns
// them into time-uniform samples for the backing histogram, such that the
// backing histogram receives one sample every T ms, where T is controlled
// by FLAG_histogram_interval.
//
// More formally: let F be a real-valued function that maps time to sample
// values. We define F as a linear interpolation between adjacent samples. For
// each time interval [x; x + T) the backing histogram gets one sample value
// that is the average of F(t) in the interval.
template <typename Histogram>
class AggregatedMemoryHistogram {
 public:
  AggregatedMemoryHistogram()
      : is_initialized_(false),
        start_ms_(0.0),
        last_ms_(0.0),
        aggregate_value_(0.0),
        last_value_(0.0),
        backing_histogram_(NULL) {}

  explicit AggregatedMemoryHistogram(Histogram* backing_histogram)
      : AggregatedMemoryHistogram() {
    backing_histogram_ = backing_histogram;
  }

  // Invariants that hold before and after AddSample if
  // is_initialized_ is true:
  //
  // 1) We have processed all samples that came in before start_ms_ and sent
  //    the corresponding aggregated samples to the backing histogram.
  // 2) (last_ms_, last_value_) is the last received sample.
  // 3) last_ms_ < start_ms_ + FLAG_histogram_interval.
  // 4) aggregate_value_ is the average of the function that is constructed by
  //    linearly interpolating samples received between start_ms_ and last_ms_.
  void AddSample(double current_ms, double current_value);

 private:
  double Aggregate(double current_ms, double current_value);
  bool is_initialized_;
  double start_ms_;
  double last_ms_;
  double aggregate_value_;
  double last_value_;
  Histogram* backing_histogram_;
};
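
// Usage sketch (illustrative; the aggregated_memory_heap_used() accessor is
// generated on the Counters class below, and the time source and memory
// figure below are placeholders for whatever the caller actually samples):
//
//   AggregatedMemoryHistogram<Histogram>* h =
//       isolate->counters()->aggregated_memory_heap_used();
//   h->AddSample(current_time_in_ms, heap_used_in_kb);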


template <typename Histogram>
void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
                                                     double current_value) {
  if (!is_initialized_) {
    aggregate_value_ = current_value;
    start_ms_ = current_ms;
    last_value_ = current_value;
    last_ms_ = current_ms;
    is_initialized_ = true;
  } else {
    const double kEpsilon = 1e-6;
    const int kMaxSamples = 1000;
    if (current_ms < last_ms_ + kEpsilon) {
      // Two samples have the same time, remember the last one.
      last_value_ = current_value;
    } else {
      double sample_interval_ms = FLAG_histogram_interval;
      double end_ms = start_ms_ + sample_interval_ms;
      if (end_ms <= current_ms + kEpsilon) {
        // Linearly interpolate between the last_ms_ and the current_ms.
        double slope = (current_value - last_value_) / (current_ms - last_ms_);
        int i;
        // Send aggregated samples to the backing histogram from the start_ms
        // to the current_ms.
        for (i = 0; i < kMaxSamples && end_ms <= current_ms + kEpsilon; i++) {
          double end_value = last_value_ + (end_ms - last_ms_) * slope;
          double sample_value;
          if (i == 0) {
            // Take aggregate_value_ into account.
            sample_value = Aggregate(end_ms, end_value);
          } else {
            // There is no aggregate_value_ for i > 0.
            sample_value = (last_value_ + end_value) / 2;
          }
          backing_histogram_->AddSample(static_cast<int>(sample_value + 0.5));
          last_value_ = end_value;
          last_ms_ = end_ms;
          end_ms += sample_interval_ms;
        }
        if (i == kMaxSamples) {
          // We hit the sample limit, ignore the remaining samples.
          aggregate_value_ = current_value;
          start_ms_ = current_ms;
        } else {
          aggregate_value_ = last_value_;
          start_ms_ = last_ms_;
        }
      }
      aggregate_value_ = current_ms > start_ms_ + kEpsilon
                             ? Aggregate(current_ms, current_value)
                             : aggregate_value_;
      last_value_ = current_value;
      last_ms_ = current_ms;
    }
  }
}


template <typename Histogram>
double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
                                                       double current_value) {
  double interval_ms = current_ms - start_ms_;
  double value = (current_value + last_value_) / 2;
  // The aggregate_value_ is the average for [start_ms_; last_ms_].
  // The value is the average for [last_ms_; current_ms].
  // Return the weighted average of the aggregate_value_ and the value.
  return aggregate_value_ * ((last_ms_ - start_ms_) / interval_ms) +
         value * ((current_ms - last_ms_) / interval_ms);
}
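
// Worked example (illustrative numbers, assuming FLAG_histogram_interval is
// 10 ms): samples (0 ms, 100), (4 ms, 100) and (16 ms, 160) arrive. The
// interpolation F rises from 100 at 4 ms to 160 at 16 ms (slope 5 per ms),
// so F(10 ms) = 130. The first full interval [0 ms; 10 ms) therefore emits
// one backing-histogram sample: the average of 100 (over [0; 4)) and
// (100 + 130) / 2 = 115 (over [4; 10)), weighted by interval length, i.e.
// 100 * 0.4 + 115 * 0.6 = 109. The aggregation window then restarts at
// start_ms_ = 10 ms and the remaining [10 ms; 16 ms] portion is carried
// forward in aggregate_value_.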


#define HISTOGRAM_RANGE_LIST(HR)                                              \
  /* Generic range histograms */                                              \
  HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21)        \
  HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101)   \
  HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \
  HR(gc_idle_time_limit_undershot, V8.GCIdleTimeLimit.Undershot, 0, 10000,    \
     101)                                                                     \
  HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6)             \
  HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20)        \
  HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7)

#define HISTOGRAM_TIMER_LIST(HT)                                              \
  /* Garbage collection timers. */                                            \
  HT(gc_compactor, V8.GCCompactor, 10000, MILLISECOND)                        \
  HT(gc_finalize, V8.GCFinalizeMC, 10000, MILLISECOND)                        \
  HT(gc_finalize_reduce_memory, V8.GCFinalizeMCReduceMemory, 10000,           \
     MILLISECOND)                                                             \
  HT(gc_scavenger, V8.GCScavenger, 10000, MILLISECOND)                        \
  HT(gc_context, V8.GCContext, 10000,                                         \
     MILLISECOND) /* GC context cleanup time */                               \
  HT(gc_idle_notification, V8.GCIdleNotification, 10000, MILLISECOND)         \
  HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND)     \
  HT(gc_incremental_marking_start, V8.GCIncrementalMarkingStart, 10000,       \
     MILLISECOND)                                                             \
  HT(gc_incremental_marking_finalize, V8.GCIncrementalMarkingFinalize, 10000, \
     MILLISECOND)                                                             \
  HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000,           \
     MILLISECOND)                                                             \
  /* Parsing timers. */                                                       \
  HT(parse, V8.ParseMicroSeconds, 1000000, MICROSECOND)                       \
  HT(parse_lazy, V8.ParseLazyMicroSeconds, 1000000, MICROSECOND)              \
  HT(pre_parse, V8.PreParseMicroSeconds, 1000000, MICROSECOND)                \
  /* Compilation times. */                                                    \
  HT(compile, V8.CompileMicroSeconds, 1000000, MICROSECOND)                   \
  HT(compile_eval, V8.CompileEvalMicroSeconds, 1000000, MICROSECOND)          \
  /* Serialization as part of compilation (code caching). */                  \
  HT(compile_serialize, V8.CompileSerializeMicroSeconds, 100000, MICROSECOND) \
  HT(compile_deserialize, V8.CompileDeserializeMicroSeconds, 1000000,         \
     MICROSECOND)                                                             \
  /* Total compilation time incl. caching/parsing. */                         \
  HT(compile_script, V8.CompileScriptMicroSeconds, 1000000, MICROSECOND)


#define AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT) \
  AHT(compile_lazy, V8.CompileLazyMicroSeconds)


#define HISTOGRAM_PERCENTAGE_LIST(HP)                                          \
  /* Heap fragmentation. */                                                    \
  HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal)        \
  HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
  HP(external_fragmentation_code_space,                                        \
     V8.MemoryExternalFragmentationCodeSpace)                                  \
  HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
  HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace)   \
  /* Percentages of heap committed to each space. */                           \
  HP(heap_fraction_new_space, V8.MemoryHeapFractionNewSpace)                   \
  HP(heap_fraction_old_space, V8.MemoryHeapFractionOldSpace)                   \
  HP(heap_fraction_code_space, V8.MemoryHeapFractionCodeSpace)                 \
  HP(heap_fraction_map_space, V8.MemoryHeapFractionMapSpace)                   \
  HP(heap_fraction_lo_space, V8.MemoryHeapFractionLoSpace)                     \
  /* Percentage of crankshafted codegen. */                                    \
  HP(codegen_fraction_crankshaft, V8.CodegenFractionCrankshaft)


#define HISTOGRAM_LEGACY_MEMORY_LIST(HM)                                      \
  HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted)          \
  HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed)                    \
  HM(heap_sample_map_space_committed, V8.MemoryHeapSampleMapSpaceCommitted)   \
  HM(heap_sample_code_space_committed, V8.MemoryHeapSampleCodeSpaceCommitted) \
  HM(heap_sample_maximum_committed, V8.MemoryHeapSampleMaximumCommitted)

#define HISTOGRAM_MEMORY_LIST(HM)                   \
  HM(memory_heap_committed, V8.MemoryHeapCommitted) \
  HM(memory_heap_used, V8.MemoryHeapUsed)


// WARNING: STATS_COUNTER_LIST_* is a very large macro that causes MSVC
// Intellisense to crash.  It was broken into two macros (each of length 40
// lines) rather than one macro (of length about 80 lines) to work around
// this problem.  Please avoid using macros of this length when possible.
#define STATS_COUNTER_LIST_1(SC)                                      \
  /* Global Handle Count */                                           \
  SC(global_handles, V8.GlobalHandles)                                \
  /* OS Memory allocated */                                           \
  SC(memory_allocated, V8.OsMemoryAllocated)                          \
  SC(normalized_maps, V8.NormalizedMaps)                              \
  SC(props_to_dictionary, V8.ObjectPropertiesToDictionary)            \
  SC(elements_to_dictionary, V8.ObjectElementsToDictionary)           \
  SC(alive_after_last_gc, V8.AliveAfterLastGC)                        \
  SC(objs_since_last_young, V8.ObjsSinceLastYoung)                    \
  SC(objs_since_last_full, V8.ObjsSinceLastFull)                      \
  SC(string_table_capacity, V8.StringTableCapacity)                   \
  SC(number_of_symbols, V8.NumberOfSymbols)                           \
  SC(script_wrappers, V8.ScriptWrappers)                              \
  SC(call_initialize_stubs, V8.CallInitializeStubs)                   \
  SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs)           \
  SC(call_normal_stubs, V8.CallNormalStubs)                           \
  SC(call_megamorphic_stubs, V8.CallMegamorphicStubs)                 \
  SC(inlined_copied_elements, V8.InlinedCopiedElements)               \
  SC(arguments_adaptors, V8.ArgumentsAdaptors)                        \
  SC(compilation_cache_hits, V8.CompilationCacheHits)                 \
  SC(compilation_cache_misses, V8.CompilationCacheMisses)             \
  /* Amount of evaled source code. */                                 \
  SC(total_eval_size, V8.TotalEvalSize)                               \
  /* Amount of loaded source code. */                                 \
  SC(total_load_size, V8.TotalLoadSize)                               \
  /* Amount of parsed source code. */                                 \
  SC(total_parse_size, V8.TotalParseSize)                             \
  /* Amount of source code skipped over using preparsing. */          \
  SC(total_preparse_skipped, V8.TotalPreparseSkipped)                 \
  /* Number of symbol lookups skipped using preparsing. */            \
  SC(total_preparse_symbols_skipped, V8.TotalPreparseSymbolSkipped)   \
  /* Amount of compiled source code. */                               \
  SC(total_compile_size, V8.TotalCompileSize)                         \
  /* Amount of source code compiled with the full codegen. */         \
  SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize)   \
  /* Number of contexts created from scratch. */                      \
  SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch)    \
  /* Number of contexts created by partial snapshot. */               \
  SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot)      \
  /* Number of code objects found from pc. */                         \
  SC(pc_to_code, V8.PcToCode)                                         \
  SC(pc_to_code_cached, V8.PcToCodeCached)                            \
  /* The store-buffer implementation of the write barrier. */         \
  SC(store_buffer_compactions, V8.StoreBufferCompactions)             \
  SC(store_buffer_overflows, V8.StoreBufferOverflows)


#define STATS_COUNTER_LIST_2(SC)                                               \
  /* Number of code stubs. */                                                  \
  SC(code_stubs, V8.CodeStubs)                                                 \
  /* Amount of stub code. */                                                   \
  SC(total_stubs_code_size, V8.TotalStubsCodeSize)                             \
  /* Amount of (JS) compiled code. */                                          \
  SC(total_compiled_code_size, V8.TotalCompiledCodeSize)                       \
  SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest)            \
  SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
  SC(gc_compactor_caused_by_oldspace_exhaustion,                               \
     V8.GCCompactorCausedByOldspaceExhaustion)                                 \
  SC(gc_last_resort_from_js, V8.GCLastResortFromJS)                            \
  SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles)                  \
  /* How is the generic keyed-load stub used? */                               \
  SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi)                           \
  SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol)                     \
  SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache)          \
  SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow)                         \
  SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs)               \
  SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow)            \
  /* How is the generic keyed-call stub used? */                               \
  SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast)                  \
  SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict)                  \
  SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache)          \
  SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict)            \
  SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow)                         \
  SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad)                \
  SC(named_load_global_stub, V8.NamedLoadGlobalStub)                           \
  SC(named_store_global_inline, V8.NamedStoreGlobalInline)                     \
  SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss)            \
  SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs)             \
  SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow)          \
  SC(store_normal_miss, V8.StoreNormalMiss)                                    \
  SC(store_normal_hit, V8.StoreNormalHit)                                      \
  SC(cow_arrays_created_stub, V8.COWArraysCreatedStub)                         \
  SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime)                   \
  SC(cow_arrays_converted, V8.COWArraysConverted)                              \
  SC(call_miss, V8.CallMiss)                                                   \
  SC(keyed_call_miss, V8.KeyedCallMiss)                                        \
  SC(load_miss, V8.LoadMiss)                                                   \
  SC(keyed_load_miss, V8.KeyedLoadMiss)                                        \
  SC(call_const, V8.CallConst)                                                 \
  SC(call_const_fast_api, V8.CallConstFastApi)                                 \
  SC(call_const_interceptor, V8.CallConstInterceptor)                          \
  SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi)          \
  SC(call_global_inline, V8.CallGlobalInline)                                  \
  SC(call_global_inline_miss, V8.CallGlobalInlineMiss)                         \
  SC(constructed_objects, V8.ConstructedObjects)                               \
  SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime)                \
  SC(negative_lookups, V8.NegativeLookups)                                     \
  SC(negative_lookups_miss, V8.NegativeLookupsMiss)                            \
  SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes)             \
  SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses)             \
  SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates)           \
  SC(array_function_runtime, V8.ArrayFunctionRuntime)                          \
  SC(array_function_native, V8.ArrayFunctionNative)                            \
  SC(enum_cache_hits, V8.EnumCacheHits)                                        \
  SC(enum_cache_misses, V8.EnumCacheMisses)                                    \
  SC(fast_new_closure_total, V8.FastNewClosureTotal)                           \
  SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized)            \
  SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized)    \
  SC(string_add_runtime, V8.StringAddRuntime)                                  \
  SC(string_add_native, V8.StringAddNative)                                    \
  SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte)      \
  SC(sub_string_runtime, V8.SubStringRuntime)                                  \
  SC(sub_string_native, V8.SubStringNative)                                    \
  SC(string_add_make_two_char, V8.StringAddMakeTwoChar)                        \
  SC(string_compare_native, V8.StringCompareNative)                            \
  SC(string_compare_runtime, V8.StringCompareRuntime)                          \
  SC(regexp_entry_runtime, V8.RegExpEntryRuntime)                              \
  SC(regexp_entry_native, V8.RegExpEntryNative)                                \
  SC(number_to_string_native, V8.NumberToStringNative)                         \
  SC(number_to_string_runtime, V8.NumberToStringRuntime)                       \
  SC(math_acos, V8.MathAcos)                                                   \
  SC(math_asin, V8.MathAsin)                                                   \
  SC(math_atan, V8.MathAtan)                                                   \
  SC(math_atan2, V8.MathAtan2)                                                 \
  SC(math_clz32, V8.MathClz32)                                                 \
  SC(math_exp, V8.MathExp)                                                     \
  SC(math_floor, V8.MathFloor)                                                 \
  SC(math_log, V8.MathLog)                                                     \
  SC(math_pow, V8.MathPow)                                                     \
  SC(math_round, V8.MathRound)                                                 \
  SC(math_sqrt, V8.MathSqrt)                                                   \
  SC(stack_interrupts, V8.StackInterrupts)                                     \
  SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks)                          \
  SC(bounds_checks_eliminated, V8.BoundsChecksEliminated)                      \
  SC(bounds_checks_hoisted, V8.BoundsChecksHoisted)                            \
  SC(soft_deopts_requested, V8.SoftDeoptsRequested)                            \
  SC(soft_deopts_inserted, V8.SoftDeoptsInserted)                              \
  SC(soft_deopts_executed, V8.SoftDeoptsExecuted)                              \
  /* Number of write barriers in generated code. */                            \
  SC(write_barriers_dynamic, V8.WriteBarriersDynamic)                          \
  SC(write_barriers_static, V8.WriteBarriersStatic)                            \
  SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable)               \
  SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted)               \
  SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed)                         \
  SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable)               \
  SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted)               \
  SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed)                         \
  SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable)             \
  SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted)             \
  SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed)                       \
  SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable)               \
  SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted)               \
  SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed)                         \
  SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable)                 \
  SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted)                 \
  SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed)                           \
  SC(turbo_escape_allocs_replaced, V8.TurboEscapeAllocsReplaced)               \
  SC(crankshaft_escape_allocs_replaced, V8.CrankshaftEscapeAllocsReplaced)     \
  SC(turbo_escape_loads_replaced, V8.TurboEscapeLoadsReplaced)                 \
  SC(crankshaft_escape_loads_replaced, V8.CrankshaftEscapeLoadsReplaced)
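
// The lists above are X-macros: the Counters class below instantiates them
// with different definitions of SC/HT/HR/... to generate both the accessor
// methods and the member fields. For example (illustrative expansion),
// SC(global_handles, V8.GlobalHandles) expands in the public section to
//
//   StatsCounter* global_handles() { return &global_handles_; }
//
// and in the private section to
//
//   StatsCounter global_handles_;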


// This file contains all the v8 counters that are in use.
class Counters {
 public:
#define HR(name, caption, min, max, num_buckets) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption, max, res) \
  HistogramTimer* name() { return &name##_; }
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define AHT(name, caption) \
  AggregatableHistogramTimer* name() { return &name##_; }
  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
#undef AHT

#define HP(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define HM(name, caption)                                     \
  AggregatedMemoryHistogram<Histogram>* aggregated_##name() { \
    return &aggregated_##name##_;                             \
  }
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter* name() { return &name##_; }
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_##name() { return &count_of_##name##_; } \
  StatsCounter* size_of_##name() { return &size_of_##name##_; }
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_CODE_TYPE_##name() \
    { return &count_of_CODE_TYPE_##name##_; } \
  StatsCounter* size_of_CODE_TYPE_##name() \
    { return &size_of_CODE_TYPE_##name##_; }
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_FIXED_ARRAY_##name() \
    { return &count_of_FIXED_ARRAY_##name##_; } \
  StatsCounter* size_of_FIXED_ARRAY_##name() \
    { return &size_of_FIXED_ARRAY_##name##_; }
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_CODE_AGE_##name() \
    { return &count_of_CODE_AGE_##name##_; } \
  StatsCounter* size_of_CODE_AGE_##name() \
    { return &size_of_CODE_AGE_##name##_; }
  CODE_AGE_LIST_COMPLETE(SC)
#undef SC

  enum Id {
#define RATE_ID(name, caption, max, res) k_##name,
    HISTOGRAM_TIMER_LIST(RATE_ID)
#undef RATE_ID
#define AGGREGATABLE_ID(name, caption) k_##name,
    AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
#undef AGGREGATABLE_ID
#define PERCENTAGE_ID(name, caption) k_##name,
    HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
#undef PERCENTAGE_ID
#define MEMORY_ID(name, caption) k_##name,
    HISTOGRAM_LEGACY_MEMORY_LIST(MEMORY_ID)
    HISTOGRAM_MEMORY_LIST(MEMORY_ID)
#undef MEMORY_ID
#define COUNTER_ID(name, caption) k_##name,
    STATS_COUNTER_LIST_1(COUNTER_ID)
    STATS_COUNTER_LIST_2(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
    INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
    kSizeOfCODE_TYPE_##name,
    CODE_KIND_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
    kSizeOfFIXED_ARRAY__##name,
    FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_AGE__##name, \
    kSizeOfCODE_AGE__##name,
    CODE_AGE_LIST_COMPLETE(COUNTER_ID)
#undef COUNTER_ID
    stats_counter_count
  };

  void ResetCounters();
  void ResetHistograms();

 private:
#define HR(name, caption, min, max, num_buckets) Histogram name##_;
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption, max, res) HistogramTimer name##_;
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define AHT(name, caption) \
  AggregatableHistogramTimer name##_;
  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
#undef AHT

#define HP(name, caption) \
  Histogram name##_;
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram name##_;
  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define HM(name, caption) \
  AggregatedMemoryHistogram<Histogram> aggregated_##name##_;
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter name##_;
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_##name##_; \
  StatsCounter count_of_##name##_;
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_TYPE_##name##_; \
  StatsCounter count_of_CODE_TYPE_##name##_;
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_FIXED_ARRAY_##name##_; \
  StatsCounter count_of_FIXED_ARRAY_##name##_;
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_AGE_##name##_; \
  StatsCounter count_of_CODE_AGE_##name##_;
  CODE_AGE_LIST_COMPLETE(SC)
#undef SC

  friend class Isolate;

  explicit Counters(Isolate* isolate);

  DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_COUNTERS_H_