Home | History | Annotate | Download | only in heap
      1 // Copyright 2014 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/heap/gc-tracer.h"
      6 
      7 #include <cstdarg>
      8 
      9 #include "src/base/atomic-utils.h"
     10 #include "src/counters.h"
     11 #include "src/heap/heap-inl.h"
     12 #include "src/isolate.h"
     13 
     14 namespace v8 {
     15 namespace internal {
     16 
     17 static size_t CountTotalHolesSize(Heap* heap) {
     18   size_t holes_size = 0;
     19   PagedSpaces spaces(heap);
     20   for (PagedSpace* space = spaces.next(); space != nullptr;
     21        space = spaces.next()) {
     22     DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
     23     holes_size += space->Waste() + space->Available();
     24   }
     25   return holes_size;
     26 }
     27 
     28 RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
     29   STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
     30   return static_cast<RuntimeCallCounterId>(
     31       static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
     32       static_cast<int>(id));
     33 }
     34 
// Starts timing a GC scope. Records the wall-clock start time and, when
// --runtime-stats is enabled, additionally enters the matching
// RuntimeCallStats timer (paired with Leave() in the destructor).
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_LIKELY(!FLAG_runtime_stats)) return;
  // runtime_stats_ stays nullptr when the flag is off; the destructor uses
  // that to decide whether Leave() must be called.
  runtime_stats_ = tracer_->heap_->isolate()->counters()->runtime_call_stats();
  runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
}
     43 
// Stops timing the scope: reports the elapsed wall-clock time to the tracer
// and, if the constructor entered RuntimeCallStats, leaves that timer again.
GCTracer::Scope::~Scope() {
  tracer_->AddScopeSample(
      scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_);
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  // runtime_stats_ is non-null only when the constructor called Enter().
  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
}
     51 
// Starts timing a background (off-main-thread) GC scope. FLAG_runtime_stats
// is read with a relaxed atomic load because background threads may race
// with the main thread toggling the flag.
GCTracer::BackgroundScope::BackgroundScope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope), runtime_stats_enabled_(false) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_LIKELY(!base::AsAtomic32::Relaxed_Load(&FLAG_runtime_stats))) return;
  // The local counter_ is aggregated later via AddBackgroundScopeSample;
  // remember that the timer was started so the destructor can stop it.
  timer_.Start(&counter_, nullptr);
  runtime_stats_enabled_ = true;
}
     60 
     61 GCTracer::BackgroundScope::~BackgroundScope() {
     62   double duration_ms =
     63       tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_;
     64   // TODO(cbruni): remove once we fully moved to a trace-based system.
     65   if (V8_LIKELY(!runtime_stats_enabled_)) {
     66     tracer_->AddBackgroundScopeSample(scope_, duration_ms, nullptr);
     67   } else {
     68     timer_.Stop();
     69     tracer_->AddBackgroundScopeSample(scope_, duration_ms, &counter_);
     70   }
     71 }
     72 
// Returns the trace-event name ("V8.GC_<SCOPE>") for a scope id. The CASE
// macro stamps out one switch case per entry of the TRACER_SCOPES and
// TRACER_BACKGROUND_SCOPES lists; NUMBER_OF_SCOPES is not a real scope and
// falls through to UNREACHABLE().
const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    TRACER_BACKGROUND_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  UNREACHABLE();
  // Unreachable; present to satisfy compilers that do not know UNREACHABLE()
  // does not return.
  return nullptr;
}
     87 
// Background-scope counterpart of GCTracer::Scope::Name: maps a background
// scope id to its "V8.GC_<SCOPE>" trace name via the same CASE macro trick,
// but only over the TRACER_BACKGROUND_SCOPES list.
const char* GCTracer::BackgroundScope::Name(ScopeId id) {
#define CASE(scope)            \
  case BackgroundScope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_BACKGROUND_SCOPES(CASE)
    case BackgroundScope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  UNREACHABLE();
  // Unreachable; present to satisfy compilers that do not know UNREACHABLE()
  // does not return.
  return nullptr;
}
    101 
// Creates a fresh GC event record. All statistics start at zero; the actual
// sizes/times are filled in by GCTracer::Start() and GCTracer::Stop().
GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      new_space_object_size(0),
      survived_new_space_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  // scopes[] is a plain array and gets no default initialization; clear the
  // per-scope durations explicitly.
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}
    124 
    125 const char* GCTracer::Event::TypeName(bool short_name) const {
    126   switch (type) {
    127     case SCAVENGER:
    128       return (short_name) ? "s" : "Scavenge";
    129     case MARK_COMPACTOR:
    130     case INCREMENTAL_MARK_COMPACTOR:
    131       return (short_name) ? "ms" : "Mark-sweep";
    132     case MINOR_MARK_COMPACTOR:
    133       return (short_name) ? "mmc" : "Minor Mark-Compact";
    134     case START:
    135       return (short_name) ? "st" : "Start";
    136   }
    137   return "Unknown Event Type";
    138 }
    139 
// Sets up the tracer with an initial START event and zeroed statistics.
// current_/previous_ always hold a valid event so Stop() can compute
// mutator time between consecutive GCs.
GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0),
      average_mutator_duration_(0),
      average_mark_compact_duration_(0),
      current_mark_compact_mutator_utilization_(1.0),
      previous_mark_compact_end_time_(0) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(0 == Scope::FIRST_INCREMENTAL_SCOPE);
  // We assume that MC_INCREMENTAL is the first scope so that we can properly
  // map it to RuntimeCallStats.
  STATIC_ASSERT(0 == Scope::MC_INCREMENTAL);
  // Give the initial START event a real end time so the first GC's
  // spent-in-mutator computation is meaningful.
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  for (int i = 0; i < BackgroundScope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
    background_counter_[i].runtime_call_counter = RuntimeCallCounter(nullptr);
  }
}
    172 
// Restores the tracer to its just-constructed state. Test-only helper:
// clears the current/previous events, all counters, all ring buffers of
// recorded speeds/ratios, and the per-scope background counters.
void GCTracer::ResetForTesting() {
  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  previous_ = current_;
  ResetIncrementalMarkingCounters();
  allocation_time_ms_ = 0.0;
  new_space_allocation_counter_bytes_ = 0.0;
  old_generation_allocation_counter_bytes_ = 0.0;
  allocation_duration_since_gc_ = 0.0;
  new_space_allocation_in_bytes_since_gc_ = 0.0;
  old_generation_allocation_in_bytes_since_gc_ = 0.0;
  combined_mark_compact_speed_cache_ = 0.0;
  recorded_minor_gcs_total_.Reset();
  recorded_minor_gcs_survived_.Reset();
  recorded_compactions_.Reset();
  recorded_mark_compacts_.Reset();
  recorded_incremental_mark_compacts_.Reset();
  recorded_new_generation_allocations_.Reset();
  recorded_old_generation_allocations_.Reset();
  recorded_context_disposal_times_.Reset();
  recorded_survival_ratios_.Reset();
  start_counter_ = 0;
  average_mutator_duration_ = 0;
  average_mark_compact_duration_ = 0;
  current_mark_compact_mutator_utilization_ = 1.0;
  previous_mark_compact_end_time_ = 0;
  // background_counter_ is also written by background threads; hold the
  // mutex while clearing it.
  base::LockGuard<base::Mutex> guard(&background_counter_mutex_);
  for (int i = 0; i < BackgroundScope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
    background_counter_[i].runtime_call_counter.Reset();
  }
}
    205 
// Records in a histogram how the current young-generation collection was
// handled. Expected to be called during a scavenge (or a reentrant GC,
// hence the start_counter_ > 1 escape hatch in the DCHECK).
void GCTracer::NotifyYoungGenerationHandling(
    YoungGenerationHandling young_generation_handling) {
  DCHECK(current_.type == Event::SCAVENGER || start_counter_ > 1);
  heap_->isolate()->counters()->young_generation_handling()->AddSample(
      static_cast<int>(young_generation_handling));
}
    212 
// Begins tracing a garbage collection. Nested (reentrant) GCs only bump
// start_counter_; the full bookkeeping below runs for the outermost GC only.
// Captures the pre-GC heap state into current_ and samples allocation
// throughput up to the start of the GC.
void GCTracer::Start(GarbageCollector collector,
                     GarbageCollectionReason gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;
  double start_time = heap_->MonotonicallyIncreasingTimeInMs();
  // Fold the mutator allocation since the last GC into the running totals
  // before this GC's pause begins.
  SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter());

  switch (collector) {
    case SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case MINOR_MARK_COMPACTOR:
      current_ =
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case MARK_COMPACTOR:
      // A full GC that was preceded by incremental marking is tracked as a
      // separate event type so its marking time can be attributed correctly.
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
  }

  // Snapshot the heap state at GC start; the matching end_* fields are
  // filled in by Stop().
  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.new_space_object_size = heap_->new_space()->Size();

  current_.incremental_marking_bytes = 0;
  current_.incremental_marking_duration = 0;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }

  Counters* counters = heap_->isolate()->counters();

  // Record why this GC was triggered, split by young vs. full collection.
  if (Heap::IsYoungGenerationCollector(collector)) {
    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
  } else {
    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));
  }
}
    264 
    265 void GCTracer::ResetIncrementalMarkingCounters() {
    266   if (incremental_marking_duration_ > 0) {
    267     heap_->isolate()->counters()->incremental_marking_sum()->AddSample(
    268         static_cast<int>(incremental_marking_duration_));
    269   }
    270   incremental_marking_bytes_ = 0;
    271   incremental_marking_duration_ = 0;
    272   for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
    273     incremental_marking_scopes_[i].ResetCurrentCycle();
    274   }
    275 }
    276 
// Finishes tracing a garbage collection. Reentrant GCs only decrement
// start_counter_ (optionally logging the fact); the outermost Stop()
// captures the post-GC heap state, records speed samples per collector
// type, folds in background-thread scope times, and prints trace output.
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    // A nested GC finished while an outer one is still in progress; nothing
    // to record yet.
    if (FLAG_trace_gc_verbose) {
      heap_->isolate()->PrintWithTimestamp(
          "[Finished reentrant %s during %s.]\n",
          Heap::CollectorName(collector), current_.TypeName(false));
    }
    return;
  }

  DCHECK_LE(0, start_counter_);
  // The collector passed to Stop() must match the event type that Start()
  // created (MARK_COMPACTOR maps to either full-MC flavor).
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  // Snapshot the heap state at GC end, mirroring the start_* fields set in
  // Start().
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize();

  AddAllocation(current_.end_time);

  double duration = current_.end_time - current_.start_time;

  switch (current_.type) {
    case Event::SCAVENGER:
    case Event::MINOR_MARK_COMPACTOR:
      // Young-generation speed samples: total new-space size processed and
      // bytes that survived, both over the pause duration.
      recorded_minor_gcs_total_.Push(
          MakeBytesAndDuration(current_.new_space_object_size, duration));
      recorded_minor_gcs_survived_.Push(MakeBytesAndDuration(
          current_.survived_new_space_object_size, duration));
      FetchBackgroundMinorGCCounters();
      break;
    case Event::INCREMENTAL_MARK_COMPACTOR:
      // Attribute the incremental-marking work accumulated since the last
      // full GC to this event before resetting the accumulators.
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
        current_.scopes[i] = incremental_marking_scopes_[i].duration;
      }

      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
                                    current_.incremental_marking_duration);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.start_object_size, duration));
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::MARK_COMPACTOR:
      // A non-incremental full GC must not carry incremental-marking data.
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0, current_.incremental_marking_duration);
      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.start_object_size, duration));
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::START:
      // Stop() without a matching Start() is a programming error.
      UNREACHABLE();
  }
  FetchBackgroundGeneralCounters();

  heap_->UpdateTotalGCTime(duration);

  // Optionally suppress per-scavenge output to keep logs focused on full GCs.
  if ((current_.type == Event::SCAVENGER ||
       current_.type == Event::MINOR_MARK_COMPACTOR) &&
      FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }
}
    366 
    367 
    368 void GCTracer::SampleAllocation(double current_ms,
    369                                 size_t new_space_counter_bytes,
    370                                 size_t old_generation_counter_bytes) {
    371   if (allocation_time_ms_ == 0) {
    372     // It is the first sample.
    373     allocation_time_ms_ = current_ms;
    374     new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    375     old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    376     return;
    377   }
    378   // This assumes that counters are unsigned integers so that the subtraction
    379   // below works even if the new counter is less than the old counter.
    380   size_t new_space_allocated_bytes =
    381       new_space_counter_bytes - new_space_allocation_counter_bytes_;
    382   size_t old_generation_allocated_bytes =
    383       old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
    384   double duration = current_ms - allocation_time_ms_;
    385   allocation_time_ms_ = current_ms;
    386   new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    387   old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    388   allocation_duration_since_gc_ += duration;
    389   new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
    390   old_generation_allocation_in_bytes_since_gc_ +=
    391       old_generation_allocated_bytes;
    392 }
    393 
    394 
    395 void GCTracer::AddAllocation(double current_ms) {
    396   allocation_time_ms_ = current_ms;
    397   if (allocation_duration_since_gc_ > 0) {
    398     recorded_new_generation_allocations_.Push(
    399         MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
    400                              allocation_duration_since_gc_));
    401     recorded_old_generation_allocations_.Push(
    402         MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
    403                              allocation_duration_since_gc_));
    404   }
    405   allocation_duration_since_gc_ = 0;
    406   new_space_allocation_in_bytes_since_gc_ = 0;
    407   old_generation_allocation_in_bytes_since_gc_ = 0;
    408 }
    409 
    410 
    411 void GCTracer::AddContextDisposalTime(double time) {
    412   recorded_context_disposal_times_.Push(time);
    413 }
    414 
    415 void GCTracer::AddCompactionEvent(double duration,
    416                                   size_t live_bytes_compacted) {
    417   recorded_compactions_.Push(
    418       MakeBytesAndDuration(live_bytes_compacted, duration));
    419 }
    420 
    421 
    422 void GCTracer::AddSurvivalRatio(double promotion_ratio) {
    423   recorded_survival_ratios_.Push(promotion_ratio);
    424 }
    425 
    426 void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
    427   if (bytes > 0) {
    428     incremental_marking_bytes_ += bytes;
    429     incremental_marking_duration_ += duration;
    430   }
    431 }
    432 
// printf-style output helper: writes to stdout when --trace-gc is on and
// always appends the formatted message to the heap's ring buffer, which is
// dumped on OOM crashes. The variadic arguments are traversed twice, hence
// the two independent va_start/va_end pairs.
void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  // Messages longer than the buffer are truncated for the ring buffer.
  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.start());
}
    451 
// Emits the human-readable one-line summary of the just-finished GC
// (sizes before/after, pause time, reasons). For incremental mark-compact
// events an extra fragment summarizing the marking steps is prepended.
void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
  const size_t kIncrementalStatsSize = 128;
  // Empty unless this was an incremental mark-compact.
  char incremental_buffer[kIncrementalStatsSize] = {0};

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_.scopes[Scope::MC_INCREMENTAL],
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
        current_.end_time - incremental_marking_start_time_);
  }

  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
      "%s %.1f (%.1f) -> %.1f (%.1f) MB, "
      "%.1f / %.1f ms %s (average mu = %.3f, current mu = %.3f) %s %s\n",
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(), current_.TypeName(false),
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      TotalExternalTime(), incremental_buffer,
      AverageMarkCompactMutatorUtilization(),
      CurrentMarkCompactMutatorUtilization(),
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
      current_.collector_reason != nullptr ? current_.collector_reason : "");
}
    488 
    489 
    490 void GCTracer::PrintNVP() const {
    491   double duration = current_.end_time - current_.start_time;
    492   double spent_in_mutator = current_.start_time - previous_.end_time;
    493   size_t allocated_since_last_gc =
    494       current_.start_object_size - previous_.end_object_size;
    495 
    496   double incremental_walltime_duration = 0;
    497 
    498   if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    499     incremental_walltime_duration =
    500         current_.end_time - incremental_marking_start_time_;
    501   }
    502 
    503   switch (current_.type) {
    504     case Event::SCAVENGER:
    505       heap_->isolate()->PrintWithTimestamp(
    506           "pause=%.1f "
    507           "mutator=%.1f "
    508           "gc=%s "
    509           "reduce_memory=%d "
    510           "heap.prologue=%.2f "
    511           "heap.epilogue=%.2f "
    512           "heap.epilogue.reduce_new_space=%.2f "
    513           "heap.external.prologue=%.2f "
    514           "heap.external.epilogue=%.2f "
    515           "heap.external_weak_global_handles=%.2f "
    516           "fast_promote=%.2f "
    517           "scavenge=%.2f "
    518           "scavenge.process_array_buffers=%.2f "
    519           "scavenge.roots=%.2f "
    520           "scavenge.weak=%.2f "
    521           "scavenge.weak_global_handles.identify=%.2f "
    522           "scavenge.weak_global_handles.process=%.2f "
    523           "scavenge.parallel=%.2f "
    524           "scavenge.update_refs=%.2f "
    525           "background.scavenge.parallel=%.2f "
    526           "background.array_buffer_free=%.2f "
    527           "background.store_buffer=%.2f "
    528           "background.unmapper=%.2f "
    529           "incremental.steps_count=%d "
    530           "incremental.steps_took=%.1f "
    531           "scavenge_throughput=%.f "
    532           "total_size_before=%" PRIuS
    533           " "
    534           "total_size_after=%" PRIuS
    535           " "
    536           "holes_size_before=%" PRIuS
    537           " "
    538           "holes_size_after=%" PRIuS
    539           " "
    540           "allocated=%" PRIuS
    541           " "
    542           "promoted=%" PRIuS
    543           " "
    544           "semi_space_copied=%" PRIuS
    545           " "
    546           "nodes_died_in_new=%d "
    547           "nodes_copied_in_new=%d "
    548           "nodes_promoted=%d "
    549           "promotion_ratio=%.1f%% "
    550           "average_survival_ratio=%.1f%% "
    551           "promotion_rate=%.1f%% "
    552           "semi_space_copy_rate=%.1f%% "
    553           "new_space_allocation_throughput=%.1f "
    554           "unmapper_chunks=%d "
    555           "context_disposal_rate=%.1f\n",
    556           duration, spent_in_mutator, current_.TypeName(true),
    557           current_.reduce_memory, current_.scopes[Scope::HEAP_PROLOGUE],
    558           current_.scopes[Scope::HEAP_EPILOGUE],
    559           current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
    560           current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
    561           current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
    562           current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
    563           current_.scopes[Scope::SCAVENGER_FAST_PROMOTE],
    564           current_.scopes[Scope::SCAVENGER_SCAVENGE],
    565           current_.scopes[Scope::SCAVENGER_PROCESS_ARRAY_BUFFERS],
    566           current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS],
    567           current_.scopes[Scope::SCAVENGER_SCAVENGE_WEAK],
    568           current_
    569               .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY],
    570           current_
    571               .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS],
    572           current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL],
    573           current_.scopes[Scope::SCAVENGER_SCAVENGE_UPDATE_REFS],
    574           current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL],
    575           current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
    576           current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
    577           current_.scopes[Scope::BACKGROUND_UNMAPPER],
    578           current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
    579               .steps,
    580           current_.scopes[Scope::MC_INCREMENTAL],
    581           ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
    582           current_.end_object_size, current_.start_holes_size,
    583           current_.end_holes_size, allocated_since_last_gc,
    584           heap_->promoted_objects_size(),
    585           heap_->semi_space_copied_object_size(),
    586           heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
    587           heap_->nodes_promoted_, heap_->promotion_ratio_,
    588           AverageSurvivalRatio(), heap_->promotion_rate_,
    589           heap_->semi_space_copied_rate_,
    590           NewSpaceAllocationThroughputInBytesPerMillisecond(),
    591           heap_->memory_allocator()->unmapper()->NumberOfChunks(),
    592           ContextDisposalRateInMilliseconds());
    593       break;
    594     case Event::MINOR_MARK_COMPACTOR:
    595       heap_->isolate()->PrintWithTimestamp(
    596           "pause=%.1f "
    597           "mutator=%.1f "
    598           "gc=%s "
    599           "reduce_memory=%d "
    600           "minor_mc=%.2f "
    601           "finish_sweeping=%.2f "
    602           "mark=%.2f "
    603           "mark.seed=%.2f "
    604           "mark.roots=%.2f "
    605           "mark.weak=%.2f "
    606           "mark.global_handles=%.2f "
    607           "clear=%.2f "
    608           "clear.string_table=%.2f "
    609           "clear.weak_lists=%.2f "
    610           "evacuate=%.2f "
    611           "evacuate.copy=%.2f "
    612           "evacuate.update_pointers=%.2f "
    613           "evacuate.update_pointers.to_new_roots=%.2f "
    614           "evacuate.update_pointers.slots=%.2f "
    615           "background.mark=%.2f "
    616           "background.evacuate.copy=%.2f "
    617           "background.evacuate.update_pointers=%.2f "
    618           "background.array_buffer_free=%.2f "
    619           "background.store_buffer=%.2f "
    620           "background.unmapper=%.2f "
    621           "update_marking_deque=%.2f "
    622           "reset_liveness=%.2f\n",
    623           duration, spent_in_mutator, "mmc", current_.reduce_memory,
    624           current_.scopes[Scope::MINOR_MC],
    625           current_.scopes[Scope::MINOR_MC_SWEEPING],
    626           current_.scopes[Scope::MINOR_MC_MARK],
    627           current_.scopes[Scope::MINOR_MC_MARK_SEED],
    628           current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
    629           current_.scopes[Scope::MINOR_MC_MARK_WEAK],
    630           current_.scopes[Scope::MINOR_MC_MARK_GLOBAL_HANDLES],
    631           current_.scopes[Scope::MINOR_MC_CLEAR],
    632           current_.scopes[Scope::MINOR_MC_CLEAR_STRING_TABLE],
    633           current_.scopes[Scope::MINOR_MC_CLEAR_WEAK_LISTS],
    634           current_.scopes[Scope::MINOR_MC_EVACUATE],
    635           current_.scopes[Scope::MINOR_MC_EVACUATE_COPY],
    636           current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS],
    637           current_
    638               .scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
    639           current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS],
    640           current_.scopes[Scope::MINOR_MC_BACKGROUND_MARKING],
    641           current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY],
    642           current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
    643           current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
    644           current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
    645           current_.scopes[Scope::BACKGROUND_UNMAPPER],
    646           current_.scopes[Scope::MINOR_MC_MARKING_DEQUE],
    647           current_.scopes[Scope::MINOR_MC_RESET_LIVENESS]);
    648       break;
    649     case Event::MARK_COMPACTOR:
    650     case Event::INCREMENTAL_MARK_COMPACTOR:
    651       heap_->isolate()->PrintWithTimestamp(
    652           "pause=%.1f "
    653           "mutator=%.1f "
    654           "gc=%s "
    655           "reduce_memory=%d "
    656           "heap.prologue=%.2f "
    657           "heap.epilogue=%.2f "
    658           "heap.epilogue.reduce_new_space=%.2f "
    659           "heap.external.prologue=%.1f "
    660           "heap.external.epilogue=%.1f "
    661           "heap.external.weak_global_handles=%.1f "
    662           "clear=%1.f "
    663           "clear.dependent_code=%.1f "
    664           "clear.maps=%.1f "
    665           "clear.slots_buffer=%.1f "
    666           "clear.store_buffer=%.1f "
    667           "clear.string_table=%.1f "
    668           "clear.weak_collections=%.1f "
    669           "clear.weak_lists=%.1f "
    670           "clear.weak_references=%.1f "
    671           "epilogue=%.1f "
    672           "evacuate=%.1f "
    673           "evacuate.candidates=%.1f "
    674           "evacuate.clean_up=%.1f "
    675           "evacuate.copy=%.1f "
    676           "evacuate.prologue=%.1f "
    677           "evacuate.epilogue=%.1f "
    678           "evacuate.rebalance=%.1f "
    679           "evacuate.update_pointers=%.1f "
    680           "evacuate.update_pointers.to_new_roots=%.1f "
    681           "evacuate.update_pointers.slots.main=%.1f "
    682           "evacuate.update_pointers.slots.map_space=%.1f "
    683           "evacuate.update_pointers.weak=%.1f "
    684           "finish=%.1f "
    685           "mark=%.1f "
    686           "mark.finish_incremental=%.1f "
    687           "mark.roots=%.1f "
    688           "mark.main=%.1f "
    689           "mark.weak_closure=%.1f "
    690           "mark.weak_closure.ephemeron=%.1f "
    691           "mark.weak_closure.ephemeron.marking=%.1f "
    692           "mark.weak_closure.ephemeron.linear=%.1f "
    693           "mark.weak_closure.weak_handles=%.1f "
    694           "mark.weak_closure.weak_roots=%.1f "
    695           "mark.weak_closure.harmony=%.1f "
    696           "mark.wrapper_prologue=%.1f "
    697           "mark.wrapper_epilogue=%.1f "
    698           "mark.wrapper_tracing=%.1f "
    699           "prologue=%.1f "
    700           "sweep=%.1f "
    701           "sweep.code=%.1f "
    702           "sweep.map=%.1f "
    703           "sweep.old=%.1f "
    704           "incremental=%.1f "
    705           "incremental.finalize=%.1f "
    706           "incremental.finalize.body=%.1f "
    707           "incremental.finalize.external.prologue=%.1f "
    708           "incremental.finalize.external.epilogue=%.1f "
    709           "incremental.sweeping=%.1f "
    710           "incremental.wrapper_prologue=%.1f "
    711           "incremental.wrapper_tracing=%.1f "
    712           "incremental_wrapper_tracing_longest_step=%.1f "
    713           "incremental_finalize_longest_step=%.1f "
    714           "incremental_finalize_steps_count=%d "
    715           "incremental_longest_step=%.1f "
    716           "incremental_steps_count=%d "
    717           "incremental_marking_throughput=%.f "
    718           "incremental_walltime_duration=%.f "
    719           "background.mark=%.1f "
    720           "background.sweep=%.1f "
    721           "background.evacuate.copy=%.1f "
    722           "background.evacuate.update_pointers=%.1f "
    723           "background.array_buffer_free=%.2f "
    724           "background.store_buffer=%.2f "
    725           "background.unmapper=%.1f "
    726           "total_size_before=%" PRIuS
    727           " "
    728           "total_size_after=%" PRIuS
    729           " "
    730           "holes_size_before=%" PRIuS
    731           " "
    732           "holes_size_after=%" PRIuS
    733           " "
    734           "allocated=%" PRIuS
    735           " "
    736           "promoted=%" PRIuS
    737           " "
    738           "semi_space_copied=%" PRIuS
    739           " "
    740           "nodes_died_in_new=%d "
    741           "nodes_copied_in_new=%d "
    742           "nodes_promoted=%d "
    743           "promotion_ratio=%.1f%% "
    744           "average_survival_ratio=%.1f%% "
    745           "promotion_rate=%.1f%% "
    746           "semi_space_copy_rate=%.1f%% "
    747           "new_space_allocation_throughput=%.1f "
    748           "unmapper_chunks=%d "
    749           "context_disposal_rate=%.1f "
    750           "compaction_speed=%.f\n",
    751           duration, spent_in_mutator, current_.TypeName(true),
    752           current_.reduce_memory, current_.scopes[Scope::HEAP_PROLOGUE],
    753           current_.scopes[Scope::HEAP_EPILOGUE],
    754           current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
    755           current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
    756           current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
    757           current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
    758           current_.scopes[Scope::MC_CLEAR],
    759           current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
    760           current_.scopes[Scope::MC_CLEAR_MAPS],
    761           current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
    762           current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
    763           current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
    764           current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
    765           current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
    766           current_.scopes[Scope::MC_CLEAR_WEAK_REFERENCES],
    767           current_.scopes[Scope::MC_EPILOGUE],
    768           current_.scopes[Scope::MC_EVACUATE],
    769           current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
    770           current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
    771           current_.scopes[Scope::MC_EVACUATE_COPY],
    772           current_.scopes[Scope::MC_EVACUATE_PROLOGUE],
    773           current_.scopes[Scope::MC_EVACUATE_EPILOGUE],
    774           current_.scopes[Scope::MC_EVACUATE_REBALANCE],
    775           current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
    776           current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
    777           current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN],
    778           current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAP_SPACE],
    779           current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
    780           current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
    781           current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
    782           current_.scopes[Scope::MC_MARK_ROOTS],
    783           current_.scopes[Scope::MC_MARK_MAIN],
    784           current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
    785           current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON],
    786           current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING],
    787           current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR],
    788           current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
    789           current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS],
    790           current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_HARMONY],
    791           current_.scopes[Scope::MC_MARK_WRAPPER_PROLOGUE],
    792           current_.scopes[Scope::MC_MARK_WRAPPER_EPILOGUE],
    793           current_.scopes[Scope::MC_MARK_WRAPPER_TRACING],
    794           current_.scopes[Scope::MC_PROLOGUE], current_.scopes[Scope::MC_SWEEP],
    795           current_.scopes[Scope::MC_SWEEP_CODE],
    796           current_.scopes[Scope::MC_SWEEP_MAP],
    797           current_.scopes[Scope::MC_SWEEP_OLD],
    798           current_.scopes[Scope::MC_INCREMENTAL],
    799           current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
    800           current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
    801           current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
    802           current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
    803           current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
    804           current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE],
    805           current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_TRACING],
    806           current_
    807               .incremental_marking_scopes[Scope::MC_INCREMENTAL_WRAPPER_TRACING]
    808               .longest_step,
    809           current_
    810               .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
    811               .longest_step,
    812           current_
    813               .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
    814               .steps,
    815           current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
    816               .longest_step,
    817           current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
    818           IncrementalMarkingSpeedInBytesPerMillisecond(),
    819           incremental_walltime_duration,
    820           current_.scopes[Scope::MC_BACKGROUND_MARKING],
    821           current_.scopes[Scope::MC_BACKGROUND_SWEEPING],
    822           current_.scopes[Scope::MC_BACKGROUND_EVACUATE_COPY],
    823           current_.scopes[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
    824           current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
    825           current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
    826           current_.scopes[Scope::BACKGROUND_UNMAPPER],
    827           current_.start_object_size, current_.end_object_size,
    828           current_.start_holes_size, current_.end_holes_size,
    829           allocated_since_last_gc, heap_->promoted_objects_size(),
    830           heap_->semi_space_copied_object_size(),
    831           heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
    832           heap_->nodes_promoted_, heap_->promotion_ratio_,
    833           AverageSurvivalRatio(), heap_->promotion_rate_,
    834           heap_->semi_space_copied_rate_,
    835           NewSpaceAllocationThroughputInBytesPerMillisecond(),
    836           heap_->memory_allocator()->unmapper()->NumberOfChunks(),
    837           ContextDisposalRateInMilliseconds(),
    838           CompactionSpeedInBytesPerMillisecond());
    839       break;
    840     case Event::START:
    841       break;
    842     default:
    843       UNREACHABLE();
    844   }
    845 }
    846 
    847 double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
    848                               const BytesAndDuration& initial, double time_ms) {
    849   BytesAndDuration sum = buffer.Sum(
    850       [time_ms](BytesAndDuration a, BytesAndDuration b) {
    851         if (time_ms != 0 && a.second >= time_ms) return a;
    852         return std::make_pair(a.first + b.first, a.second + b.second);
    853       },
    854       initial);
    855   uint64_t bytes = sum.first;
    856   double durations = sum.second;
    857   if (durations == 0.0) return 0;
    858   double speed = bytes / durations;
    859   const int max_speed = 1024 * MB;
    860   const int min_speed = 1;
    861   if (speed >= max_speed) return max_speed;
    862   if (speed <= min_speed) return min_speed;
    863   return speed;
    864 }
    865 
// Convenience overload: average speed over every sample in |buffer|, with no
// extra seed data and no time-window restriction.
double GCTracer::AverageSpeed(
    const base::RingBuffer<BytesAndDuration>& buffer) {
  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
}
    870 
    871 void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
    872   if (duration == 0 || bytes == 0) return;
    873   double current_speed = bytes / duration;
    874   if (recorded_incremental_marking_speed_ == 0) {
    875     recorded_incremental_marking_speed_ = current_speed;
    876   } else {
    877     recorded_incremental_marking_speed_ =
    878         (recorded_incremental_marking_speed_ + current_speed) / 2;
    879   }
    880 }
    881 
    882 void GCTracer::RecordMutatorUtilization(double mark_compact_end_time,
    883                                         double mark_compact_duration) {
    884   if (previous_mark_compact_end_time_ == 0) {
    885     // The first event only contributes to previous_mark_compact_end_time_,
    886     // because we cannot compute the mutator duration.
    887     previous_mark_compact_end_time_ = mark_compact_end_time;
    888   } else {
    889     double total_duration =
    890         mark_compact_end_time - previous_mark_compact_end_time_;
    891     double mutator_duration = total_duration - mark_compact_duration;
    892     if (average_mark_compact_duration_ == 0 && average_mutator_duration_ == 0) {
    893       // This is the first event with mutator and mark-compact durations.
    894       average_mark_compact_duration_ = mark_compact_duration;
    895       average_mutator_duration_ = mutator_duration;
    896     } else {
    897       average_mark_compact_duration_ =
    898           (average_mark_compact_duration_ + mark_compact_duration) / 2;
    899       average_mutator_duration_ =
    900           (average_mutator_duration_ + mutator_duration) / 2;
    901     }
    902     current_mark_compact_mutator_utilization_ =
    903         total_duration ? mutator_duration / total_duration : 0;
    904     previous_mark_compact_end_time_ = mark_compact_end_time;
    905   }
    906 }
    907 
    908 double GCTracer::AverageMarkCompactMutatorUtilization() const {
    909   double average_total_duration =
    910       average_mark_compact_duration_ + average_mutator_duration_;
    911   if (average_total_duration == 0) return 1.0;
    912   return average_mutator_duration_ / average_total_duration;
    913 }
    914 
// Mutator utilization of the most recent GC-to-GC interval, as computed by
// RecordMutatorUtilization().
double GCTracer::CurrentMarkCompactMutatorUtilization() const {
  return current_mark_compact_mutator_utilization_;
}
    918 
    919 double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
    920   const int kConservativeSpeedInBytesPerMillisecond = 128 * KB;
    921   if (recorded_incremental_marking_speed_ != 0) {
    922     return recorded_incremental_marking_speed_;
    923   }
    924   if (incremental_marking_duration_ != 0.0) {
    925     return incremental_marking_bytes_ / incremental_marking_duration_;
    926   }
    927   return kConservativeSpeedInBytesPerMillisecond;
    928 }
    929 
    930 double GCTracer::ScavengeSpeedInBytesPerMillisecond(
    931     ScavengeSpeedMode mode) const {
    932   if (mode == kForAllObjects) {
    933     return AverageSpeed(recorded_minor_gcs_total_);
    934   } else {
    935     return AverageSpeed(recorded_minor_gcs_survived_);
    936   }
    937 }
    938 
// Average compaction (evacuation) speed in bytes/ms over recorded samples;
// 0 if nothing has been recorded yet.
double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_compactions_);
}
    942 
// Average speed of full (non-incremental) mark-compact GCs in bytes/ms;
// 0 if nothing has been recorded yet.
double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_mark_compacts_);
}
    946 
// Average speed of the final (atomic) pause of incremental mark-compact GCs
// in bytes/ms; 0 if nothing has been recorded yet.
double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_incremental_mark_compacts_);
}
    950 
    951 double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
    952   if (combined_mark_compact_speed_cache_ > 0)
    953     return combined_mark_compact_speed_cache_;
    954   const double kMinimumMarkingSpeed = 0.5;
    955   double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
    956   double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
    957   if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
    958     // No data for the incremental marking speed.
    959     // Return the non-incremental mark-compact speed.
    960     combined_mark_compact_speed_cache_ =
    961         MarkCompactSpeedInBytesPerMillisecond();
    962   } else {
    963     // Combine the speed of incremental step and the speed of the final step.
    964     // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    965     combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
    966   }
    967   return combined_mark_compact_speed_cache_;
    968 }
    969 
    970 double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    971     double time_ms) const {
    972   size_t bytes = new_space_allocation_in_bytes_since_gc_;
    973   double durations = allocation_duration_since_gc_;
    974   return AverageSpeed(recorded_new_generation_allocations_,
    975                       MakeBytesAndDuration(bytes, durations), time_ms);
    976 }
    977 
    978 double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    979     double time_ms) const {
    980   size_t bytes = old_generation_allocation_in_bytes_since_gc_;
    981   double durations = allocation_duration_since_gc_;
    982   return AverageSpeed(recorded_old_generation_allocations_,
    983                       MakeBytesAndDuration(bytes, durations), time_ms);
    984 }
    985 
// Combined new-space plus old-generation allocation throughput in bytes/ms
// over the last |time_ms| (0 == all recorded samples).
double GCTracer::AllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
         OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
}
    991 
// Total allocation throughput over the last kThroughputTimeFrameMs ms.
double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}
    995 
// Old-generation allocation throughput over the last kThroughputTimeFrameMs
// milliseconds.
double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}
   1001 
   1002 double GCTracer::ContextDisposalRateInMilliseconds() const {
   1003   if (recorded_context_disposal_times_.Count() <
   1004       recorded_context_disposal_times_.kSize)
   1005     return 0.0;
   1006   double begin = heap_->MonotonicallyIncreasingTimeInMs();
   1007   double end = recorded_context_disposal_times_.Sum(
   1008       [](double a, double b) { return b; }, 0.0);
   1009   return (begin - end) / recorded_context_disposal_times_.Count();
   1010 }
   1011 
   1012 double GCTracer::AverageSurvivalRatio() const {
   1013   if (recorded_survival_ratios_.Count() == 0) return 0.0;
   1014   double sum = recorded_survival_ratios_.Sum(
   1015       [](double a, double b) { return a + b; }, 0.0);
   1016   return sum / recorded_survival_ratios_.Count();
   1017 }
   1018 
// Returns true if at least one survival ratio sample has been recorded.
bool GCTracer::SurvivalEventsRecorded() const {
  return recorded_survival_ratios_.Count() > 0;
}
   1022 
// Discards all recorded survival ratio samples.
void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }
   1024 
// Records the wall-clock time at which incremental marking started for the
// current cycle.
void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs();
}
   1028 
// Drains the background counters for the mark-compact scope range into the
// current event and reports total background marking/sweeping durations as
// histogram samples.
void GCTracer::FetchBackgroundMarkCompactCounters() {
  FetchBackgroundCounters(Scope::FIRST_MC_BACKGROUND_SCOPE,
                          Scope::LAST_MC_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_MC_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_MC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_marking()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_MARKING]));
  heap_->isolate()->counters()->background_sweeping()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_SWEEPING]));
}
   1039 
// Drains the background counters for the minor-GC scope range into the
// current event and reports the parallel scavenge duration as a histogram
// sample.
void GCTracer::FetchBackgroundMinorGCCounters() {
  FetchBackgroundCounters(Scope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          Scope::LAST_MINOR_GC_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_MINOR_GC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_scavenger()->AddSample(
      static_cast<int>(
          current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]));
}
   1049 
// Drains the background counters for the general (non-GC-type-specific)
// scope range into the current event.
void GCTracer::FetchBackgroundGeneralCounters() {
  FetchBackgroundCounters(Scope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          Scope::LAST_GENERAL_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_GENERAL_BACKGROUND_SCOPE);
}
   1056 
// Moves accumulated background-thread durations (and, when --runtime-stats
// is enabled, runtime call counters) from the background counter table into
// the current event's scopes. The global and background scope ranges must be
// parallel: global scope (first_global_scope + i) receives background scope
// (first_background_scope + i).
void GCTracer::FetchBackgroundCounters(int first_global_scope,
                                       int last_global_scope,
                                       int first_background_scope,
                                       int last_background_scope) {
  DCHECK_EQ(last_global_scope - first_global_scope,
            last_background_scope - first_background_scope);
  // background_counter_ is written by background threads via
  // AddBackgroundScopeSample() under the same mutex.
  base::LockGuard<base::Mutex> guard(&background_counter_mutex_);
  int background_mc_scopes = last_background_scope - first_background_scope + 1;
  for (int i = 0; i < background_mc_scopes; i++) {
    // Transfer-and-reset so each duration is accounted exactly once.
    current_.scopes[first_global_scope + i] +=
        background_counter_[first_background_scope + i].total_duration_ms;
    background_counter_[first_background_scope + i].total_duration_ms = 0;
  }
  if (V8_LIKELY(!FLAG_runtime_stats)) return;
  RuntimeCallStats* runtime_stats =
      heap_->isolate()->counters()->runtime_call_stats();
  if (!runtime_stats) return;
  for (int i = 0; i < background_mc_scopes; i++) {
    // Fold the background runtime-call counter into the corresponding RCS
    // counter, then reset it, mirroring the duration transfer above.
    runtime_stats
        ->GetCounter(GCTracer::RCSCounterFromScope(
            static_cast<Scope::ScopeId>(first_global_scope + i)))
        ->Add(&background_counter_[first_background_scope + i]
                   .runtime_call_counter);
    background_counter_[first_background_scope + i]
        .runtime_call_counter.Reset();
  }
}
   1084 
// Called from background threads to report the duration (and optional
// runtime call counter) of one background GC scope. Thread-safe: guarded by
// background_counter_mutex_; the accumulated values are later drained on the
// main thread by FetchBackgroundCounters().
void GCTracer::AddBackgroundScopeSample(
    BackgroundScope::ScopeId scope, double duration,
    RuntimeCallCounter* runtime_call_counter) {
  base::LockGuard<base::Mutex> guard(&background_counter_mutex_);
  BackgroundCounter& counter = background_counter_[scope];
  counter.total_duration_ms += duration;
  if (runtime_call_counter) {
    counter.runtime_call_counter.Add(runtime_call_counter);
  }
}
   1095 
// Records per-phase mark-compact durations as histogram samples, but only
// when |gc_timer| is the gc_finalize timer (i.e. the finalizing pause).
void GCTracer::RecordMarkCompactHistograms(HistogramTimer* gc_timer) {
  Counters* counters = heap_->isolate()->counters();
  if (gc_timer == counters->gc_finalize()) {
    // The two DCHECKs bracket the sampled scopes: MC_CLEAR..MC_SWEEP is
    // expected to be the complete range of top-level mark-compact scopes.
    DCHECK_EQ(Scope::FIRST_TOP_MC_SCOPE, Scope::MC_CLEAR);
    counters->gc_finalize_clear()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_CLEAR]));
    counters->gc_finalize_epilogue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EPILOGUE]));
    counters->gc_finalize_evacuate()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EVACUATE]));
    counters->gc_finalize_finish()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_FINISH]));
    counters->gc_finalize_mark()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_MARK]));
    counters->gc_finalize_prologue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_PROLOGUE]));
    counters->gc_finalize_sweep()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_SWEEP]));
    DCHECK_EQ(Scope::LAST_TOP_MC_SCOPE, Scope::MC_SWEEP);
  }
}
   1117 
   1118 }  // namespace internal
   1119 }  // namespace v8
   1120