// Copyright 2015 the V8 project authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/object-stats.h"

#include <unordered_map>
#include <unordered_set>

#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/compilation-cache.h"
#include "src/counters.h"
#include "src/globals.h"
#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
#include "src/objects/compilation-cache-inl.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/templates.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

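// Taken in ObjectStats::CheckpointObjectStats() to serialize checkpointing of
// the per-instance-type counters.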
static base::LazyMutex object_stats_mutex = LAZY_MUTEX_INITIALIZER;

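// Object visitor that attributes every word of a visited object to one of the
// counters it is constructed with: tagged fields, embedder fields, unboxed
// double fields, or other raw (untagged) fields.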
class FieldStatsCollector : public ObjectVisitor {
 public:
  FieldStatsCollector(size_t* tagged_fields_count,
                      size_t* embedder_fields_count,
                      size_t* unboxed_double_fields_count,
                      size_t* raw_fields_count)
      : tagged_fields_count_(tagged_fields_count),
        embedder_fields_count_(embedder_fields_count),
        unboxed_double_fields_count_(unboxed_double_fields_count),
        raw_fields_count_(raw_fields_count) {}

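  // Visits |host| once and splits its size in words between the tagged,
  // embedder, unboxed double, and raw field counters.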
  void RecordStats(HeapObject* host) {
    size_t old_pointer_fields_count = *tagged_fields_count_;
    host->Iterate(this);
    size_t tagged_fields_count_in_object =
        *tagged_fields_count_ - old_pointer_fields_count;

    int object_size_in_words = host->Size() / kPointerSize;
    DCHECK_LE(tagged_fields_count_in_object, object_size_in_words);
    size_t raw_fields_count_in_object =
        object_size_in_words - tagged_fields_count_in_object;

    if (host->IsJSObject()) {
      JSObjectFieldStats field_stats = GetInobjectFieldStats(host->map());
      // Embedder fields are already included in the tagged field count, so
      // move them into their own bucket.
      DCHECK_LE(field_stats.embedded_fields_count_,
                tagged_fields_count_in_object);
      tagged_fields_count_in_object -= field_stats.embedded_fields_count_;
      *tagged_fields_count_ -= field_stats.embedded_fields_count_;
      *embedder_fields_count_ += field_stats.embedded_fields_count_;

      // The rest are data words.
      DCHECK_LE(field_stats.unboxed_double_fields_count_,
                raw_fields_count_in_object);
      raw_fields_count_in_object -= field_stats.unboxed_double_fields_count_;
      *unboxed_double_fields_count_ += field_stats.unboxed_double_fields_count_;
    }
    *raw_fields_count_ += raw_fields_count_in_object;
  }

  void VisitPointers(HeapObject* host, Object** start, Object** end) override {
    *tagged_fields_count_ += (end - start);
  }
  void VisitPointers(HeapObject* host, MaybeObject** start,
                     MaybeObject** end) override {
    *tagged_fields_count_ += (end - start);
  }

 private:
  struct JSObjectFieldStats {
    JSObjectFieldStats()
        : embedded_fields_count_(0), unboxed_double_fields_count_(0) {}

    unsigned embedded_fields_count_ : kDescriptorIndexBitCount;
    unsigned unboxed_double_fields_count_ : kDescriptorIndexBitCount;
  };
  std::unordered_map<Map*, JSObjectFieldStats> field_stats_cache_;

  JSObjectFieldStats GetInobjectFieldStats(Map* map);

  size_t* const tagged_fields_count_;
  size_t* const embedder_fields_count_;
  size_t* const unboxed_double_fields_count_;
  size_t* const raw_fields_count_;
};

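// Computes how many in-object fields of |map|'s instances are embedder fields
// and how many are unboxed doubles; results are cached per map.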
FieldStatsCollector::JSObjectFieldStats
FieldStatsCollector::GetInobjectFieldStats(Map* map) {
  auto iter = field_stats_cache_.find(map);
  if (iter != field_stats_cache_.end()) {
    return iter->second;
  }
  // Iterate descriptor array and calculate stats.
  JSObjectFieldStats stats;
  stats.embedded_fields_count_ = JSObject::GetEmbedderFieldCount(map);
  if (!map->is_dictionary_map()) {
    int nof = map->NumberOfOwnDescriptors();
    DescriptorArray* descriptors = map->instance_descriptors();
    for (int descriptor = 0; descriptor < nof; descriptor++) {
      PropertyDetails details = descriptors->GetDetails(descriptor);
      if (details.location() == kField) {
        FieldIndex index = FieldIndex::ForDescriptor(map, descriptor);
        // Stop on first out-of-object field.
        if (!index.is_inobject()) break;
        if (details.representation().IsDouble() &&
            map->IsUnboxedDoubleField(index)) {
          ++stats.unboxed_double_fields_count_;
        }
      }
    }
  }
  field_stats_cache_.insert(std::make_pair(map, stats));
  return stats;
}

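// Zeroes all counters and histograms; also clears the last-checkpoint snapshot
// when |clear_last_time_stats| is set.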
void ObjectStats::ClearObjectStats(bool clear_last_time_stats) {
  memset(object_counts_, 0, sizeof(object_counts_));
  memset(object_sizes_, 0, sizeof(object_sizes_));
  memset(over_allocated_, 0, sizeof(over_allocated_));
  memset(size_histogram_, 0, sizeof(size_histogram_));
  memset(over_allocated_histogram_, 0, sizeof(over_allocated_histogram_));
  if (clear_last_time_stats) {
    memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
    memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
  }
  tagged_fields_count_ = 0;
  embedder_fields_count_ = 0;
  unboxed_double_fields_count_ = 0;
  raw_fields_count_ = 0;
}

// Tell the compiler to never inline this: occasionally, the optimizer will
// decide to inline this and unroll the loop, making the compiled code more than
// 100KB larger.
V8_NOINLINE static void PrintJSONArray(size_t* array, const int len) {
  PrintF("[ ");
  for (int i = 0; i < len; i++) {
    PrintF("%zu", array[i]);
    if (i != (len - 1)) PrintF(", ");
  }
  PrintF(" ]");
}

V8_NOINLINE static void DumpJSONArray(std::stringstream& stream, size_t* array,
                                      const int len) {
  stream << PrintCollection(Vector<size_t>(array, len));
}

void ObjectStats::PrintKeyAndId(const char* key, int gc_count) {
  PrintF("\"isolate\": \"%p\", \"id\": %d, \"key\": \"%s\", ",
         reinterpret_cast<void*>(isolate()), gc_count, key);
}

void ObjectStats::PrintInstanceTypeJSON(const char* key, int gc_count,
                                        const char* name, int index) {
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"instance_type_data\", ");
  PrintF("\"instance_type\": %d, ", index);
  PrintF("\"instance_type_name\": \"%s\", ", name);
  PrintF("\"overall\": %zu, ", object_sizes_[index]);
  PrintF("\"count\": %zu, ", object_counts_[index]);
  PrintF("\"over_allocated\": %zu, ", over_allocated_[index]);
  PrintF("\"histogram\": ");
  PrintJSONArray(size_histogram_[index], kNumberOfBuckets);
  PrintF(",");
  PrintF("\"over_allocated_histogram\": ");
  PrintJSONArray(over_allocated_histogram_[index], kNumberOfBuckets);
  PrintF(" }\n");
}

void ObjectStats::PrintJSON(const char* key) {
  double time = isolate()->time_millis_since_init();
  int gc_count = heap()->gc_count();

  // gc_descriptor
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"gc_descriptor\", \"time\": %f }\n", time);
  // field_data
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"field_data\"");
  PrintF(", \"tagged_fields\": %zu", tagged_fields_count_ * kPointerSize);
  PrintF(", \"embedder_fields\": %zu", embedder_fields_count_ * kPointerSize);
  PrintF(", \"unboxed_double_fields\": %zu",
         unboxed_double_fields_count_ * kDoubleSize);
  PrintF(", \"other_raw_fields\": %zu", raw_fields_count_ * kPointerSize);
  PrintF(" }\n");
  // bucket_sizes
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"bucket_sizes\", \"sizes\": [ ");
  for (int i = 0; i < kNumberOfBuckets; i++) {
    PrintF("%d", 1 << (kFirstBucketShift + i));
    if (i != (kNumberOfBuckets - 1)) PrintF(", ");
  }
  PrintF(" ] }\n");

#define INSTANCE_TYPE_WRAPPER(name) \
  PrintInstanceTypeJSON(key, gc_count, #name, name);

#define VIRTUAL_INSTANCE_TYPE_WRAPPER(name) \
  PrintInstanceTypeJSON(key, gc_count, #name, FIRST_VIRTUAL_TYPE + name);

  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER)
  VIRTUAL_INSTANCE_TYPE_LIST(VIRTUAL_INSTANCE_TYPE_WRAPPER)

#undef INSTANCE_TYPE_WRAPPER
#undef VIRTUAL_INSTANCE_TYPE_WRAPPER
}

void ObjectStats::DumpInstanceTypeData(std::stringstream& stream,
                                       const char* name, int index) {
  stream << "\"" << name << "\":{";
  stream << "\"type\":" << static_cast<int>(index) << ",";
  stream << "\"overall\":" << object_sizes_[index] << ",";
  stream << "\"count\":" << object_counts_[index] << ",";
  stream << "\"over_allocated\":" << over_allocated_[index] << ",";
  stream << "\"histogram\":";
  DumpJSONArray(stream, size_histogram_[index], kNumberOfBuckets);
  stream << ",\"over_allocated_histogram\":";
  DumpJSONArray(stream, over_allocated_histogram_[index], kNumberOfBuckets);
  stream << "},";
}

void ObjectStats::Dump(std::stringstream& stream) {
  double time = isolate()->time_millis_since_init();
  int gc_count = heap()->gc_count();

  stream << "{";
  stream << "\"isolate\":\"" << reinterpret_cast<void*>(isolate()) << "\",";
  stream << "\"id\":" << gc_count << ",";
  stream << "\"time\":" << time << ",";

  // field_data
  stream << "\"field_data\":{";
  stream << "\"tagged_fields\":" << (tagged_fields_count_ * kPointerSize);
  stream << ",\"embedder_fields\":" << (embedder_fields_count_ * kPointerSize);
  stream << ",\"unboxed_double_fields\": "
         << (unboxed_double_fields_count_ * kDoubleSize);
  stream << ",\"other_raw_fields\":" << (raw_fields_count_ * kPointerSize);
  stream << "}, ";

  stream << "\"bucket_sizes\":[";
  for (int i = 0; i < kNumberOfBuckets; i++) {
    stream << (1 << (kFirstBucketShift + i));
    if (i != (kNumberOfBuckets - 1)) stream << ",";
  }
  stream << "],";
  stream << "\"type_data\":{";

#define INSTANCE_TYPE_WRAPPER(name) DumpInstanceTypeData(stream, #name, name);

#define VIRTUAL_INSTANCE_TYPE_WRAPPER(name) \
  DumpInstanceTypeData(stream, #name, FIRST_VIRTUAL_TYPE + name);

  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER);
  VIRTUAL_INSTANCE_TYPE_LIST(VIRTUAL_INSTANCE_TYPE_WRAPPER)
  stream << "\"END\":{}}}";

#undef INSTANCE_TYPE_WRAPPER
#undef VIRTUAL_INSTANCE_TYPE_WRAPPER
}

void ObjectStats::CheckpointObjectStats() {
  base::LockGuard<base::Mutex> lock_guard(object_stats_mutex.Pointer());
  MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
  MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
  ClearObjectStats();
}

namespace {

int Log2ForSize(size_t size) {
  DCHECK_GT(size, 0);
  return kSizetSize * 8 - 1 - base::bits::CountLeadingZeros(size);
}

}  // namespace

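// Maps an object size to a power-of-two histogram bucket. Sizes smaller than
// the first bucket land in bucket 0; oversized values saturate at the last
// bucket.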
int ObjectStats::HistogramIndexFromSize(size_t size) {
  if (size == 0) return 0;
  return Min(Max(Log2ForSize(size) + 1 - kFirstBucketShift, 0),
             kLastValueBucketIndex);
}

void ObjectStats::RecordObjectStats(InstanceType type, size_t size) {
  DCHECK_LE(type, LAST_TYPE);
  object_counts_[type]++;
  object_sizes_[type] += size;
  size_histogram_[type][HistogramIndexFromSize(size)]++;
}

void ObjectStats::RecordVirtualObjectStats(VirtualInstanceType type,
                                           size_t size, size_t over_allocated) {
  DCHECK_LE(type, LAST_VIRTUAL_TYPE);
  object_counts_[FIRST_VIRTUAL_TYPE + type]++;
  object_sizes_[FIRST_VIRTUAL_TYPE + type] += size;
  size_histogram_[FIRST_VIRTUAL_TYPE + type][HistogramIndexFromSize(size)]++;
  over_allocated_[FIRST_VIRTUAL_TYPE + type] += over_allocated;
  over_allocated_histogram_[FIRST_VIRTUAL_TYPE + type]
                           [HistogramIndexFromSize(size)]++;
}

Isolate* ObjectStats::isolate() { return heap()->isolate(); }

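// Walks the heap and attributes object sizes to instance types and virtual
// instance types. kPhase1 records the fine-grained virtual types (and
// remembers which objects it claimed); kPhase2 records the regular
// per-InstanceType stats for everything not already claimed in phase 1.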
class ObjectStatsCollectorImpl {
 public:
  enum Phase {
    kPhase1,
    kPhase2,
  };
  static const int kNumberOfPhases = kPhase2 + 1;

  ObjectStatsCollectorImpl(Heap* heap, ObjectStats* stats);

  void CollectGlobalStatistics();

  enum class CollectFieldStats { kNo, kYes };
  void CollectStatistics(HeapObject* obj, Phase phase,
                         CollectFieldStats collect_field_stats);

 private:
  enum CowMode {
    kCheckCow,
    kIgnoreCow,
  };

  Isolate* isolate() { return heap_->isolate(); }

  bool RecordVirtualObjectStats(HeapObject* parent, HeapObject* obj,
                                ObjectStats::VirtualInstanceType type,
                                size_t size, size_t over_allocated,
                                CowMode check_cow_array = kCheckCow);
  void RecordExternalResourceStats(Address resource,
                                   ObjectStats::VirtualInstanceType type,
                                   size_t size);
  // Gets the size from |obj| and assumes no over-allocation.
  bool RecordSimpleVirtualObjectStats(HeapObject* parent, HeapObject* obj,
                                      ObjectStats::VirtualInstanceType type);
  // For HashTable it is possible to compute over-allocated memory.
  void RecordHashTableVirtualObjectStats(HeapObject* parent,
                                         FixedArray* hash_table,
                                         ObjectStats::VirtualInstanceType type);

  bool SameLiveness(HeapObject* obj1, HeapObject* obj2);
  bool CanRecordFixedArray(FixedArrayBase* array);
  bool IsCowArray(FixedArrayBase* array);

  // Blacklist for objects that should not be recorded using
  // VirtualObjectStats and RecordSimpleVirtualObjectStats. For recording those
  // objects, dispatch to the low-level ObjectStats::RecordObjectStats manually.
  bool ShouldRecordObject(HeapObject* object, CowMode check_cow_array);

  void RecordObjectStats(HeapObject* obj, InstanceType type, size_t size);

  // Specific recursion into constant pool or embedded code objects. Records
  // FixedArrays and Tuple2.
  void RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
      HeapObject* parent, HeapObject* object,
      ObjectStats::VirtualInstanceType type);

  // Details.
  void RecordVirtualAllocationSiteDetails(AllocationSite* site);
  void RecordVirtualBytecodeArrayDetails(BytecodeArray* bytecode);
  void RecordVirtualCodeDetails(Code* code);
  void RecordVirtualContext(Context* context);
  void RecordVirtualFeedbackVectorDetails(FeedbackVector* vector);
  void RecordVirtualFixedArrayDetails(FixedArray* array);
  void RecordVirtualFunctionTemplateInfoDetails(FunctionTemplateInfo* fti);
  void RecordVirtualJSGlobalObjectDetails(JSGlobalObject* object);
  void RecordVirtualJSCollectionDetails(JSObject* object);
  void RecordVirtualJSObjectDetails(JSObject* object);
  void RecordVirtualMapDetails(Map* map);
  void RecordVirtualScriptDetails(Script* script);
  void RecordVirtualExternalStringDetails(ExternalString* string);
  void RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo* info);
  void RecordVirtualJSFunctionDetails(JSFunction* function);

  void RecordVirtualArrayBoilerplateDescription(
      ArrayBoilerplateDescription* description);

  Heap* heap_;
  ObjectStats* stats_;
  MarkCompactCollector::NonAtomicMarkingState* marking_state_;
  std::unordered_set<HeapObject*> virtual_objects_;
  std::unordered_set<Address> external_resources_;
  FieldStatsCollector field_stats_collector_;
};

ObjectStatsCollectorImpl::ObjectStatsCollectorImpl(Heap* heap,
                                                   ObjectStats* stats)
    : heap_(heap),
      stats_(stats),
      marking_state_(
          heap->mark_compact_collector()->non_atomic_marking_state()),
      field_stats_collector_(
          &stats->tagged_fields_count_, &stats->embedder_fields_count_,
          &stats->unboxed_double_fields_count_, &stats->raw_fields_count_) {}

bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject* obj,
                                                  CowMode check_cow_array) {
  if (obj->IsFixedArrayExact()) {
    FixedArray* fixed_array = FixedArray::cast(obj);
    bool cow_check = check_cow_array == kIgnoreCow || !IsCowArray(fixed_array);
    return CanRecordFixedArray(fixed_array) && cow_check;
  }
  if (obj == ReadOnlyRoots(heap_).empty_property_array()) return false;
  return true;
}

void ObjectStatsCollectorImpl::RecordHashTableVirtualObjectStats(
    HeapObject* parent, FixedArray* hash_table,
    ObjectStats::VirtualInstanceType type) {
  CHECK(hash_table->IsHashTable());
  // TODO(mlippautz): Implement over allocation for hash tables.
  RecordVirtualObjectStats(parent, hash_table, type, hash_table->Size(),
                           ObjectStats::kNoOverAllocation);
}

bool ObjectStatsCollectorImpl::RecordSimpleVirtualObjectStats(
    HeapObject* parent, HeapObject* obj,
    ObjectStats::VirtualInstanceType type) {
  return RecordVirtualObjectStats(parent, obj, type, obj->Size(),
                                  ObjectStats::kNoOverAllocation, kCheckCow);
}

bool ObjectStatsCollectorImpl::RecordVirtualObjectStats(
    HeapObject* parent, HeapObject* obj, ObjectStats::VirtualInstanceType type,
    size_t size, size_t over_allocated, CowMode check_cow_array) {
  if (!SameLiveness(parent, obj) || !ShouldRecordObject(obj, check_cow_array)) {
    return false;
  }

  if (virtual_objects_.find(obj) == virtual_objects_.end()) {
    virtual_objects_.insert(obj);
    stats_->RecordVirtualObjectStats(type, size, over_allocated);
    return true;
  }
  return false;
}

void ObjectStatsCollectorImpl::RecordExternalResourceStats(
    Address resource, ObjectStats::VirtualInstanceType type, size_t size) {
  if (external_resources_.find(resource) == external_resources_.end()) {
    external_resources_.insert(resource);
    stats_->RecordVirtualObjectStats(type, size, 0);
  }
}

void ObjectStatsCollectorImpl::RecordVirtualAllocationSiteDetails(
    AllocationSite* site) {
  if (!site->PointsToLiteral()) return;
  JSObject* boilerplate = site->boilerplate();
  if (boilerplate->IsJSArray()) {
    RecordSimpleVirtualObjectStats(site, boilerplate,
                                   ObjectStats::JS_ARRAY_BOILERPLATE_TYPE);
    // Array boilerplates cannot have properties.
  } else {
    RecordVirtualObjectStats(
        site, boilerplate, ObjectStats::JS_OBJECT_BOILERPLATE_TYPE,
        boilerplate->Size(), ObjectStats::kNoOverAllocation);
    if (boilerplate->HasFastProperties()) {
      // We'll mis-classify the empty_property_array here. Given that there is a
      // single instance, this is negligible.
      PropertyArray* properties = boilerplate->property_array();
      RecordSimpleVirtualObjectStats(
          site, properties, ObjectStats::BOILERPLATE_PROPERTY_ARRAY_TYPE);
    } else {
      NameDictionary* properties = boilerplate->property_dictionary();
      RecordSimpleVirtualObjectStats(
          site, properties, ObjectStats::BOILERPLATE_PROPERTY_DICTIONARY_TYPE);
    }
  }
  FixedArrayBase* elements = boilerplate->elements();
  RecordSimpleVirtualObjectStats(site, elements,
                                 ObjectStats::BOILERPLATE_ELEMENTS_TYPE);
}

void ObjectStatsCollectorImpl::RecordVirtualFunctionTemplateInfoDetails(
    FunctionTemplateInfo* fti) {
  // named_property_handler and indexed_property_handler are recorded as
  // INTERCEPTOR_INFO_TYPE.
  if (!fti->call_code()->IsUndefined(isolate())) {
    RecordSimpleVirtualObjectStats(
        fti, CallHandlerInfo::cast(fti->call_code()),
        ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
  }
  if (!fti->instance_call_handler()->IsUndefined(isolate())) {
    RecordSimpleVirtualObjectStats(
        fti, CallHandlerInfo::cast(fti->instance_call_handler()),
        ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
  }
}

    505     JSGlobalObject* object) {
    506   // Properties.
    507   GlobalDictionary* properties = object->global_dictionary();
    508   RecordHashTableVirtualObjectStats(object, properties,
    509                                     ObjectStats::GLOBAL_PROPERTIES_TYPE);
    510   // Elements.
    511   FixedArrayBase* elements = object->elements();
    512   RecordSimpleVirtualObjectStats(object, elements,
    513                                  ObjectStats::GLOBAL_ELEMENTS_TYPE);
    514 }
    515 
    516 void ObjectStatsCollectorImpl::RecordVirtualJSCollectionDetails(
    517     JSObject* object) {
    518   if (object->IsJSMap()) {
    519     RecordSimpleVirtualObjectStats(
    520         object, FixedArray::cast(JSMap::cast(object)->table()),
    521         ObjectStats::JS_COLLETION_TABLE_TYPE);
    522   }
    523   if (object->IsJSSet()) {
    524     RecordSimpleVirtualObjectStats(
    525         object, FixedArray::cast(JSSet::cast(object)->table()),
    526         ObjectStats::JS_COLLETION_TABLE_TYPE);
    527   }
    528 }
    529 
    530 void ObjectStatsCollectorImpl::RecordVirtualJSObjectDetails(JSObject* object) {
    531   // JSGlobalObject is recorded separately.
    532   if (object->IsJSGlobalObject()) return;
    533 
    534   // Properties.
    535   if (object->HasFastProperties()) {
    536     PropertyArray* properties = object->property_array();
    537     CHECK_EQ(PROPERTY_ARRAY_TYPE, properties->map()->instance_type());
    538   } else {
    539     NameDictionary* properties = object->property_dictionary();
    540     RecordHashTableVirtualObjectStats(
    541         object, properties, ObjectStats::OBJECT_PROPERTY_DICTIONARY_TYPE);
    542   }
    543   // Elements.
    544   FixedArrayBase* elements = object->elements();
    545   RecordSimpleVirtualObjectStats(object, elements, ObjectStats::ELEMENTS_TYPE);
    546 }
    547 
    548 static ObjectStats::VirtualInstanceType GetFeedbackSlotType(
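// Classifies a feedback vector slot into a virtual instance type based on its
// kind and on whether it still holds one of the sentinel symbols (i.e. is
// unused).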
    549     MaybeObject* maybe_obj, FeedbackSlotKind kind, Isolate* isolate) {
    550   if (maybe_obj->IsClearedWeakHeapObject())
    551     return ObjectStats::FEEDBACK_VECTOR_SLOT_OTHER_TYPE;
    552   Object* obj = maybe_obj->GetHeapObjectOrSmi();
    553   switch (kind) {
    554     case FeedbackSlotKind::kCall:
    555       if (obj == *isolate->factory()->uninitialized_symbol() ||
    556           obj == *isolate->factory()->premonomorphic_symbol()) {
    557         return ObjectStats::FEEDBACK_VECTOR_SLOT_CALL_UNUSED_TYPE;
    558       }
    559       return ObjectStats::FEEDBACK_VECTOR_SLOT_CALL_TYPE;
    560 
    561     case FeedbackSlotKind::kLoadProperty:
    562     case FeedbackSlotKind::kLoadGlobalInsideTypeof:
    563     case FeedbackSlotKind::kLoadGlobalNotInsideTypeof:
    564     case FeedbackSlotKind::kLoadKeyed:
    565       if (obj == *isolate->factory()->uninitialized_symbol() ||
    566           obj == *isolate->factory()->premonomorphic_symbol()) {
    567         return ObjectStats::FEEDBACK_VECTOR_SLOT_LOAD_UNUSED_TYPE;
    568       }
    569       return ObjectStats::FEEDBACK_VECTOR_SLOT_LOAD_TYPE;
    570 
    571     case FeedbackSlotKind::kStoreNamedSloppy:
    572     case FeedbackSlotKind::kStoreNamedStrict:
    573     case FeedbackSlotKind::kStoreOwnNamed:
    574     case FeedbackSlotKind::kStoreGlobalSloppy:
    575     case FeedbackSlotKind::kStoreGlobalStrict:
    576     case FeedbackSlotKind::kStoreKeyedSloppy:
    577     case FeedbackSlotKind::kStoreKeyedStrict:
    578       if (obj == *isolate->factory()->uninitialized_symbol() ||
    579           obj == *isolate->factory()->premonomorphic_symbol()) {
    580         return ObjectStats::FEEDBACK_VECTOR_SLOT_STORE_UNUSED_TYPE;
    581       }
    582       return ObjectStats::FEEDBACK_VECTOR_SLOT_STORE_TYPE;
    583 
    584     case FeedbackSlotKind::kBinaryOp:
    585     case FeedbackSlotKind::kCompareOp:
    586       return ObjectStats::FEEDBACK_VECTOR_SLOT_ENUM_TYPE;
    587 
    588     default:
    589       return ObjectStats::FEEDBACK_VECTOR_SLOT_OTHER_TYPE;
    590   }
    591 }

void ObjectStatsCollectorImpl::RecordVirtualFeedbackVectorDetails(
    FeedbackVector* vector) {
  if (virtual_objects_.find(vector) == virtual_objects_.end()) {
    // Manually insert the feedback vector into the virtual object list, since
    // we're logging its component parts separately.
    virtual_objects_.insert(vector);

    size_t calculated_size = 0;

    // Log the feedback vector's header (fixed fields).
    size_t header_size =
        reinterpret_cast<Address>(vector->slots_start()) - vector->address();
    stats_->RecordVirtualObjectStats(ObjectStats::FEEDBACK_VECTOR_HEADER_TYPE,
                                     header_size,
                                     ObjectStats::kNoOverAllocation);
    calculated_size += header_size;

    // Iterate over the feedback slots and log each one.
    if (!vector->shared_function_info()->HasFeedbackMetadata()) return;

    FeedbackMetadataIterator it(vector->metadata());
    while (it.HasNext()) {
      FeedbackSlot slot = it.Next();
      // Log the entry (or entries) taken up by this slot.
      size_t slot_size = it.entry_size() * kPointerSize;
      stats_->RecordVirtualObjectStats(
          GetFeedbackSlotType(vector->Get(slot), it.kind(), heap_->isolate()),
          slot_size, ObjectStats::kNoOverAllocation);
      calculated_size += slot_size;

      // Log the monomorphic/polymorphic helper objects that this slot owns.
      for (int i = 0; i < it.entry_size(); i++) {
        MaybeObject* raw_object = vector->get(slot.ToInt() + i);
        if (!raw_object->IsStrongOrWeakHeapObject()) continue;
        HeapObject* object = raw_object->GetHeapObject();
        if (object->IsCell() || object->IsWeakFixedArray()) {
          RecordSimpleVirtualObjectStats(
              vector, object, ObjectStats::FEEDBACK_VECTOR_ENTRY_TYPE);
        }
      }
    }

    CHECK_EQ(calculated_size, vector->Size());
  }
}

void ObjectStatsCollectorImpl::RecordVirtualFixedArrayDetails(
    FixedArray* array) {
  if (IsCowArray(array)) {
    RecordVirtualObjectStats(nullptr, array, ObjectStats::COW_ARRAY_TYPE,
                             array->Size(), ObjectStats::kNoOverAllocation,
                             kIgnoreCow);
  }
}

void ObjectStatsCollectorImpl::CollectStatistics(
    HeapObject* obj, Phase phase, CollectFieldStats collect_field_stats) {
  Map* map = obj->map();
  switch (phase) {
    case kPhase1:
      if (obj->IsFeedbackVector()) {
        RecordVirtualFeedbackVectorDetails(FeedbackVector::cast(obj));
      } else if (obj->IsMap()) {
        RecordVirtualMapDetails(Map::cast(obj));
      } else if (obj->IsBytecodeArray()) {
        RecordVirtualBytecodeArrayDetails(BytecodeArray::cast(obj));
      } else if (obj->IsCode()) {
        RecordVirtualCodeDetails(Code::cast(obj));
      } else if (obj->IsFunctionTemplateInfo()) {
        RecordVirtualFunctionTemplateInfoDetails(
            FunctionTemplateInfo::cast(obj));
      } else if (obj->IsJSFunction()) {
        RecordVirtualJSFunctionDetails(JSFunction::cast(obj));
      } else if (obj->IsJSGlobalObject()) {
        RecordVirtualJSGlobalObjectDetails(JSGlobalObject::cast(obj));
      } else if (obj->IsJSObject()) {
        // This phase needs to come after RecordVirtualAllocationSiteDetails
        // to properly split among boilerplates.
        RecordVirtualJSObjectDetails(JSObject::cast(obj));
      } else if (obj->IsJSCollection()) {
        RecordVirtualJSCollectionDetails(JSObject::cast(obj));
      } else if (obj->IsSharedFunctionInfo()) {
        RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo::cast(obj));
      } else if (obj->IsContext()) {
        RecordVirtualContext(Context::cast(obj));
      } else if (obj->IsScript()) {
        RecordVirtualScriptDetails(Script::cast(obj));
      } else if (obj->IsExternalString()) {
        RecordVirtualExternalStringDetails(ExternalString::cast(obj));
      } else if (obj->IsArrayBoilerplateDescription()) {
        RecordVirtualArrayBoilerplateDescription(
            ArrayBoilerplateDescription::cast(obj));
      } else if (obj->IsFixedArrayExact()) {
        // Has to go last as it triggers too eagerly.
        RecordVirtualFixedArrayDetails(FixedArray::cast(obj));
      }
      break;
    case kPhase2:
      RecordObjectStats(obj, map->instance_type(), obj->Size());
      if (collect_field_stats == CollectFieldStats::kYes) {
        field_stats_collector_.RecordStats(obj);
      }
      break;
  }
}

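// Records virtual stats for well-known global arrays, caches, and lists that
// are held directly by the heap rather than by an individual parent object.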
void ObjectStatsCollectorImpl::CollectGlobalStatistics() {
  // Iterate boilerplates first to disambiguate them from regular JS objects.
  Object* list = heap_->allocation_sites_list();
  while (list->IsAllocationSite()) {
    AllocationSite* site = AllocationSite::cast(list);
    RecordVirtualAllocationSiteDetails(site);
    list = site->weak_next();
  }

  // FixedArray.
  RecordSimpleVirtualObjectStats(nullptr, heap_->serialized_objects(),
                                 ObjectStats::SERIALIZED_OBJECTS_TYPE);
  RecordSimpleVirtualObjectStats(nullptr, heap_->number_string_cache(),
                                 ObjectStats::NUMBER_STRING_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(
      nullptr, heap_->single_character_string_cache(),
      ObjectStats::SINGLE_CHARACTER_STRING_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(nullptr, heap_->string_split_cache(),
                                 ObjectStats::STRING_SPLIT_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(nullptr, heap_->regexp_multiple_cache(),
                                 ObjectStats::REGEXP_MULTIPLE_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(nullptr, heap_->retained_maps(),
                                 ObjectStats::RETAINED_MAPS_TYPE);

  // WeakArrayList.
  RecordSimpleVirtualObjectStats(
      nullptr, WeakArrayList::cast(heap_->noscript_shared_function_infos()),
      ObjectStats::NOSCRIPT_SHARED_FUNCTION_INFOS_TYPE);
  RecordSimpleVirtualObjectStats(nullptr,
                                 WeakArrayList::cast(heap_->script_list()),
                                 ObjectStats::SCRIPT_LIST_TYPE);

  // HashTable.
  RecordHashTableVirtualObjectStats(nullptr, heap_->code_stubs(),
                                    ObjectStats::CODE_STUBS_TABLE_TYPE);
}

void ObjectStatsCollectorImpl::RecordObjectStats(HeapObject* obj,
                                                 InstanceType type,
                                                 size_t size) {
  if (virtual_objects_.find(obj) == virtual_objects_.end()) {
    stats_->RecordObjectStats(type, size);
  }
}

bool ObjectStatsCollectorImpl::CanRecordFixedArray(FixedArrayBase* array) {
  ReadOnlyRoots roots(heap_);
  return array != roots.empty_fixed_array() &&
         array != roots.empty_sloppy_arguments_elements() &&
         array != roots.empty_slow_element_dictionary() &&
         array != heap_->empty_property_dictionary();
}

bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase* array) {
  return array->map() == ReadOnlyRoots(heap_).fixed_cow_array_map();
}

bool ObjectStatsCollectorImpl::SameLiveness(HeapObject* obj1,
                                            HeapObject* obj2) {
  return obj1 == nullptr || obj2 == nullptr ||
         marking_state_->Color(obj1) == marking_state_->Color(obj2);
}

void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map* map) {
  // TODO(mlippautz): map->dependent_code(): DEPENDENT_CODE_TYPE.

  DescriptorArray* array = map->instance_descriptors();
  if (map->owns_descriptors() &&
      array != ReadOnlyRoots(heap_).empty_descriptor_array()) {
    // DescriptorArray has its own instance type.
    EnumCache* enum_cache = array->GetEnumCache();
    RecordSimpleVirtualObjectStats(array, enum_cache->keys(),
                                   ObjectStats::ENUM_CACHE_TYPE);
    RecordSimpleVirtualObjectStats(array, enum_cache->indices(),
                                   ObjectStats::ENUM_INDICES_CACHE_TYPE);
  }

  if (map->is_prototype_map()) {
    if (map->prototype_info()->IsPrototypeInfo()) {
      PrototypeInfo* info = PrototypeInfo::cast(map->prototype_info());
      Object* users = info->prototype_users();
      if (users->IsWeakFixedArray()) {
        RecordSimpleVirtualObjectStats(map, WeakArrayList::cast(users),
                                       ObjectStats::PROTOTYPE_USERS_TYPE);
      }
    }
  }
}

void ObjectStatsCollectorImpl::RecordVirtualScriptDetails(Script* script) {
  RecordSimpleVirtualObjectStats(
      script, script->shared_function_infos(),
      ObjectStats::SCRIPT_SHARED_FUNCTION_INFOS_TYPE);

  // Log the size of external source code.
  Object* raw_source = script->source();
  if (raw_source->IsExternalString()) {
    // The contents of external strings aren't on the heap, so we have to record
    // them manually. The on-heap String object is recorded independently in
    // the normal pass.
    ExternalString* string = ExternalString::cast(raw_source);
    Address resource = string->resource_as_address();
    size_t off_heap_size = string->ExternalPayloadSize();
    RecordExternalResourceStats(
        resource,
        string->IsOneByteRepresentation()
            ? ObjectStats::SCRIPT_SOURCE_EXTERNAL_ONE_BYTE_TYPE
            : ObjectStats::SCRIPT_SOURCE_EXTERNAL_TWO_BYTE_TYPE,
        off_heap_size);
  } else if (raw_source->IsString()) {
    String* source = String::cast(raw_source);
    RecordSimpleVirtualObjectStats(
        script, HeapObject::cast(raw_source),
        source->IsOneByteRepresentation()
            ? ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_ONE_BYTE_TYPE
            : ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_TWO_BYTE_TYPE);
  }
}

void ObjectStatsCollectorImpl::RecordVirtualExternalStringDetails(
    ExternalString* string) {
  // Track the external string resource size in a separate category.

  Address resource = string->resource_as_address();
  size_t off_heap_size = string->ExternalPayloadSize();
  RecordExternalResourceStats(
      resource,
      string->IsOneByteRepresentation()
          ? ObjectStats::STRING_EXTERNAL_RESOURCE_ONE_BYTE_TYPE
          : ObjectStats::STRING_EXTERNAL_RESOURCE_TWO_BYTE_TYPE,
      off_heap_size);
}

void ObjectStatsCollectorImpl::RecordVirtualSharedFunctionInfoDetails(
    SharedFunctionInfo* info) {
  // Uncompiled SharedFunctionInfo gets its own category.
  if (!info->is_compiled()) {
    RecordSimpleVirtualObjectStats(
        nullptr, info, ObjectStats::UNCOMPILED_SHARED_FUNCTION_INFO_TYPE);
  }
}

void ObjectStatsCollectorImpl::RecordVirtualJSFunctionDetails(
    JSFunction* function) {
  // Uncompiled JSFunctions get their own category.
  if (!function->is_compiled()) {
    RecordSimpleVirtualObjectStats(nullptr, function,
                                   ObjectStats::UNCOMPILED_JS_FUNCTION_TYPE);
  }
}

void ObjectStatsCollectorImpl::RecordVirtualArrayBoilerplateDescription(
    ArrayBoilerplateDescription* description) {
  RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
      description, description->constant_elements(),
      ObjectStats::ARRAY_BOILERPLATE_DESCRIPTION_ELEMENTS_TYPE);
}

void ObjectStatsCollectorImpl::
    RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
        HeapObject* parent, HeapObject* object,
        ObjectStats::VirtualInstanceType type) {
  if (!RecordSimpleVirtualObjectStats(parent, object, type)) return;
  if (object->IsFixedArrayExact()) {
    FixedArray* array = FixedArray::cast(object);
    for (int i = 0; i < array->length(); i++) {
      Object* entry = array->get(i);
      if (!entry->IsHeapObject()) continue;
      RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
          array, HeapObject::cast(entry), type);
    }
  }
}

void ObjectStatsCollectorImpl::RecordVirtualBytecodeArrayDetails(
    BytecodeArray* bytecode) {
  RecordSimpleVirtualObjectStats(
      bytecode, bytecode->constant_pool(),
      ObjectStats::BYTECODE_ARRAY_CONSTANT_POOL_TYPE);
  // FixedArrays in the constant pool are used for holding descriptor
  // information. They are shared with optimized code.
  FixedArray* constant_pool = FixedArray::cast(bytecode->constant_pool());
  for (int i = 0; i < constant_pool->length(); i++) {
    Object* entry = constant_pool->get(i);
    if (entry->IsFixedArrayExact()) {
      RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
          constant_pool, HeapObject::cast(entry),
          ObjectStats::EMBEDDED_OBJECT_TYPE);
    }
  }
  RecordSimpleVirtualObjectStats(
      bytecode, bytecode->handler_table(),
      ObjectStats::BYTECODE_ARRAY_HANDLER_TABLE_TYPE);
}

namespace {

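// Maps a Code::Kind to the corresponding ObjectStats virtual instance type.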
ObjectStats::VirtualInstanceType CodeKindToVirtualInstanceType(
    Code::Kind kind) {
  switch (kind) {
#define CODE_KIND_CASE(type) \
  case Code::type:           \
    return ObjectStats::type;
    CODE_KIND_LIST(CODE_KIND_CASE)
#undef CODE_KIND_CASE
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
}

}  // namespace

void ObjectStatsCollectorImpl::RecordVirtualCodeDetails(Code* code) {
  RecordSimpleVirtualObjectStats(nullptr, code,
                                 CodeKindToVirtualInstanceType(code->kind()));
  RecordSimpleVirtualObjectStats(code, code->deoptimization_data(),
                                 ObjectStats::DEOPTIMIZATION_DATA_TYPE);
  RecordSimpleVirtualObjectStats(code, code->relocation_info(),
                                 ObjectStats::RELOC_INFO_TYPE);
  Object* source_position_table = code->source_position_table();
  if (source_position_table->IsSourcePositionTableWithFrameCache()) {
    RecordSimpleVirtualObjectStats(
        code,
        SourcePositionTableWithFrameCache::cast(source_position_table)
            ->source_position_table(),
        ObjectStats::SOURCE_POSITION_TABLE_TYPE);
  } else if (source_position_table->IsHeapObject()) {
    RecordSimpleVirtualObjectStats(code,
                                   HeapObject::cast(source_position_table),
                                   ObjectStats::SOURCE_POSITION_TABLE_TYPE);
  }
  if (code->kind() == Code::Kind::OPTIMIZED_FUNCTION) {
    DeoptimizationData* input_data =
        DeoptimizationData::cast(code->deoptimization_data());
    if (input_data->length() > 0) {
      RecordSimpleVirtualObjectStats(code->deoptimization_data(),
                                     input_data->LiteralArray(),
                                     ObjectStats::OPTIMIZED_CODE_LITERALS_TYPE);
    }
  }
  int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::EMBEDDED_OBJECT) {
      Object* target = it.rinfo()->target_object();
      if (target->IsFixedArrayExact()) {
        RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
            code, HeapObject::cast(target), ObjectStats::EMBEDDED_OBJECT_TYPE);
      }
    }
  }
}

void ObjectStatsCollectorImpl::RecordVirtualContext(Context* context) {
  if (context->IsNativeContext()) {
    RecordObjectStats(context, NATIVE_CONTEXT_TYPE, context->Size());
  } else if (context->IsFunctionContext()) {
    RecordObjectStats(context, FUNCTION_CONTEXT_TYPE, context->Size());
  } else {
    RecordSimpleVirtualObjectStats(nullptr, context,
                                   ObjectStats::OTHER_CONTEXT_TYPE);
  }
}

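// Heap visitor that dispatches each object to either the live or the dead
// collector, based on the object's mark bit.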
class ObjectStatsVisitor {
 public:
  ObjectStatsVisitor(Heap* heap, ObjectStatsCollectorImpl* live_collector,
                     ObjectStatsCollectorImpl* dead_collector,
                     ObjectStatsCollectorImpl::Phase phase)
      : live_collector_(live_collector),
        dead_collector_(dead_collector),
        marking_state_(
            heap->mark_compact_collector()->non_atomic_marking_state()),
        phase_(phase) {}

  bool Visit(HeapObject* obj, int size) {
    if (marking_state_->IsBlack(obj)) {
      live_collector_->CollectStatistics(
          obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
    } else {
      DCHECK(!marking_state_->IsGrey(obj));
      dead_collector_->CollectStatistics(
          obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kNo);
    }
    return true;
  }

 private:
  ObjectStatsCollectorImpl* live_collector_;
  ObjectStatsCollectorImpl* dead_collector_;
  MarkCompactCollector::NonAtomicMarkingState* marking_state_;
  ObjectStatsCollectorImpl::Phase phase_;
};

namespace {

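// Applies |visitor| to every object in every space of |heap|.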
void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) {
  SpaceIterator space_it(heap);
  HeapObject* obj = nullptr;
  while (space_it.has_next()) {
    std::unique_ptr<ObjectIterator> it(space_it.next()->GetObjectIterator());
    ObjectIterator* obj_it = it.get();
    while ((obj = obj_it->Next()) != nullptr) {
      visitor->Visit(obj, obj->Size());
    }
  }
}

}  // namespace

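// Runs both collection phases over the entire heap, recording live objects
// into |live_| and dead (unmarked) objects into |dead_|.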
void ObjectStatsCollector::Collect() {
  ObjectStatsCollectorImpl live_collector(heap_, live_);
  ObjectStatsCollectorImpl dead_collector(heap_, dead_);
  live_collector.CollectGlobalStatistics();
  for (int i = 0; i < ObjectStatsCollectorImpl::kNumberOfPhases; i++) {
    ObjectStatsVisitor visitor(heap_, &live_collector, &dead_collector,
                               static_cast<ObjectStatsCollectorImpl::Phase>(i));
    IterateHeap(heap_, &visitor);
  }
}

}  // namespace internal
}  // namespace v8