1 // Copyright 2013 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #include "src/profiler/heap-snapshot-generator.h" 6 7 #include <utility> 8 9 #include "src/api-inl.h" 10 #include "src/code-stubs.h" 11 #include "src/conversions.h" 12 #include "src/debug/debug.h" 13 #include "src/global-handles.h" 14 #include "src/layout-descriptor.h" 15 #include "src/objects-body-descriptors.h" 16 #include "src/objects-inl.h" 17 #include "src/objects/api-callbacks.h" 18 #include "src/objects/hash-table-inl.h" 19 #include "src/objects/js-array-buffer-inl.h" 20 #include "src/objects/js-array-inl.h" 21 #include "src/objects/js-collection-inl.h" 22 #include "src/objects/js-generator-inl.h" 23 #include "src/objects/js-promise-inl.h" 24 #include "src/objects/js-regexp-inl.h" 25 #include "src/objects/literal-objects-inl.h" 26 #include "src/profiler/allocation-tracker.h" 27 #include "src/profiler/heap-profiler.h" 28 #include "src/profiler/heap-snapshot-generator-inl.h" 29 #include "src/prototype.h" 30 #include "src/transitions.h" 31 #include "src/visitors.h" 32 33 namespace v8 { 34 namespace internal { 35 36 37 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to) 38 : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)), 39 to_index_(to), 40 name_(name) { 41 DCHECK(type == kContextVariable 42 || type == kProperty 43 || type == kInternal 44 || type == kShortcut 45 || type == kWeak); 46 } 47 48 49 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to) 50 : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)), 51 to_index_(to), 52 index_(index) { 53 DCHECK(type == kElement || type == kHidden); 54 } 55 56 57 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) { 58 to_entry_ = &snapshot->entries()[to_index_]; 59 } 60 61 62 const int HeapEntry::kNoEntry = -1; 63 64 HeapEntry::HeapEntry(HeapSnapshot* snapshot, 65 Type type, 66 const char* name, 67 SnapshotObjectId id, 68 size_t self_size, 69 unsigned trace_node_id) 70 : type_(type), 71 children_count_(0), 72 children_index_(-1), 73 self_size_(self_size), 74 snapshot_(snapshot), 75 name_(name), 76 id_(id), 77 trace_node_id_(trace_node_id) { } 78 79 80 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type, 81 const char* name, 82 HeapEntry* entry) { 83 HeapGraphEdge edge(type, name, this->index(), entry->index()); 84 snapshot_->edges().push_back(edge); 85 ++children_count_; 86 } 87 88 89 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type, 90 int index, 91 HeapEntry* entry) { 92 HeapGraphEdge edge(type, index, this->index(), entry->index()); 93 snapshot_->edges().push_back(edge); 94 ++children_count_; 95 } 96 97 98 void HeapEntry::Print( 99 const char* prefix, const char* edge_name, int max_depth, int indent) { 100 STATIC_ASSERT(sizeof(unsigned) == sizeof(id())); 101 base::OS::Print("%6" PRIuS " @%6u %*c %s%s: ", self_size(), id(), indent, ' ', 102 prefix, edge_name); 103 if (type() != kString) { 104 base::OS::Print("%s %.40s\n", TypeAsString(), name_); 105 } else { 106 base::OS::Print("\""); 107 const char* c = name_; 108 while (*c && (c - name_) <= 40) { 109 if (*c != '\n') 110 base::OS::Print("%c", *c); 111 else 112 base::OS::Print("\\n"); 113 ++c; 114 } 115 base::OS::Print("\"\n"); 116 } 117 if (--max_depth == 0) return; 118 for (auto i = children_begin(); i != children_end(); ++i) { 119 HeapGraphEdge& edge = **i; 120 const char* edge_prefix = ""; 121 EmbeddedVector<char, 64> index; 122 const char* edge_name = index.start(); 123 switch (edge.type()) { 124 case HeapGraphEdge::kContextVariable: 125 edge_prefix = "#"; 126 edge_name = edge.name(); 127 break; 128 case HeapGraphEdge::kElement: 129 SNPrintF(index, "%d", edge.index()); 130 break; 131 case HeapGraphEdge::kInternal: 132 edge_prefix = "$"; 133 edge_name = edge.name(); 134 break; 135 case HeapGraphEdge::kProperty: 136 edge_name = edge.name(); 137 break; 138 case HeapGraphEdge::kHidden: 139 edge_prefix = "$"; 140 SNPrintF(index, "%d", edge.index()); 141 break; 142 case HeapGraphEdge::kShortcut: 143 edge_prefix = "^"; 144 edge_name = edge.name(); 145 break; 146 case HeapGraphEdge::kWeak: 147 edge_prefix = "w"; 148 edge_name = edge.name(); 149 break; 150 default: 151 SNPrintF(index, "!!! unknown edge type: %d ", edge.type()); 152 } 153 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2); 154 } 155 } 156 157 158 const char* HeapEntry::TypeAsString() { 159 switch (type()) { 160 case kHidden: return "/hidden/"; 161 case kObject: return "/object/"; 162 case kClosure: return "/closure/"; 163 case kString: return "/string/"; 164 case kCode: return "/code/"; 165 case kArray: return "/array/"; 166 case kRegExp: return "/regexp/"; 167 case kHeapNumber: return "/number/"; 168 case kNative: return "/native/"; 169 case kSynthetic: return "/synthetic/"; 170 case kConsString: return "/concatenated string/"; 171 case kSlicedString: return "/sliced string/"; 172 case kSymbol: return "/symbol/"; 173 case kBigInt: 174 return "/bigint/"; 175 default: return "???"; 176 } 177 } 178 179 180 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler) 181 : profiler_(profiler), 182 root_index_(HeapEntry::kNoEntry), 183 gc_roots_index_(HeapEntry::kNoEntry), 184 max_snapshot_js_object_id_(0) { 185 // It is very important to keep objects that form a heap snapshot 186 // as small as possible. Check assumptions about data structure sizes. 187 STATIC_ASSERT(((kPointerSize == 4) && (sizeof(HeapGraphEdge) == 12)) || 188 ((kPointerSize == 8) && (sizeof(HeapGraphEdge) == 24))); 189 STATIC_ASSERT(((kPointerSize == 4) && (sizeof(HeapEntry) == 28)) || 190 ((kPointerSize == 8) && (sizeof(HeapEntry) == 40))); 191 for (int i = 0; i < static_cast<int>(Root::kNumberOfRoots); ++i) { 192 gc_subroot_indexes_[i] = HeapEntry::kNoEntry; 193 } 194 } 195 196 197 void HeapSnapshot::Delete() { 198 profiler_->RemoveSnapshot(this); 199 } 200 201 202 void HeapSnapshot::RememberLastJSObjectId() { 203 max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id(); 204 } 205 206 207 void HeapSnapshot::AddSyntheticRootEntries() { 208 AddRootEntry(); 209 AddGcRootsEntry(); 210 SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId; 211 for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) { 212 AddGcSubrootEntry(static_cast<Root>(root), id); 213 id += HeapObjectsMap::kObjectIdStep; 214 } 215 DCHECK_EQ(HeapObjectsMap::kFirstAvailableObjectId, id); 216 } 217 218 219 HeapEntry* HeapSnapshot::AddRootEntry() { 220 DCHECK_EQ(root_index_, HeapEntry::kNoEntry); 221 DCHECK(entries_.empty()); // Root entry must be the first one. 222 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic, 223 "", 224 HeapObjectsMap::kInternalRootObjectId, 225 0, 226 0); 227 root_index_ = entry->index(); 228 DCHECK_EQ(root_index_, 0); 229 return entry; 230 } 231 232 233 HeapEntry* HeapSnapshot::AddGcRootsEntry() { 234 DCHECK_EQ(gc_roots_index_, HeapEntry::kNoEntry); 235 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic, 236 "(GC roots)", 237 HeapObjectsMap::kGcRootsObjectId, 238 0, 239 0); 240 gc_roots_index_ = entry->index(); 241 return entry; 242 } 243 244 HeapEntry* HeapSnapshot::AddGcSubrootEntry(Root root, SnapshotObjectId id) { 245 DCHECK_EQ(gc_subroot_indexes_[static_cast<int>(root)], HeapEntry::kNoEntry); 246 HeapEntry* entry = 247 AddEntry(HeapEntry::kSynthetic, RootVisitor::RootName(root), id, 0, 0); 248 gc_subroot_indexes_[static_cast<int>(root)] = entry->index(); 249 return entry; 250 } 251 252 void HeapSnapshot::AddLocation(int entry, int scriptId, int line, int col) { 253 locations_.emplace_back(entry, scriptId, line, col); 254 } 255 256 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type, 257 const char* name, 258 SnapshotObjectId id, 259 size_t size, 260 unsigned trace_node_id) { 261 DCHECK(sorted_entries_.empty()); 262 entries_.emplace_back(this, type, name, id, size, trace_node_id); 263 return &entries_.back(); 264 } 265 266 267 void HeapSnapshot::FillChildren() { 268 DCHECK(children().empty()); 269 children().resize(edges().size()); 270 int children_index = 0; 271 for (HeapEntry& entry : entries()) { 272 children_index = entry.set_children_index(children_index); 273 } 274 DCHECK_EQ(edges().size(), static_cast<size_t>(children_index)); 275 for (HeapGraphEdge& edge : edges()) { 276 edge.ReplaceToIndexWithEntry(this); 277 edge.from()->add_child(&edge); 278 } 279 } 280 281 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) { 282 std::vector<HeapEntry*>* entries_by_id = GetSortedEntriesList(); 283 284 auto it = std::lower_bound( 285 entries_by_id->begin(), entries_by_id->end(), id, 286 [](HeapEntry* first, SnapshotObjectId val) { return first->id() < val; }); 287 288 if (it == entries_by_id->end() || (*it)->id() != id) return nullptr; 289 return *it; 290 } 291 292 struct SortByIds { 293 bool operator()(const HeapEntry* entry1_ptr, const HeapEntry* entry2_ptr) { 294 return entry1_ptr->id() < entry2_ptr->id(); 295 } 296 }; 297 298 std::vector<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() { 299 if (sorted_entries_.empty()) { 300 sorted_entries_.reserve(entries_.size()); 301 for (HeapEntry& entry : entries_) { 302 sorted_entries_.push_back(&entry); 303 } 304 std::sort(sorted_entries_.begin(), sorted_entries_.end(), SortByIds()); 305 } 306 return &sorted_entries_; 307 } 308 309 void HeapSnapshot::Print(int max_depth) { 310 root()->Print("", "", max_depth, 0); 311 } 312 313 // We split IDs on evens for embedder objects (see 314 // HeapObjectsMap::GenerateId) and odds for native objects. 315 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1; 316 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId = 317 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep; 318 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId = 319 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep; 320 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId = 321 HeapObjectsMap::kGcRootsFirstSubrootId + 322 static_cast<int>(Root::kNumberOfRoots) * HeapObjectsMap::kObjectIdStep; 323 324 HeapObjectsMap::HeapObjectsMap(Heap* heap) 325 : next_id_(kFirstAvailableObjectId), heap_(heap) { 326 // The dummy element at zero index is needed as entries_map_ cannot hold 327 // an entry with zero value. Otherwise it's impossible to tell if 328 // LookupOrInsert has added a new item or just returning exisiting one 329 // having the value of zero. 330 entries_.emplace_back(0, kNullAddress, 0, true); 331 } 332 333 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) { 334 DCHECK_NE(kNullAddress, to); 335 DCHECK_NE(kNullAddress, from); 336 if (from == to) return false; 337 void* from_value = entries_map_.Remove(reinterpret_cast<void*>(from), 338 ComputeAddressHash(from)); 339 if (from_value == nullptr) { 340 // It may occur that some untracked object moves to an address X and there 341 // is a tracked object at that address. In this case we should remove the 342 // entry as we know that the object has died. 343 void* to_value = entries_map_.Remove(reinterpret_cast<void*>(to), 344 ComputeAddressHash(to)); 345 if (to_value != nullptr) { 346 int to_entry_info_index = 347 static_cast<int>(reinterpret_cast<intptr_t>(to_value)); 348 entries_.at(to_entry_info_index).addr = kNullAddress; 349 } 350 } else { 351 base::HashMap::Entry* to_entry = entries_map_.LookupOrInsert( 352 reinterpret_cast<void*>(to), ComputeAddressHash(to)); 353 if (to_entry->value != nullptr) { 354 // We found the existing entry with to address for an old object. 355 // Without this operation we will have two EntryInfo's with the same 356 // value in addr field. It is bad because later at RemoveDeadEntries 357 // one of this entry will be removed with the corresponding entries_map_ 358 // entry. 359 int to_entry_info_index = 360 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value)); 361 entries_.at(to_entry_info_index).addr = kNullAddress; 362 } 363 int from_entry_info_index = 364 static_cast<int>(reinterpret_cast<intptr_t>(from_value)); 365 entries_.at(from_entry_info_index).addr = to; 366 // Size of an object can change during its life, so to keep information 367 // about the object in entries_ consistent, we have to adjust size when the 368 // object is migrated. 369 if (FLAG_heap_profiler_trace_objects) { 370 PrintF("Move object from %p to %p old size %6d new size %6d\n", 371 reinterpret_cast<void*>(from), reinterpret_cast<void*>(to), 372 entries_.at(from_entry_info_index).size, object_size); 373 } 374 entries_.at(from_entry_info_index).size = object_size; 375 to_entry->value = from_value; 376 } 377 return from_value != nullptr; 378 } 379 380 381 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) { 382 FindOrAddEntry(addr, size, false); 383 } 384 385 386 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) { 387 base::HashMap::Entry* entry = entries_map_.Lookup( 388 reinterpret_cast<void*>(addr), ComputeAddressHash(addr)); 389 if (entry == nullptr) return 0; 390 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); 391 EntryInfo& entry_info = entries_.at(entry_index); 392 DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy()); 393 return entry_info.id; 394 } 395 396 397 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr, 398 unsigned int size, 399 bool accessed) { 400 DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy()); 401 base::HashMap::Entry* entry = entries_map_.LookupOrInsert( 402 reinterpret_cast<void*>(addr), ComputeAddressHash(addr)); 403 if (entry->value != nullptr) { 404 int entry_index = 405 static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); 406 EntryInfo& entry_info = entries_.at(entry_index); 407 entry_info.accessed = accessed; 408 if (FLAG_heap_profiler_trace_objects) { 409 PrintF("Update object size : %p with old size %d and new size %d\n", 410 reinterpret_cast<void*>(addr), entry_info.size, size); 411 } 412 entry_info.size = size; 413 return entry_info.id; 414 } 415 entry->value = reinterpret_cast<void*>(entries_.size()); 416 SnapshotObjectId id = next_id_; 417 next_id_ += kObjectIdStep; 418 entries_.push_back(EntryInfo(id, addr, size, accessed)); 419 DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy()); 420 return id; 421 } 422 423 void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); } 424 425 void HeapObjectsMap::UpdateHeapObjectsMap() { 426 if (FLAG_heap_profiler_trace_objects) { 427 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n", 428 entries_map_.occupancy()); 429 } 430 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask, 431 GarbageCollectionReason::kHeapProfiler); 432 HeapIterator iterator(heap_); 433 for (HeapObject* obj = iterator.next(); obj != nullptr; 434 obj = iterator.next()) { 435 FindOrAddEntry(obj->address(), obj->Size()); 436 if (FLAG_heap_profiler_trace_objects) { 437 PrintF("Update object : %p %6d. Next address is %p\n", 438 reinterpret_cast<void*>(obj->address()), obj->Size(), 439 reinterpret_cast<void*>(obj->address() + obj->Size())); 440 } 441 } 442 RemoveDeadEntries(); 443 if (FLAG_heap_profiler_trace_objects) { 444 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n", 445 entries_map_.occupancy()); 446 } 447 } 448 449 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream, 450 int64_t* timestamp_us) { 451 UpdateHeapObjectsMap(); 452 time_intervals_.emplace_back(next_id_); 453 int prefered_chunk_size = stream->GetChunkSize(); 454 std::vector<v8::HeapStatsUpdate> stats_buffer; 455 DCHECK(!entries_.empty()); 456 EntryInfo* entry_info = &entries_.front(); 457 EntryInfo* end_entry_info = &entries_.back() + 1; 458 for (size_t time_interval_index = 0; 459 time_interval_index < time_intervals_.size(); ++time_interval_index) { 460 TimeInterval& time_interval = time_intervals_[time_interval_index]; 461 SnapshotObjectId time_interval_id = time_interval.id; 462 uint32_t entries_size = 0; 463 EntryInfo* start_entry_info = entry_info; 464 while (entry_info < end_entry_info && entry_info->id < time_interval_id) { 465 entries_size += entry_info->size; 466 ++entry_info; 467 } 468 uint32_t entries_count = 469 static_cast<uint32_t>(entry_info - start_entry_info); 470 if (time_interval.count != entries_count || 471 time_interval.size != entries_size) { 472 stats_buffer.emplace_back(static_cast<uint32_t>(time_interval_index), 473 time_interval.count = entries_count, 474 time_interval.size = entries_size); 475 if (static_cast<int>(stats_buffer.size()) >= prefered_chunk_size) { 476 OutputStream::WriteResult result = stream->WriteHeapStatsChunk( 477 &stats_buffer.front(), static_cast<int>(stats_buffer.size())); 478 if (result == OutputStream::kAbort) return last_assigned_id(); 479 stats_buffer.clear(); 480 } 481 } 482 } 483 DCHECK(entry_info == end_entry_info); 484 if (!stats_buffer.empty()) { 485 OutputStream::WriteResult result = stream->WriteHeapStatsChunk( 486 &stats_buffer.front(), static_cast<int>(stats_buffer.size())); 487 if (result == OutputStream::kAbort) return last_assigned_id(); 488 } 489 stream->EndOfStream(); 490 if (timestamp_us) { 491 *timestamp_us = 492 (time_intervals_.back().timestamp - time_intervals_.front().timestamp) 493 .InMicroseconds(); 494 } 495 return last_assigned_id(); 496 } 497 498 499 void HeapObjectsMap::RemoveDeadEntries() { 500 DCHECK(entries_.size() > 0 && entries_.at(0).id == 0 && 501 entries_.at(0).addr == kNullAddress); 502 size_t first_free_entry = 1; 503 for (size_t i = 1; i < entries_.size(); ++i) { 504 EntryInfo& entry_info = entries_.at(i); 505 if (entry_info.accessed) { 506 if (first_free_entry != i) { 507 entries_.at(first_free_entry) = entry_info; 508 } 509 entries_.at(first_free_entry).accessed = false; 510 base::HashMap::Entry* entry = 511 entries_map_.Lookup(reinterpret_cast<void*>(entry_info.addr), 512 ComputeAddressHash(entry_info.addr)); 513 DCHECK(entry); 514 entry->value = reinterpret_cast<void*>(first_free_entry); 515 ++first_free_entry; 516 } else { 517 if (entry_info.addr) { 518 entries_map_.Remove(reinterpret_cast<void*>(entry_info.addr), 519 ComputeAddressHash(entry_info.addr)); 520 } 521 } 522 } 523 entries_.erase(entries_.begin() + first_free_entry, entries_.end()); 524 525 DCHECK(static_cast<uint32_t>(entries_.size()) - 1 == 526 entries_map_.occupancy()); 527 } 528 529 530 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) { 531 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash()); 532 const char* label = info->GetLabel(); 533 id ^= StringHasher::HashSequentialString(label, 534 static_cast<int>(strlen(label)), 535 heap_->HashSeed()); 536 intptr_t element_count = info->GetElementCount(); 537 if (element_count != -1) { 538 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count)); 539 } 540 return id << 1; 541 } 542 543 HeapEntriesMap::HeapEntriesMap() : entries_() {} 544 545 int HeapEntriesMap::Map(HeapThing thing) { 546 base::HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing)); 547 if (cache_entry == nullptr) return HeapEntry::kNoEntry; 548 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value)); 549 } 550 551 552 void HeapEntriesMap::Pair(HeapThing thing, int entry) { 553 base::HashMap::Entry* cache_entry = 554 entries_.LookupOrInsert(thing, Hash(thing)); 555 DCHECK_NULL(cache_entry->value); 556 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry)); 557 } 558 559 HeapObjectsSet::HeapObjectsSet() : entries_() {} 560 561 void HeapObjectsSet::Clear() { 562 entries_.Clear(); 563 } 564 565 566 bool HeapObjectsSet::Contains(Object* obj) { 567 if (!obj->IsHeapObject()) return false; 568 HeapObject* object = HeapObject::cast(obj); 569 return entries_.Lookup(object, HeapEntriesMap::Hash(object)) != nullptr; 570 } 571 572 573 void HeapObjectsSet::Insert(Object* obj) { 574 if (!obj->IsHeapObject()) return; 575 HeapObject* object = HeapObject::cast(obj); 576 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object)); 577 } 578 579 580 const char* HeapObjectsSet::GetTag(Object* obj) { 581 HeapObject* object = HeapObject::cast(obj); 582 base::HashMap::Entry* cache_entry = 583 entries_.Lookup(object, HeapEntriesMap::Hash(object)); 584 return cache_entry != nullptr 585 ? reinterpret_cast<const char*>(cache_entry->value) 586 : nullptr; 587 } 588 589 590 V8_NOINLINE void HeapObjectsSet::SetTag(Object* obj, const char* tag) { 591 if (!obj->IsHeapObject()) return; 592 HeapObject* object = HeapObject::cast(obj); 593 base::HashMap::Entry* cache_entry = 594 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object)); 595 cache_entry->value = const_cast<char*>(tag); 596 } 597 598 V8HeapExplorer::V8HeapExplorer(HeapSnapshot* snapshot, 599 SnapshottingProgressReportingInterface* progress, 600 v8::HeapProfiler::ObjectNameResolver* resolver) 601 : heap_(snapshot->profiler()->heap_object_map()->heap()), 602 snapshot_(snapshot), 603 names_(snapshot_->profiler()->names()), 604 heap_object_map_(snapshot_->profiler()->heap_object_map()), 605 progress_(progress), 606 filler_(nullptr), 607 global_object_name_resolver_(resolver) {} 608 609 V8HeapExplorer::~V8HeapExplorer() { 610 } 611 612 613 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) { 614 return AddEntry(reinterpret_cast<HeapObject*>(ptr)); 615 } 616 617 void V8HeapExplorer::ExtractLocation(int entry, HeapObject* object) { 618 if (object->IsJSFunction()) { 619 JSFunction* func = JSFunction::cast(object); 620 ExtractLocationForJSFunction(entry, func); 621 622 } else if (object->IsJSGeneratorObject()) { 623 JSGeneratorObject* gen = JSGeneratorObject::cast(object); 624 ExtractLocationForJSFunction(entry, gen->function()); 625 626 } else if (object->IsJSObject()) { 627 JSObject* obj = JSObject::cast(object); 628 JSFunction* maybe_constructor = GetConstructor(obj); 629 630 if (maybe_constructor) 631 ExtractLocationForJSFunction(entry, maybe_constructor); 632 } 633 } 634 635 void V8HeapExplorer::ExtractLocationForJSFunction(int entry, JSFunction* func) { 636 if (!func->shared()->script()->IsScript()) return; 637 Script* script = Script::cast(func->shared()->script()); 638 int scriptId = script->id(); 639 int start = func->shared()->StartPosition(); 640 int line = script->GetLineNumber(start); 641 int col = script->GetColumnNumber(start); 642 snapshot_->AddLocation(entry, scriptId, line, col); 643 } 644 645 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) { 646 if (object->IsJSFunction()) { 647 JSFunction* func = JSFunction::cast(object); 648 SharedFunctionInfo* shared = func->shared(); 649 const char* name = names_->GetName(shared->Name()); 650 return AddEntry(object, HeapEntry::kClosure, name); 651 } else if (object->IsJSBoundFunction()) { 652 return AddEntry(object, HeapEntry::kClosure, "native_bind"); 653 } else if (object->IsJSRegExp()) { 654 JSRegExp* re = JSRegExp::cast(object); 655 return AddEntry(object, 656 HeapEntry::kRegExp, 657 names_->GetName(re->Pattern())); 658 } else if (object->IsJSObject()) { 659 const char* name = names_->GetName( 660 GetConstructorName(JSObject::cast(object))); 661 if (object->IsJSGlobalObject()) { 662 const char* tag = objects_tags_.GetTag(object); 663 if (tag != nullptr) { 664 name = names_->GetFormatted("%s / %s", name, tag); 665 } 666 } 667 return AddEntry(object, HeapEntry::kObject, name); 668 } else if (object->IsString()) { 669 String* string = String::cast(object); 670 if (string->IsConsString()) 671 return AddEntry(object, 672 HeapEntry::kConsString, 673 "(concatenated string)"); 674 if (string->IsSlicedString()) 675 return AddEntry(object, 676 HeapEntry::kSlicedString, 677 "(sliced string)"); 678 return AddEntry(object, 679 HeapEntry::kString, 680 names_->GetName(String::cast(object))); 681 } else if (object->IsSymbol()) { 682 if (Symbol::cast(object)->is_private()) 683 return AddEntry(object, HeapEntry::kHidden, "private symbol"); 684 else 685 return AddEntry(object, HeapEntry::kSymbol, "symbol"); 686 } else if (object->IsBigInt()) { 687 return AddEntry(object, HeapEntry::kBigInt, "bigint"); 688 } else if (object->IsCode()) { 689 return AddEntry(object, HeapEntry::kCode, ""); 690 } else if (object->IsSharedFunctionInfo()) { 691 String* name = SharedFunctionInfo::cast(object)->Name(); 692 return AddEntry(object, 693 HeapEntry::kCode, 694 names_->GetName(name)); 695 } else if (object->IsScript()) { 696 Object* name = Script::cast(object)->name(); 697 return AddEntry(object, 698 HeapEntry::kCode, 699 name->IsString() 700 ? names_->GetName(String::cast(name)) 701 : ""); 702 } else if (object->IsNativeContext()) { 703 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext"); 704 } else if (object->IsContext()) { 705 return AddEntry(object, HeapEntry::kObject, "system / Context"); 706 } else if (object->IsFixedArray() || object->IsFixedDoubleArray() || 707 object->IsByteArray()) { 708 return AddEntry(object, HeapEntry::kArray, ""); 709 } else if (object->IsHeapNumber()) { 710 return AddEntry(object, HeapEntry::kHeapNumber, "number"); 711 } 712 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object)); 713 } 714 715 716 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object, 717 HeapEntry::Type type, 718 const char* name) { 719 return AddEntry(object->address(), type, name, object->Size()); 720 } 721 722 723 HeapEntry* V8HeapExplorer::AddEntry(Address address, 724 HeapEntry::Type type, 725 const char* name, 726 size_t size) { 727 SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry( 728 address, static_cast<unsigned int>(size)); 729 unsigned trace_node_id = 0; 730 if (AllocationTracker* allocation_tracker = 731 snapshot_->profiler()->allocation_tracker()) { 732 trace_node_id = 733 allocation_tracker->address_to_trace()->GetTraceNodeId(address); 734 } 735 return snapshot_->AddEntry(type, name, object_id, size, trace_node_id); 736 } 737 738 739 class SnapshotFiller { 740 public: 741 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries) 742 : snapshot_(snapshot), 743 names_(snapshot->profiler()->names()), 744 entries_(entries) { } 745 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) { 746 HeapEntry* entry = allocator->AllocateEntry(ptr); 747 entries_->Pair(ptr, entry->index()); 748 return entry; 749 } 750 HeapEntry* FindEntry(HeapThing ptr) { 751 int index = entries_->Map(ptr); 752 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] 753 : nullptr; 754 } 755 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) { 756 HeapEntry* entry = FindEntry(ptr); 757 return entry != nullptr ? entry : AddEntry(ptr, allocator); 758 } 759 void SetIndexedReference(HeapGraphEdge::Type type, 760 int parent, 761 int index, 762 HeapEntry* child_entry) { 763 HeapEntry* parent_entry = &snapshot_->entries()[parent]; 764 parent_entry->SetIndexedReference(type, index, child_entry); 765 } 766 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, 767 int parent, 768 HeapEntry* child_entry) { 769 HeapEntry* parent_entry = &snapshot_->entries()[parent]; 770 int index = parent_entry->children_count() + 1; 771 parent_entry->SetIndexedReference(type, index, child_entry); 772 } 773 void SetNamedReference(HeapGraphEdge::Type type, 774 int parent, 775 const char* reference_name, 776 HeapEntry* child_entry) { 777 HeapEntry* parent_entry = &snapshot_->entries()[parent]; 778 parent_entry->SetNamedReference(type, reference_name, child_entry); 779 } 780 void SetNamedAutoIndexReference(HeapGraphEdge::Type type, int parent, 781 const char* description, 782 HeapEntry* child_entry) { 783 HeapEntry* parent_entry = &snapshot_->entries()[parent]; 784 int index = parent_entry->children_count() + 1; 785 const char* name = description 786 ? names_->GetFormatted("%d / %s", index, description) 787 : names_->GetName(index); 788 parent_entry->SetNamedReference(type, name, child_entry); 789 } 790 791 private: 792 HeapSnapshot* snapshot_; 793 StringsStorage* names_; 794 HeapEntriesMap* entries_; 795 }; 796 797 798 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) { 799 switch (object->map()->instance_type()) { 800 case MAP_TYPE: 801 switch (Map::cast(object)->instance_type()) { 802 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \ 803 case instance_type: return "system / Map (" #Name ")"; 804 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE) 805 #undef MAKE_STRING_MAP_CASE 806 default: return "system / Map"; 807 } 808 case CELL_TYPE: return "system / Cell"; 809 case PROPERTY_CELL_TYPE: return "system / PropertyCell"; 810 case FOREIGN_TYPE: return "system / Foreign"; 811 case ODDBALL_TYPE: return "system / Oddball"; 812 case ALLOCATION_SITE_TYPE: 813 return "system / AllocationSite"; 814 #define MAKE_STRUCT_CASE(NAME, Name, name) \ 815 case NAME##_TYPE: return "system / "#Name; 816 STRUCT_LIST(MAKE_STRUCT_CASE) 817 #undef MAKE_STRUCT_CASE 818 default: return "system"; 819 } 820 } 821 822 int V8HeapExplorer::EstimateObjectsCount() { 823 HeapIterator it(heap_, HeapIterator::kFilterUnreachable); 824 int objects_count = 0; 825 while (it.next()) ++objects_count; 826 return objects_count; 827 } 828 829 class IndexedReferencesExtractor : public ObjectVisitor { 830 public: 831 IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject* parent_obj, 832 int parent) 833 : generator_(generator), 834 parent_obj_(parent_obj), 835 parent_start_(HeapObject::RawField(parent_obj_, 0)), 836 parent_end_(HeapObject::RawField(parent_obj_, parent_obj_->Size())), 837 parent_(parent) {} 838 void VisitPointers(HeapObject* host, Object** start, Object** end) override { 839 VisitPointers(host, reinterpret_cast<MaybeObject**>(start), 840 reinterpret_cast<MaybeObject**>(end)); 841 } 842 void VisitPointers(HeapObject* host, MaybeObject** start, 843 MaybeObject** end) override { 844 int next_index = 0; 845 for (MaybeObject** p = start; p < end; p++) { 846 int index = static_cast<int>(reinterpret_cast<Object**>(p) - 847 HeapObject::RawField(parent_obj_, 0)); 848 ++next_index; 849 // |p| could be outside of the object, e.g., while visiting RelocInfo of 850 // code objects. 851 if (reinterpret_cast<Object**>(p) >= parent_start_ && 852 reinterpret_cast<Object**>(p) < parent_end_ && 853 generator_->visited_fields_[index]) { 854 generator_->visited_fields_[index] = false; 855 continue; 856 } 857 HeapObject* heap_object; 858 if ((*p)->ToWeakHeapObject(&heap_object) || 859 (*p)->ToStrongHeapObject(&heap_object)) { 860 generator_->SetHiddenReference(parent_obj_, parent_, next_index, 861 heap_object, index * kPointerSize); 862 } 863 } 864 } 865 866 private: 867 V8HeapExplorer* generator_; 868 HeapObject* parent_obj_; 869 Object** parent_start_; 870 Object** parent_end_; 871 int parent_; 872 }; 873 874 void V8HeapExplorer::ExtractReferences(int entry, HeapObject* obj) { 875 if (obj->IsJSGlobalProxy()) { 876 ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj)); 877 } else if (obj->IsJSArrayBuffer()) { 878 ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj)); 879 } else if (obj->IsJSObject()) { 880 if (obj->IsJSWeakSet()) { 881 ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj)); 882 } else if (obj->IsJSWeakMap()) { 883 ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj)); 884 } else if (obj->IsJSSet()) { 885 ExtractJSCollectionReferences(entry, JSSet::cast(obj)); 886 } else if (obj->IsJSMap()) { 887 ExtractJSCollectionReferences(entry, JSMap::cast(obj)); 888 } else if (obj->IsJSPromise()) { 889 ExtractJSPromiseReferences(entry, JSPromise::cast(obj)); 890 } else if (obj->IsJSGeneratorObject()) { 891 ExtractJSGeneratorObjectReferences(entry, JSGeneratorObject::cast(obj)); 892 } 893 ExtractJSObjectReferences(entry, JSObject::cast(obj)); 894 } else if (obj->IsString()) { 895 ExtractStringReferences(entry, String::cast(obj)); 896 } else if (obj->IsSymbol()) { 897 ExtractSymbolReferences(entry, Symbol::cast(obj)); 898 } else if (obj->IsMap()) { 899 ExtractMapReferences(entry, Map::cast(obj)); 900 } else if (obj->IsSharedFunctionInfo()) { 901 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj)); 902 } else if (obj->IsScript()) { 903 ExtractScriptReferences(entry, Script::cast(obj)); 904 } else if (obj->IsAccessorInfo()) { 905 ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj)); 906 } else if (obj->IsAccessorPair()) { 907 ExtractAccessorPairReferences(entry, AccessorPair::cast(obj)); 908 } else if (obj->IsCode()) { 909 ExtractCodeReferences(entry, Code::cast(obj)); 910 } else if (obj->IsCell()) { 911 ExtractCellReferences(entry, Cell::cast(obj)); 912 } else if (obj->IsFeedbackCell()) { 913 ExtractFeedbackCellReferences(entry, FeedbackCell::cast(obj)); 914 } else if (obj->IsPropertyCell()) { 915 ExtractPropertyCellReferences(entry, PropertyCell::cast(obj)); 916 } else if (obj->IsAllocationSite()) { 917 ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj)); 918 } else if (obj->IsArrayBoilerplateDescription()) { 919 ExtractArrayBoilerplateDescriptionReferences( 920 entry, ArrayBoilerplateDescription::cast(obj)); 921 } else if (obj->IsFeedbackVector()) { 922 ExtractFeedbackVectorReferences(entry, FeedbackVector::cast(obj)); 923 } else if (obj->IsWeakFixedArray()) { 924 ExtractWeakArrayReferences(WeakFixedArray::kHeaderSize, entry, 925 WeakFixedArray::cast(obj)); 926 } else if (obj->IsWeakArrayList()) { 927 ExtractWeakArrayReferences(WeakArrayList::kHeaderSize, entry, 928 WeakArrayList::cast(obj)); 929 } else if (obj->IsContext()) { 930 ExtractContextReferences(entry, Context::cast(obj)); 931 } else if (obj->IsEphemeronHashTable()) { 932 ExtractEphemeronHashTableReferences(entry, EphemeronHashTable::cast(obj)); 933 } else if (obj->IsFixedArray()) { 934 ExtractFixedArrayReferences(entry, FixedArray::cast(obj)); 935 } 936 } 937 938 939 void V8HeapExplorer::ExtractJSGlobalProxyReferences( 940 int entry, JSGlobalProxy* proxy) { 941 SetInternalReference(proxy, entry, 942 "native_context", proxy->native_context(), 943 JSGlobalProxy::kNativeContextOffset); 944 } 945 946 947 void V8HeapExplorer::ExtractJSObjectReferences( 948 int entry, JSObject* js_obj) { 949 HeapObject* obj = js_obj; 950 ExtractPropertyReferences(js_obj, entry); 951 ExtractElementReferences(js_obj, entry); 952 ExtractInternalReferences(js_obj, entry); 953 PrototypeIterator iter(heap_->isolate(), js_obj); 954 ReadOnlyRoots roots(heap_); 955 SetPropertyReference(obj, entry, roots.proto_string(), iter.GetCurrent()); 956 if (obj->IsJSBoundFunction()) { 957 JSBoundFunction* js_fun = JSBoundFunction::cast(obj); 958 TagObject(js_fun->bound_arguments(), "(bound arguments)"); 959 SetInternalReference(js_fun, entry, "bindings", js_fun->bound_arguments(), 960 JSBoundFunction::kBoundArgumentsOffset); 961 SetInternalReference(js_obj, entry, "bound_this", js_fun->bound_this(), 962 JSBoundFunction::kBoundThisOffset); 963 SetInternalReference(js_obj, entry, "bound_function", 964 js_fun->bound_target_function(), 965 JSBoundFunction::kBoundTargetFunctionOffset); 966 FixedArray* bindings = js_fun->bound_arguments(); 967 for (int i = 0; i < bindings->length(); i++) { 968 const char* reference_name = names_->GetFormatted("bound_argument_%d", i); 969 SetNativeBindReference(js_obj, entry, reference_name, bindings->get(i)); 970 } 971 } else if (obj->IsJSFunction()) { 972 JSFunction* js_fun = JSFunction::cast(js_obj); 973 if (js_fun->has_prototype_slot()) { 974 Object* proto_or_map = js_fun->prototype_or_initial_map(); 975 if (!proto_or_map->IsTheHole(heap_->isolate())) { 976 if (!proto_or_map->IsMap()) { 977 SetPropertyReference(obj, entry, roots.prototype_string(), 978 proto_or_map, nullptr, 979 JSFunction::kPrototypeOrInitialMapOffset); 980 } else { 981 SetPropertyReference(obj, entry, roots.prototype_string(), 982 js_fun->prototype()); 983 SetInternalReference(obj, entry, "initial_map", proto_or_map, 984 JSFunction::kPrototypeOrInitialMapOffset); 985 } 986 } 987 } 988 SharedFunctionInfo* shared_info = js_fun->shared(); 989 TagObject(js_fun->feedback_cell(), "(function feedback cell)"); 990 SetInternalReference(js_fun, entry, "feedback_cell", 991 js_fun->feedback_cell(), 992 JSFunction::kFeedbackCellOffset); 993 TagObject(shared_info, "(shared function info)"); 994 SetInternalReference(js_fun, entry, 995 "shared", shared_info, 996 JSFunction::kSharedFunctionInfoOffset); 997 TagObject(js_fun->context(), "(context)"); 998 SetInternalReference(js_fun, entry, 999 "context", js_fun->context(), 1000 JSFunction::kContextOffset); 1001 TagCodeObject(js_fun->code()); 1002 SetInternalReference(js_fun, entry, "code", js_fun->code(), 1003 JSFunction::kCodeOffset); 1004 } else if (obj->IsJSGlobalObject()) { 1005 JSGlobalObject* global_obj = JSGlobalObject::cast(obj); 1006 SetInternalReference(global_obj, entry, "native_context", 1007 global_obj->native_context(), 1008 JSGlobalObject::kNativeContextOffset); 1009 SetInternalReference(global_obj, entry, "global_proxy", 1010 global_obj->global_proxy(), 1011 JSGlobalObject::kGlobalProxyOffset); 1012 STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize == 1013 2 * kPointerSize); 1014 } else if (obj->IsJSArrayBufferView()) { 1015 JSArrayBufferView* view = JSArrayBufferView::cast(obj); 1016 SetInternalReference(view, entry, "buffer", view->buffer(), 1017 JSArrayBufferView::kBufferOffset); 1018 } 1019 1020 TagObject(js_obj->raw_properties_or_hash(), "(object properties)"); 1021 SetInternalReference(obj, entry, "properties", 1022 js_obj->raw_properties_or_hash(), 1023 JSObject::kPropertiesOrHashOffset); 1024 1025 TagObject(js_obj->elements(), "(object elements)"); 1026 SetInternalReference(obj, entry, 1027 "elements", js_obj->elements(), 1028 JSObject::kElementsOffset); 1029 } 1030 1031 1032 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) { 1033 if (string->IsConsString()) { 1034 ConsString* cs = ConsString::cast(string); 1035 SetInternalReference(cs, entry, "first", cs->first(), 1036 ConsString::kFirstOffset); 1037 SetInternalReference(cs, entry, "second", cs->second(), 1038 ConsString::kSecondOffset); 1039 } else if (string->IsSlicedString()) { 1040 SlicedString* ss = SlicedString::cast(string); 1041 SetInternalReference(ss, entry, "parent", ss->parent(), 1042 SlicedString::kParentOffset); 1043 } else if (string->IsThinString()) { 1044 ThinString* ts = ThinString::cast(string); 1045 SetInternalReference(ts, entry, "actual", ts->actual(), 1046 ThinString::kActualOffset); 1047 } 1048 } 1049 1050 1051 void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) { 1052 SetInternalReference(symbol, entry, 1053 "name", symbol->name(), 1054 Symbol::kNameOffset); 1055 } 1056 1057 1058 void V8HeapExplorer::ExtractJSCollectionReferences(int entry, 1059 JSCollection* collection) { 1060 SetInternalReference(collection, entry, "table", collection->table(), 1061 JSCollection::kTableOffset); 1062 } 1063 1064 void V8HeapExplorer::ExtractJSWeakCollectionReferences(int entry, 1065 JSWeakCollection* obj) { 1066 SetInternalReference(obj, entry, "table", obj->table(), 1067 JSWeakCollection::kTableOffset); 1068 } 1069 1070 void V8HeapExplorer::ExtractEphemeronHashTableReferences( 1071 int entry, EphemeronHashTable* table) { 1072 for (int i = 0, capacity = table->Capacity(); i < capacity; ++i) { 1073 int key_index = EphemeronHashTable::EntryToIndex(i) + 1074 EphemeronHashTable::kEntryKeyIndex; 1075 int value_index = EphemeronHashTable::EntryToValueIndex(i); 1076 Object* key = table->get(key_index); 1077 Object* value = table->get(value_index); 1078 SetWeakReference(table, entry, key_index, key, 1079 table->OffsetOfElementAt(key_index)); 1080 SetInternalReference(table, entry, value_index, value, 1081 table->OffsetOfElementAt(value_index)); 1082 HeapEntry* key_entry = GetEntry(key); 1083 int key_entry_index = key_entry->index(); 1084 HeapEntry* value_entry = GetEntry(value); 1085 if (key_entry && value_entry) { 1086 const char* edge_name = 1087 names_->GetFormatted("key %s in WeakMap", key_entry->name()); 1088 filler_->SetNamedAutoIndexReference( 1089 HeapGraphEdge::kInternal, key_entry_index, edge_name, value_entry); 1090 } 1091 } 1092 } 1093 1094 void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) { 1095 if (!context->IsNativeContext() && context->is_declaration_context()) { 1096 ScopeInfo* scope_info = context->scope_info(); 1097 // Add context allocated locals. 1098 int context_locals = scope_info->ContextLocalCount(); 1099 for (int i = 0; i < context_locals; ++i) { 1100 String* local_name = scope_info->ContextLocalName(i); 1101 int idx = Context::MIN_CONTEXT_SLOTS + i; 1102 SetContextReference(context, entry, local_name, context->get(idx), 1103 Context::OffsetOfElementAt(idx)); 1104 } 1105 if (scope_info->HasFunctionName()) { 1106 String* name = String::cast(scope_info->FunctionName()); 1107 int idx = scope_info->FunctionContextSlotIndex(name); 1108 if (idx >= 0) { 1109 SetContextReference(context, entry, name, context->get(idx), 1110 Context::OffsetOfElementAt(idx)); 1111 } 1112 } 1113 } 1114 1115 #define EXTRACT_CONTEXT_FIELD(index, type, name) \ 1116 if (Context::index < Context::FIRST_WEAK_SLOT || \ 1117 Context::index == Context::MAP_CACHE_INDEX) { \ 1118 SetInternalReference(context, entry, #name, context->get(Context::index), \ 1119 FixedArray::OffsetOfElementAt(Context::index)); \ 1120 } else { \ 1121 SetWeakReference(context, entry, #name, context->get(Context::index), \ 1122 FixedArray::OffsetOfElementAt(Context::index)); \ 1123 } 1124 EXTRACT_CONTEXT_FIELD(SCOPE_INFO_INDEX, ScopeInfo, scope_info); 1125 EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous); 1126 EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, HeapObject, extension); 1127 EXTRACT_CONTEXT_FIELD(NATIVE_CONTEXT_INDEX, Context, native_context); 1128 if (context->IsNativeContext()) { 1129 TagObject(context->normalized_map_cache(), "(context norm. map cache)"); 1130 TagObject(context->embedder_data(), "(context data)"); 1131 NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD) 1132 EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list); 1133 EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list); 1134 #undef EXTRACT_CONTEXT_FIELD 1135 STATIC_ASSERT(Context::OPTIMIZED_CODE_LIST == Context::FIRST_WEAK_SLOT); 1136 STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 == 1137 Context::NATIVE_CONTEXT_SLOTS); 1138 STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 3 == 1139 Context::NATIVE_CONTEXT_SLOTS); 1140 } 1141 } 1142 1143 1144 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) { 1145 MaybeObject* maybe_raw_transitions_or_prototype_info = map->raw_transitions(); 1146 HeapObject* raw_transitions_or_prototype_info; 1147 if (maybe_raw_transitions_or_prototype_info->ToWeakHeapObject( 1148 &raw_transitions_or_prototype_info)) { 1149 DCHECK(raw_transitions_or_prototype_info->IsMap()); 1150 SetWeakReference(map, entry, "transition", 1151 raw_transitions_or_prototype_info, 1152 Map::kTransitionsOrPrototypeInfoOffset); 1153 } else if (maybe_raw_transitions_or_prototype_info->ToStrongHeapObject( 1154 &raw_transitions_or_prototype_info)) { 1155 if (raw_transitions_or_prototype_info->IsTransitionArray()) { 1156 TransitionArray* transitions = 1157 TransitionArray::cast(raw_transitions_or_prototype_info); 1158 if (map->CanTransition() && transitions->HasPrototypeTransitions()) { 1159 TagObject(transitions->GetPrototypeTransitions(), 1160 "(prototype transitions)"); 1161 } 1162 TagObject(transitions, "(transition array)"); 1163 SetInternalReference(map, entry, "transitions", transitions, 1164 Map::kTransitionsOrPrototypeInfoOffset); 1165 } else if (raw_transitions_or_prototype_info->IsTuple3() || 1166 raw_transitions_or_prototype_info->IsFixedArray()) { 1167 TagObject(raw_transitions_or_prototype_info, "(transition)"); 1168 SetInternalReference(map, entry, "transition", 1169 raw_transitions_or_prototype_info, 1170 Map::kTransitionsOrPrototypeInfoOffset); 1171 } else if (map->is_prototype_map()) { 1172 TagObject(raw_transitions_or_prototype_info, "prototype_info"); 1173 SetInternalReference(map, entry, "prototype_info", 1174 raw_transitions_or_prototype_info, 1175 Map::kTransitionsOrPrototypeInfoOffset); 1176 } 1177 } 1178 DescriptorArray* descriptors = map->instance_descriptors(); 1179 TagObject(descriptors, "(map descriptors)"); 1180 SetInternalReference(map, entry, "descriptors", descriptors, 1181 Map::kDescriptorsOffset); 1182 SetInternalReference(map, entry, "prototype", map->prototype(), 1183 Map::kPrototypeOffset); 1184 if (FLAG_unbox_double_fields) { 1185 SetInternalReference(map, entry, "layout_descriptor", 1186 map->layout_descriptor(), 1187 Map::kLayoutDescriptorOffset); 1188 } 1189 Object* constructor_or_backpointer = map->constructor_or_backpointer(); 1190 if (constructor_or_backpointer->IsMap()) { 1191 TagObject(constructor_or_backpointer, "(back pointer)"); 1192 SetInternalReference(map, entry, "back_pointer", constructor_or_backpointer, 1193 Map::kConstructorOrBackPointerOffset); 1194 } else if (constructor_or_backpointer->IsFunctionTemplateInfo()) { 1195 TagObject(constructor_or_backpointer, "(constructor function data)"); 1196 SetInternalReference(map, entry, "constructor_function_data", 1197 constructor_or_backpointer, 1198 Map::kConstructorOrBackPointerOffset); 1199 } else { 1200 SetInternalReference(map, entry, "constructor", constructor_or_backpointer, 1201 Map::kConstructorOrBackPointerOffset); 1202 } 1203 TagObject(map->dependent_code(), "(dependent code)"); 1204 SetInternalReference(map, entry, "dependent_code", map->dependent_code(), 1205 Map::kDependentCodeOffset); 1206 } 1207 1208 1209 void V8HeapExplorer::ExtractSharedFunctionInfoReferences( 1210 int entry, SharedFunctionInfo* shared) { 1211 HeapObject* obj = shared; 1212 String* shared_name = shared->DebugName(); 1213 const char* name = nullptr; 1214 if (shared_name != ReadOnlyRoots(heap_).empty_string()) { 1215 name = names_->GetName(shared_name); 1216 TagObject(shared->GetCode(), names_->GetFormatted("(code for %s)", name)); 1217 } else { 1218 TagObject(shared->GetCode(), 1219 names_->GetFormatted( 1220 "(%s code)", Code::Kind2String(shared->GetCode()->kind()))); 1221 } 1222 1223 if (shared->name_or_scope_info()->IsScopeInfo()) { 1224 TagObject(shared->name_or_scope_info(), "(function scope info)"); 1225 } 1226 SetInternalReference(obj, entry, "name_or_scope_info", 1227 shared->name_or_scope_info(), 1228 SharedFunctionInfo::kNameOrScopeInfoOffset); 1229 SetInternalReference(obj, entry, "script_or_debug_info", 1230 shared->script_or_debug_info(), 1231 SharedFunctionInfo::kScriptOrDebugInfoOffset); 1232 SetInternalReference(obj, entry, 1233 "function_data", shared->function_data(), 1234 SharedFunctionInfo::kFunctionDataOffset); 1235 SetInternalReference( 1236 obj, entry, "raw_outer_scope_info_or_feedback_metadata", 1237 shared->raw_outer_scope_info_or_feedback_metadata(), 1238 SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset); 1239 } 1240 1241 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) { 1242 HeapObject* obj = script; 1243 SetInternalReference(obj, entry, 1244 "source", script->source(), 1245 Script::kSourceOffset); 1246 SetInternalReference(obj, entry, 1247 "name", script->name(), 1248 Script::kNameOffset); 1249 SetInternalReference(obj, entry, 1250 "context_data", script->context_data(), 1251 Script::kContextOffset); 1252 TagObject(script->line_ends(), "(script line ends)"); 1253 SetInternalReference(obj, entry, 1254 "line_ends", script->line_ends(), 1255 Script::kLineEndsOffset); 1256 } 1257 1258 1259 void V8HeapExplorer::ExtractAccessorInfoReferences( 1260 int entry, AccessorInfo* accessor_info) { 1261 SetInternalReference(accessor_info, entry, "name", accessor_info->name(), 1262 AccessorInfo::kNameOffset); 1263 SetInternalReference(accessor_info, entry, "expected_receiver_type", 1264 accessor_info->expected_receiver_type(), 1265 AccessorInfo::kExpectedReceiverTypeOffset); 1266 SetInternalReference(accessor_info, entry, "getter", accessor_info->getter(), 1267 AccessorInfo::kGetterOffset); 1268 SetInternalReference(accessor_info, entry, "setter", accessor_info->setter(), 1269 AccessorInfo::kSetterOffset); 1270 SetInternalReference(accessor_info, entry, "data", accessor_info->data(), 1271 AccessorInfo::kDataOffset); 1272 } 1273 1274 void V8HeapExplorer::ExtractAccessorPairReferences( 1275 int entry, AccessorPair* accessors) { 1276 SetInternalReference(accessors, entry, "getter", accessors->getter(), 1277 AccessorPair::kGetterOffset); 1278 SetInternalReference(accessors, entry, "setter", accessors->setter(), 1279 AccessorPair::kSetterOffset); 1280 } 1281 1282 void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) { 1283 TagObject(code, names_->GetFormatted("(%s builtin)", name)); 1284 } 1285 1286 void V8HeapExplorer::TagCodeObject(Code* code) { 1287 if (code->kind() == Code::STUB) { 1288 TagObject(code, names_->GetFormatted( 1289 "(%s code)", 1290 CodeStub::MajorName(CodeStub::GetMajorKey(code)))); 1291 } 1292 } 1293 1294 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) { 1295 TagCodeObject(code); 1296 TagObject(code->relocation_info(), "(code relocation info)"); 1297 SetInternalReference(code, entry, 1298 "relocation_info", code->relocation_info(), 1299 Code::kRelocationInfoOffset); 1300 TagObject(code->deoptimization_data(), "(code deopt data)"); 1301 SetInternalReference(code, entry, 1302 "deoptimization_data", code->deoptimization_data(), 1303 Code::kDeoptimizationDataOffset); 1304 TagObject(code->source_position_table(), "(source position table)"); 1305 SetInternalReference(code, entry, "source_position_table", 1306 code->source_position_table(), 1307 Code::kSourcePositionTableOffset); 1308 } 1309 1310 void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) { 1311 SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset); 1312 } 1313 1314 void V8HeapExplorer::ExtractFeedbackCellReferences( 1315 int entry, FeedbackCell* feedback_cell) { 1316 TagObject(feedback_cell, "(feedback cell)"); 1317 SetInternalReference(feedback_cell, entry, "value", feedback_cell->value(), 1318 FeedbackCell::kValueOffset); 1319 } 1320 1321 void V8HeapExplorer::ExtractPropertyCellReferences(int entry, 1322 PropertyCell* cell) { 1323 SetInternalReference(cell, entry, "value", cell->value(), 1324 PropertyCell::kValueOffset); 1325 TagObject(cell->dependent_code(), "(dependent code)"); 1326 SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(), 1327 PropertyCell::kDependentCodeOffset); 1328 } 1329 1330 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry, 1331 AllocationSite* site) { 1332 SetInternalReference(site, entry, "transition_info", 1333 site->transition_info_or_boilerplate(), 1334 AllocationSite::kTransitionInfoOrBoilerplateOffset); 1335 SetInternalReference(site, entry, "nested_site", site->nested_site(), 1336 AllocationSite::kNestedSiteOffset); 1337 TagObject(site->dependent_code(), "(dependent code)"); 1338 SetInternalReference(site, entry, "dependent_code", site->dependent_code(), 1339 AllocationSite::kDependentCodeOffset); 1340 } 1341 1342 void V8HeapExplorer::ExtractArrayBoilerplateDescriptionReferences( 1343 int entry, ArrayBoilerplateDescription* value) { 1344 SetInternalReference(value, entry, "constant_elements", 1345 value->constant_elements(), 1346 ArrayBoilerplateDescription::kConstantElementsOffset); 1347 } 1348 1349 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator { 1350 public: 1351 JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer) 1352 : size_(size) 1353 , explorer_(explorer) { 1354 } 1355 virtual HeapEntry* AllocateEntry(HeapThing ptr) { 1356 return explorer_->AddEntry(reinterpret_cast<Address>(ptr), 1357 HeapEntry::kNative, "system / JSArrayBufferData", 1358 size_); 1359 } 1360 private: 1361 size_t size_; 1362 V8HeapExplorer* explorer_; 1363 }; 1364 1365 void V8HeapExplorer::ExtractJSArrayBufferReferences( 1366 int entry, JSArrayBuffer* buffer) { 1367 // Setup a reference to a native memory backing_store object. 1368 if (!buffer->backing_store()) 1369 return; 1370 size_t data_size = NumberToSize(buffer->byte_length()); 1371 JSArrayBufferDataEntryAllocator allocator(data_size, this); 1372 HeapEntry* data_entry = 1373 filler_->FindOrAddEntry(buffer->backing_store(), &allocator); 1374 filler_->SetNamedReference(HeapGraphEdge::kInternal, 1375 entry, "backing_store", data_entry); 1376 } 1377 1378 void V8HeapExplorer::ExtractJSPromiseReferences(int entry, JSPromise* promise) { 1379 SetInternalReference(promise, entry, "reactions_or_result", 1380 promise->reactions_or_result(), 1381 JSPromise::kReactionsOrResultOffset); 1382 } 1383 1384 void V8HeapExplorer::ExtractJSGeneratorObjectReferences( 1385 int entry, JSGeneratorObject* generator) { 1386 SetInternalReference(generator, entry, "function", generator->function(), 1387 JSGeneratorObject::kFunctionOffset); 1388 SetInternalReference(generator, entry, "context", generator->context(), 1389 JSGeneratorObject::kContextOffset); 1390 SetInternalReference(generator, entry, "receiver", generator->receiver(), 1391 JSGeneratorObject::kReceiverOffset); 1392 SetInternalReference(generator, entry, "parameters_and_registers", 1393 generator->parameters_and_registers(), 1394 JSGeneratorObject::kParametersAndRegistersOffset); 1395 } 1396 1397 void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) { 1398 for (int i = 0, l = array->length(); i < l; ++i) { 1399 DCHECK(!HasWeakHeapObjectTag(array->get(i))); 1400 SetInternalReference(array, entry, i, array->get(i), 1401 array->OffsetOfElementAt(i)); 1402 } 1403 } 1404 1405 void V8HeapExplorer::ExtractFeedbackVectorReferences( 1406 int entry, FeedbackVector* feedback_vector) { 1407 MaybeObject* code = feedback_vector->optimized_code_weak_or_smi(); 1408 HeapObject* code_heap_object; 1409 if (code->ToWeakHeapObject(&code_heap_object)) { 1410 SetWeakReference(feedback_vector, entry, "optimized code", code_heap_object, 1411 FeedbackVector::kOptimizedCodeOffset); 1412 } 1413 } 1414 1415 template <typename T> 1416 void V8HeapExplorer::ExtractWeakArrayReferences(int header_size, int entry, 1417 T* array) { 1418 for (int i = 0; i < array->length(); ++i) { 1419 MaybeObject* object = array->Get(i); 1420 HeapObject* heap_object; 1421 if (object->ToWeakHeapObject(&heap_object)) { 1422 SetWeakReference(array, entry, i, heap_object, 1423 header_size + i * kPointerSize); 1424 } else if (object->ToStrongHeapObject(&heap_object)) { 1425 SetInternalReference(array, entry, i, heap_object, 1426 header_size + i * kPointerSize); 1427 } 1428 } 1429 } 1430 1431 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) { 1432 Isolate* isolate = js_obj->GetIsolate(); 1433 if (js_obj->HasFastProperties()) { 1434 DescriptorArray* descs = js_obj->map()->instance_descriptors(); 1435 int real_size = js_obj->map()->NumberOfOwnDescriptors(); 1436 for (int i = 0; i < real_size; i++) { 1437 PropertyDetails details = descs->GetDetails(i); 1438 switch (details.location()) { 1439 case kField: { 1440 Representation r = details.representation(); 1441 if (r.IsSmi() || r.IsDouble()) break; 1442 1443 Name* k = descs->GetKey(i); 1444 FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i); 1445 Object* value = js_obj->RawFastPropertyAt(field_index); 1446 int field_offset = 1447 field_index.is_inobject() ? field_index.offset() : -1; 1448 1449 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k, 1450 value, nullptr, field_offset); 1451 break; 1452 } 1453 case kDescriptor: 1454 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, 1455 descs->GetKey(i), 1456 descs->GetStrongValue(i)); 1457 break; 1458 } 1459 } 1460 } else if (js_obj->IsJSGlobalObject()) { 1461 // We assume that global objects can only have slow properties. 1462 GlobalDictionary* dictionary = 1463 JSGlobalObject::cast(js_obj)->global_dictionary(); 1464 int length = dictionary->Capacity(); 1465 ReadOnlyRoots roots(isolate); 1466 for (int i = 0; i < length; ++i) { 1467 if (dictionary->IsKey(roots, dictionary->KeyAt(i))) { 1468 PropertyCell* cell = dictionary->CellAt(i); 1469 Name* name = cell->name(); 1470 Object* value = cell->value(); 1471 PropertyDetails details = cell->property_details(); 1472 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, name, 1473 value); 1474 } 1475 } 1476 } else { 1477 NameDictionary* dictionary = js_obj->property_dictionary(); 1478 int length = dictionary->Capacity(); 1479 ReadOnlyRoots roots(isolate); 1480 for (int i = 0; i < length; ++i) { 1481 Object* k = dictionary->KeyAt(i); 1482 if (dictionary->IsKey(roots, k)) { 1483 Object* value = dictionary->ValueAt(i); 1484 PropertyDetails details = dictionary->DetailsAt(i); 1485 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, 1486 Name::cast(k), value); 1487 } 1488 } 1489 } 1490 } 1491 1492 1493 void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry, 1494 Name* key, 1495 Object* callback_obj, 1496 int field_offset) { 1497 if (!callback_obj->IsAccessorPair()) return; 1498 AccessorPair* accessors = AccessorPair::cast(callback_obj); 1499 SetPropertyReference(js_obj, entry, key, accessors, nullptr, field_offset); 1500 Object* getter = accessors->getter(); 1501 if (!getter->IsOddball()) { 1502 SetPropertyReference(js_obj, entry, key, getter, "get %s"); 1503 } 1504 Object* setter = accessors->setter(); 1505 if (!setter->IsOddball()) { 1506 SetPropertyReference(js_obj, entry, key, setter, "set %s"); 1507 } 1508 } 1509 1510 1511 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) { 1512 ReadOnlyRoots roots = js_obj->GetReadOnlyRoots(); 1513 if (js_obj->HasObjectElements()) { 1514 FixedArray* elements = FixedArray::cast(js_obj->elements()); 1515 int length = js_obj->IsJSArray() 1516 ? Smi::ToInt(JSArray::cast(js_obj)->length()) 1517 : elements->length(); 1518 for (int i = 0; i < length; ++i) { 1519 if (!elements->get(i)->IsTheHole(roots)) { 1520 SetElementReference(js_obj, entry, i, elements->get(i)); 1521 } 1522 } 1523 } else if (js_obj->HasDictionaryElements()) { 1524 NumberDictionary* dictionary = js_obj->element_dictionary(); 1525 int length = dictionary->Capacity(); 1526 for (int i = 0; i < length; ++i) { 1527 Object* k = dictionary->KeyAt(i); 1528 if (dictionary->IsKey(roots, k)) { 1529 DCHECK(k->IsNumber()); 1530 uint32_t index = static_cast<uint32_t>(k->Number()); 1531 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i)); 1532 } 1533 } 1534 } 1535 } 1536 1537 1538 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) { 1539 int length = js_obj->GetEmbedderFieldCount(); 1540 for (int i = 0; i < length; ++i) { 1541 Object* o = js_obj->GetEmbedderField(i); 1542 SetInternalReference(js_obj, entry, i, o, 1543 js_obj->GetEmbedderFieldOffset(i)); 1544 } 1545 } 1546 1547 JSFunction* V8HeapExplorer::GetConstructor(JSReceiver* receiver) { 1548 Isolate* isolate = receiver->GetIsolate(); 1549 DisallowHeapAllocation no_gc; 1550 HandleScope scope(isolate); 1551 MaybeHandle<JSFunction> maybe_constructor = 1552 JSReceiver::GetConstructor(handle(receiver, isolate)); 1553 1554 if (maybe_constructor.is_null()) return nullptr; 1555 1556 return *maybe_constructor.ToHandleChecked(); 1557 } 1558 1559 String* V8HeapExplorer::GetConstructorName(JSObject* object) { 1560 Isolate* isolate = object->GetIsolate(); 1561 if (object->IsJSFunction()) return ReadOnlyRoots(isolate).closure_string(); 1562 DisallowHeapAllocation no_gc; 1563 HandleScope scope(isolate); 1564 return *JSReceiver::GetConstructorName(handle(object, isolate)); 1565 } 1566 1567 1568 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) { 1569 if (!obj->IsHeapObject()) return nullptr; 1570 return filler_->FindOrAddEntry(obj, this); 1571 } 1572 1573 class RootsReferencesExtractor : public RootVisitor { 1574 public: 1575 explicit RootsReferencesExtractor(V8HeapExplorer* explorer) 1576 : explorer_(explorer), visiting_weak_roots_(false) {} 1577 1578 void SetVisitingWeakRoots() { visiting_weak_roots_ = true; } 1579 1580 void VisitRootPointer(Root root, const char* description, 1581 Object** object) override { 1582 if (root == Root::kBuiltins) { 1583 explorer_->TagBuiltinCodeObject(Code::cast(*object), description); 1584 } 1585 explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_, 1586 *object); 1587 } 1588 1589 void VisitRootPointers(Root root, const char* description, Object** start, 1590 Object** end) override { 1591 for (Object** p = start; p < end; p++) 1592 VisitRootPointer(root, description, p); 1593 } 1594 1595 private: 1596 V8HeapExplorer* explorer_; 1597 bool visiting_weak_roots_; 1598 }; 1599 1600 bool V8HeapExplorer::IterateAndExtractReferences(SnapshotFiller* filler) { 1601 filler_ = filler; 1602 1603 // Create references to the synthetic roots. 1604 SetRootGcRootsReference(); 1605 for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) { 1606 SetGcRootsReference(static_cast<Root>(root)); 1607 } 1608 1609 // Make sure builtin code objects get their builtin tags 1610 // first. Otherwise a particular JSFunction object could set 1611 // its custom name to a generic builtin. 1612 RootsReferencesExtractor extractor(this); 1613 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG); 1614 extractor.SetVisitingWeakRoots(); 1615 heap_->IterateWeakGlobalHandles(&extractor); 1616 1617 bool interrupted = false; 1618 1619 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); 1620 // Heap iteration with filtering must be finished in any case. 1621 for (HeapObject *obj = iterator.next(); obj != nullptr; 1622 obj = iterator.next(), progress_->ProgressStep()) { 1623 if (interrupted) continue; 1624 1625 size_t max_pointer = obj->Size() / kPointerSize; 1626 if (max_pointer > visited_fields_.size()) { 1627 // Clear the current bits. 1628 std::vector<bool>().swap(visited_fields_); 1629 // Reallocate to right size. 1630 visited_fields_.resize(max_pointer, false); 1631 } 1632 1633 HeapEntry* heap_entry = GetEntry(obj); 1634 int entry = heap_entry->index(); 1635 ExtractReferences(entry, obj); 1636 SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset); 1637 // Extract unvisited fields as hidden references and restore tags 1638 // of visited fields. 1639 IndexedReferencesExtractor refs_extractor(this, obj, entry); 1640 obj->Iterate(&refs_extractor); 1641 1642 // Ensure visited_fields_ doesn't leak to the next object. 1643 for (size_t i = 0; i < max_pointer; ++i) { 1644 DCHECK(!visited_fields_[i]); 1645 } 1646 1647 // Extract location for specific object types 1648 ExtractLocation(entry, obj); 1649 1650 if (!progress_->ProgressReport(false)) interrupted = true; 1651 } 1652 1653 filler_ = nullptr; 1654 return interrupted ? false : progress_->ProgressReport(true); 1655 } 1656 1657 1658 bool V8HeapExplorer::IsEssentialObject(Object* object) { 1659 ReadOnlyRoots roots(heap_); 1660 return object->IsHeapObject() && !object->IsOddball() && 1661 object != roots.empty_byte_array() && 1662 object != roots.empty_fixed_array() && 1663 object != roots.empty_weak_fixed_array() && 1664 object != roots.empty_descriptor_array() && 1665 object != roots.fixed_array_map() && object != roots.cell_map() && 1666 object != roots.global_property_cell_map() && 1667 object != roots.shared_function_info_map() && 1668 object != roots.free_space_map() && 1669 object != roots.one_pointer_filler_map() && 1670 object != roots.two_pointer_filler_map(); 1671 } 1672 1673 bool V8HeapExplorer::IsEssentialHiddenReference(Object* parent, 1674 int field_offset) { 1675 if (parent->IsAllocationSite() && 1676 field_offset == AllocationSite::kWeakNextOffset) 1677 return false; 1678 if (parent->IsCodeDataContainer() && 1679 field_offset == CodeDataContainer::kNextCodeLinkOffset) 1680 return false; 1681 if (parent->IsContext() && 1682 field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK)) 1683 return false; 1684 return true; 1685 } 1686 1687 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj, 1688 int parent_entry, 1689 String* reference_name, 1690 Object* child_obj, 1691 int field_offset) { 1692 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1693 HeapEntry* child_entry = GetEntry(child_obj); 1694 if (child_entry == nullptr) return; 1695 filler_->SetNamedReference(HeapGraphEdge::kContextVariable, parent_entry, 1696 names_->GetName(reference_name), child_entry); 1697 MarkVisitedField(field_offset); 1698 } 1699 1700 void V8HeapExplorer::MarkVisitedField(int offset) { 1701 if (offset < 0) return; 1702 int index = offset / kPointerSize; 1703 DCHECK(!visited_fields_[index]); 1704 visited_fields_[index] = true; 1705 } 1706 1707 1708 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj, 1709 int parent_entry, 1710 const char* reference_name, 1711 Object* child_obj) { 1712 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1713 HeapEntry* child_entry = GetEntry(child_obj); 1714 if (child_entry == nullptr) return; 1715 filler_->SetNamedReference(HeapGraphEdge::kShortcut, parent_entry, 1716 reference_name, child_entry); 1717 } 1718 1719 1720 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj, 1721 int parent_entry, 1722 int index, 1723 Object* child_obj) { 1724 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1725 HeapEntry* child_entry = GetEntry(child_obj); 1726 if (child_entry == nullptr) return; 1727 filler_->SetIndexedReference(HeapGraphEdge::kElement, parent_entry, index, 1728 child_entry); 1729 } 1730 1731 1732 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj, 1733 int parent_entry, 1734 const char* reference_name, 1735 Object* child_obj, 1736 int field_offset) { 1737 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1738 HeapEntry* child_entry = GetEntry(child_obj); 1739 if (child_entry == nullptr) return; 1740 if (IsEssentialObject(child_obj)) { 1741 filler_->SetNamedReference(HeapGraphEdge::kInternal, 1742 parent_entry, 1743 reference_name, 1744 child_entry); 1745 } 1746 MarkVisitedField(field_offset); 1747 } 1748 1749 1750 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj, 1751 int parent_entry, 1752 int index, 1753 Object* child_obj, 1754 int field_offset) { 1755 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1756 HeapEntry* child_entry = GetEntry(child_obj); 1757 if (child_entry == nullptr) return; 1758 if (IsEssentialObject(child_obj)) { 1759 filler_->SetNamedReference(HeapGraphEdge::kInternal, 1760 parent_entry, 1761 names_->GetName(index), 1762 child_entry); 1763 } 1764 MarkVisitedField(field_offset); 1765 } 1766 1767 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj, 1768 int parent_entry, int index, 1769 Object* child_obj, int field_offset) { 1770 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1771 HeapEntry* child_entry = GetEntry(child_obj); 1772 if (child_entry != nullptr && IsEssentialObject(child_obj) && 1773 IsEssentialHiddenReference(parent_obj, field_offset)) { 1774 filler_->SetIndexedReference(HeapGraphEdge::kHidden, parent_entry, index, 1775 child_entry); 1776 } 1777 } 1778 1779 1780 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, 1781 int parent_entry, 1782 const char* reference_name, 1783 Object* child_obj, 1784 int field_offset) { 1785 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1786 HeapEntry* child_entry = GetEntry(child_obj); 1787 if (child_entry == nullptr) return; 1788 if (IsEssentialObject(child_obj)) { 1789 filler_->SetNamedReference(HeapGraphEdge::kWeak, 1790 parent_entry, 1791 reference_name, 1792 child_entry); 1793 } 1794 MarkVisitedField(field_offset); 1795 } 1796 1797 1798 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, 1799 int parent_entry, 1800 int index, 1801 Object* child_obj, 1802 int field_offset) { 1803 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1804 HeapEntry* child_entry = GetEntry(child_obj); 1805 if (child_entry == nullptr) return; 1806 if (IsEssentialObject(child_obj)) { 1807 filler_->SetNamedReference(HeapGraphEdge::kWeak, 1808 parent_entry, 1809 names_->GetFormatted("%d", index), 1810 child_entry); 1811 } 1812 MarkVisitedField(field_offset); 1813 } 1814 1815 void V8HeapExplorer::SetDataOrAccessorPropertyReference( 1816 PropertyKind kind, JSObject* parent_obj, int parent_entry, 1817 Name* reference_name, Object* child_obj, const char* name_format_string, 1818 int field_offset) { 1819 if (kind == kAccessor) { 1820 ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name, 1821 child_obj, field_offset); 1822 } else { 1823 SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj, 1824 name_format_string, field_offset); 1825 } 1826 } 1827 1828 1829 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj, 1830 int parent_entry, 1831 Name* reference_name, 1832 Object* child_obj, 1833 const char* name_format_string, 1834 int field_offset) { 1835 DCHECK(parent_entry == GetEntry(parent_obj)->index()); 1836 HeapEntry* child_entry = GetEntry(child_obj); 1837 if (child_entry == nullptr) return; 1838 HeapGraphEdge::Type type = 1839 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0 1840 ? HeapGraphEdge::kProperty 1841 : HeapGraphEdge::kInternal; 1842 const char* name = 1843 name_format_string != nullptr && reference_name->IsString() 1844 ? names_->GetFormatted( 1845 name_format_string, 1846 String::cast(reference_name) 1847 ->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL) 1848 .get()) 1849 : names_->GetName(reference_name); 1850 1851 filler_->SetNamedReference(type, parent_entry, name, child_entry); 1852 MarkVisitedField(field_offset); 1853 } 1854 1855 void V8HeapExplorer::SetRootGcRootsReference() { 1856 filler_->SetIndexedAutoIndexReference( 1857 HeapGraphEdge::kElement, 1858 snapshot_->root()->index(), 1859 snapshot_->gc_roots()); 1860 } 1861 1862 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) { 1863 HeapEntry* child_entry = GetEntry(child_obj); 1864 DCHECK_NOT_NULL(child_entry); 1865 filler_->SetNamedAutoIndexReference(HeapGraphEdge::kShortcut, 1866 snapshot_->root()->index(), nullptr, 1867 child_entry); 1868 } 1869 1870 void V8HeapExplorer::SetGcRootsReference(Root root) { 1871 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, 1872 snapshot_->gc_roots()->index(), 1873 snapshot_->gc_subroot(root)); 1874 } 1875 1876 void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description, 1877 bool is_weak, Object* child_obj) { 1878 HeapEntry* child_entry = GetEntry(child_obj); 1879 if (child_entry == nullptr) return; 1880 const char* name = GetStrongGcSubrootName(child_obj); 1881 HeapGraphEdge::Type edge_type = 1882 is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kInternal; 1883 if (name != nullptr) { 1884 filler_->SetNamedReference(edge_type, snapshot_->gc_subroot(root)->index(), 1885 name, child_entry); 1886 } else { 1887 filler_->SetNamedAutoIndexReference(edge_type, 1888 snapshot_->gc_subroot(root)->index(), 1889 description, child_entry); 1890 } 1891 1892 // Add a shortcut to JS global object reference at snapshot root. 1893 // That allows the user to easily find global objects. They are 1894 // also used as starting points in distance calculations. 1895 if (is_weak || !child_obj->IsNativeContext()) return; 1896 1897 JSGlobalObject* global = Context::cast(child_obj)->global_object(); 1898 if (!global->IsJSGlobalObject()) return; 1899 1900 if (user_roots_.Contains(global)) return; 1901 1902 user_roots_.Insert(global); 1903 SetUserGlobalReference(global); 1904 } 1905 1906 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) { 1907 ReadOnlyRoots roots(heap_); 1908 if (strong_gc_subroot_names_.is_empty()) { 1909 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name); 1910 #define RO_NAME_ENTRY(name) \ 1911 strong_gc_subroot_names_.SetTag(roots.name(), #name); 1912 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name) 1913 STRONG_MUTABLE_ROOT_LIST(ROOT_NAME) 1914 #undef ROOT_NAME 1915 #define ROOT_NAME(type, name, camel_name) RO_NAME_ENTRY(name) 1916 STRONG_READ_ONLY_ROOT_LIST(ROOT_NAME) 1917 #undef ROOT_NAME 1918 #define STRUCT_MAP_NAME(NAME, Name, name) RO_NAME_ENTRY(name##_map) 1919 STRUCT_LIST(STRUCT_MAP_NAME) 1920 #undef STRUCT_MAP_NAME 1921 #define ALLOCATION_SITE_MAP_NAME(NAME, Name, Size, name) \ 1922 RO_NAME_ENTRY(name##_map) 1923 ALLOCATION_SITE_LIST(ALLOCATION_SITE_MAP_NAME) 1924 #undef ALLOCATION_SITE_MAP_NAME 1925 #define DATA_HANDLER_MAP_NAME(NAME, Name, Size, name) NAME_ENTRY(name##_map) 1926 DATA_HANDLER_LIST(DATA_HANDLER_MAP_NAME) 1927 #undef DATA_HANDLER_MAP_NAME 1928 #define STRING_NAME(name, str) RO_NAME_ENTRY(name) 1929 INTERNALIZED_STRING_LIST(STRING_NAME) 1930 #undef STRING_NAME 1931 #define SYMBOL_NAME(name) RO_NAME_ENTRY(name) 1932 PRIVATE_SYMBOL_LIST(SYMBOL_NAME) 1933 #undef SYMBOL_NAME 1934 #define SYMBOL_NAME(name, description) RO_NAME_ENTRY(name) 1935 PUBLIC_SYMBOL_LIST(SYMBOL_NAME) 1936 WELL_KNOWN_SYMBOL_LIST(SYMBOL_NAME) 1937 #undef SYMBOL_NAME 1938 #define ACCESSOR_NAME(accessor_name, AccessorName) \ 1939 NAME_ENTRY(accessor_name##_accessor) 1940 ACCESSOR_INFO_LIST(ACCESSOR_NAME) 1941 #undef ACCESSOR_NAME 1942 #undef NAME_ENTRY 1943 #undef RO_NAME_ENTRY 1944 CHECK(!strong_gc_subroot_names_.is_empty()); 1945 } 1946 return strong_gc_subroot_names_.GetTag(object); 1947 } 1948 1949 void V8HeapExplorer::TagObject(Object* obj, const char* tag) { 1950 if (IsEssentialObject(obj)) { 1951 HeapEntry* entry = GetEntry(obj); 1952 if (entry->name()[0] == '\0') { 1953 entry->set_name(tag); 1954 } 1955 } 1956 } 1957 1958 class GlobalObjectsEnumerator : public RootVisitor { 1959 public: 1960 void VisitRootPointers(Root root, const char* description, Object** start, 1961 Object** end) override { 1962 for (Object** p = start; p < end; p++) { 1963 if (!(*p)->IsNativeContext()) continue; 1964 JSObject* proxy = Context::cast(*p)->global_proxy(); 1965 if (!proxy->IsJSGlobalProxy()) continue; 1966 Object* global = proxy->map()->prototype(); 1967 if (!global->IsJSGlobalObject()) continue; 1968 objects_.push_back(Handle<JSGlobalObject>(JSGlobalObject::cast(global), 1969 proxy->GetIsolate())); 1970 } 1971 } 1972 int count() const { return static_cast<int>(objects_.size()); } 1973 Handle<JSGlobalObject>& at(int i) { return objects_[i]; } 1974 1975 private: 1976 std::vector<Handle<JSGlobalObject>> objects_; 1977 }; 1978 1979 1980 // Modifies heap. Must not be run during heap traversal. 1981 void V8HeapExplorer::TagGlobalObjects() { 1982 Isolate* isolate = heap_->isolate(); 1983 HandleScope scope(isolate); 1984 GlobalObjectsEnumerator enumerator; 1985 isolate->global_handles()->IterateAllRoots(&enumerator); 1986 std::vector<const char*> urls(enumerator.count()); 1987 for (int i = 0, l = enumerator.count(); i < l; ++i) { 1988 urls[i] = global_object_name_resolver_ 1989 ? global_object_name_resolver_->GetName(Utils::ToLocal( 1990 Handle<JSObject>::cast(enumerator.at(i)))) 1991 : nullptr; 1992 } 1993 1994 DisallowHeapAllocation no_allocation; 1995 for (int i = 0, l = enumerator.count(); i < l; ++i) { 1996 objects_tags_.SetTag(*enumerator.at(i), urls[i]); 1997 } 1998 } 1999 2000 class EmbedderGraphImpl : public EmbedderGraph { 2001 public: 2002 struct Edge { 2003 Node* from; 2004 Node* to; 2005 const char* name; 2006 }; 2007 2008 class V8NodeImpl : public Node { 2009 public: 2010 explicit V8NodeImpl(Object* object) : object_(object) {} 2011 Object* GetObject() { return object_; } 2012 2013 // Node overrides. 2014 bool IsEmbedderNode() override { return false; } 2015 const char* Name() override { 2016 // The name should be retrieved via GetObject(). 2017 UNREACHABLE(); 2018 return ""; 2019 } 2020 size_t SizeInBytes() override { 2021 // The size should be retrieved via GetObject(). 2022 UNREACHABLE(); 2023 return 0; 2024 } 2025 2026 private: 2027 Object* object_; 2028 }; 2029 2030 Node* V8Node(const v8::Local<v8::Value>& value) final { 2031 Handle<Object> object = v8::Utils::OpenHandle(*value); 2032 DCHECK(!object.is_null()); 2033 return AddNode(std::unique_ptr<Node>(new V8NodeImpl(*object))); 2034 } 2035 2036 Node* AddNode(std::unique_ptr<Node> node) final { 2037 Node* result = node.get(); 2038 nodes_.push_back(std::move(node)); 2039 return result; 2040 } 2041 2042 void AddEdge(Node* from, Node* to, const char* name) final { 2043 edges_.push_back({from, to, name}); 2044 } 2045 2046 const std::vector<std::unique_ptr<Node>>& nodes() { return nodes_; } 2047 const std::vector<Edge>& edges() { return edges_; } 2048 2049 private: 2050 std::vector<std::unique_ptr<Node>> nodes_; 2051 std::vector<Edge> edges_; 2052 }; 2053 2054 class GlobalHandlesExtractor : public PersistentHandleVisitor { 2055 public: 2056 explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer) 2057 : explorer_(explorer) {} 2058 ~GlobalHandlesExtractor() override {} 2059 void VisitPersistentHandle(Persistent<Value>* value, 2060 uint16_t class_id) override { 2061 Handle<Object> object = Utils::OpenPersistent(value); 2062 explorer_->VisitSubtreeWrapper(object.location(), class_id); 2063 } 2064 2065 private: 2066 NativeObjectsExplorer* explorer_; 2067 }; 2068 2069 2070 class BasicHeapEntriesAllocator : public HeapEntriesAllocator { 2071 public: 2072 BasicHeapEntriesAllocator( 2073 HeapSnapshot* snapshot, 2074 HeapEntry::Type entries_type) 2075 : snapshot_(snapshot), 2076 names_(snapshot_->profiler()->names()), 2077 heap_object_map_(snapshot_->profiler()->heap_object_map()), 2078 entries_type_(entries_type) { 2079 } 2080 virtual HeapEntry* AllocateEntry(HeapThing ptr); 2081 private: 2082 HeapSnapshot* snapshot_; 2083 StringsStorage* names_; 2084 HeapObjectsMap* heap_object_map_; 2085 HeapEntry::Type entries_type_; 2086 }; 2087 2088 2089 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) { 2090 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr); 2091 intptr_t elements = info->GetElementCount(); 2092 intptr_t size = info->GetSizeInBytes(); 2093 const char* name = elements != -1 2094 ? names_->GetFormatted("%s / %" V8PRIdPTR " entries", 2095 info->GetLabel(), elements) 2096 : names_->GetCopy(info->GetLabel()); 2097 return snapshot_->AddEntry( 2098 entries_type_, 2099 name, 2100 heap_object_map_->GenerateId(info), 2101 size != -1 ? static_cast<int>(size) : 0, 2102 0); 2103 } 2104 2105 class EmbedderGraphEntriesAllocator : public HeapEntriesAllocator { 2106 public: 2107 explicit EmbedderGraphEntriesAllocator(HeapSnapshot* snapshot) 2108 : snapshot_(snapshot), 2109 names_(snapshot_->profiler()->names()), 2110 heap_object_map_(snapshot_->profiler()->heap_object_map()) {} 2111 virtual HeapEntry* AllocateEntry(HeapThing ptr); 2112 2113 private: 2114 HeapSnapshot* snapshot_; 2115 StringsStorage* names_; 2116 HeapObjectsMap* heap_object_map_; 2117 }; 2118 2119 namespace { 2120 2121 const char* EmbedderGraphNodeName(StringsStorage* names, 2122 EmbedderGraphImpl::Node* node) { 2123 const char* prefix = node->NamePrefix(); 2124 return prefix ? names->GetFormatted("%s %s", prefix, node->Name()) 2125 : names->GetCopy(node->Name()); 2126 } 2127 2128 HeapEntry::Type EmbedderGraphNodeType(EmbedderGraphImpl::Node* node) { 2129 return HeapEntry::kNative; 2130 } 2131 2132 // Merges the names of an embedder node and its wrapper node. 2133 // If the wrapper node name contains a tag suffix (part after '/') then the 2134 // result is the embedder node name concatenated with the tag suffix. 2135 // Otherwise, the result is the embedder node name. 2136 const char* MergeNames(StringsStorage* names, const char* embedder_name, 2137 const char* wrapper_name) { 2138 for (const char* suffix = wrapper_name; *suffix; suffix++) { 2139 if (*suffix == '/') { 2140 return names->GetFormatted("%s %s", embedder_name, suffix); 2141 } 2142 } 2143 return embedder_name; 2144 } 2145 2146 } // anonymous namespace 2147 2148 HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(HeapThing ptr) { 2149 EmbedderGraphImpl::Node* node = 2150 reinterpret_cast<EmbedderGraphImpl::Node*>(ptr); 2151 DCHECK(node->IsEmbedderNode()); 2152 size_t size = node->SizeInBytes(); 2153 return snapshot_->AddEntry( 2154 EmbedderGraphNodeType(node), EmbedderGraphNodeName(names_, node), 2155 static_cast<SnapshotObjectId>(reinterpret_cast<uintptr_t>(node) << 1), 2156 static_cast<int>(size), 0); 2157 } 2158 2159 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo { 2160 public: 2161 explicit NativeGroupRetainedObjectInfo(const char* label) 2162 : disposed_(false), 2163 hash_(reinterpret_cast<intptr_t>(label)), 2164 label_(label) {} 2165 2166 virtual ~NativeGroupRetainedObjectInfo() {} 2167 virtual void Dispose() { 2168 CHECK(!disposed_); 2169 disposed_ = true; 2170 delete this; 2171 } 2172 virtual bool IsEquivalent(RetainedObjectInfo* other) { 2173 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel()); 2174 } 2175 virtual intptr_t GetHash() { return hash_; } 2176 virtual const char* GetLabel() { return label_; } 2177 2178 private: 2179 bool disposed_; 2180 intptr_t hash_; 2181 const char* label_; 2182 }; 2183 2184 NativeObjectsExplorer::NativeObjectsExplorer( 2185 HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress) 2186 : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()), 2187 snapshot_(snapshot), 2188 names_(snapshot_->profiler()->names()), 2189 embedder_queried_(false), 2190 native_groups_(0, SeededStringHasher(isolate_->heap()->HashSeed())), 2191 synthetic_entries_allocator_( 2192 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic)), 2193 native_entries_allocator_( 2194 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative)), 2195 embedder_graph_entries_allocator_( 2196 new EmbedderGraphEntriesAllocator(snapshot)), 2197 filler_(nullptr) {} 2198 2199 NativeObjectsExplorer::~NativeObjectsExplorer() { 2200 for (auto map_entry : objects_by_info_) { 2201 v8::RetainedObjectInfo* info = map_entry.first; 2202 info->Dispose(); 2203 std::vector<HeapObject*>* objects = map_entry.second; 2204 delete objects; 2205 } 2206 for (auto map_entry : native_groups_) { 2207 NativeGroupRetainedObjectInfo* info = map_entry.second; 2208 info->Dispose(); 2209 } 2210 } 2211 2212 2213 int NativeObjectsExplorer::EstimateObjectsCount() { 2214 FillRetainedObjects(); 2215 return static_cast<int>(objects_by_info_.size()); 2216 } 2217 2218 2219 void NativeObjectsExplorer::FillRetainedObjects() { 2220 if (embedder_queried_) return; 2221 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_)); 2222 v8::HeapProfiler::RetainerInfos infos = 2223 snapshot_->profiler()->GetRetainerInfos(isolate_); 2224 for (auto& pair : infos.groups) { 2225 std::vector<HeapObject*>* info = GetVectorMaybeDisposeInfo(pair.first); 2226 for (auto& persistent : pair.second) { 2227 if (persistent->IsEmpty()) continue; 2228 2229 Handle<Object> object = v8::Utils::OpenHandle( 2230 *persistent->Get(reinterpret_cast<v8::Isolate*>(isolate_))); 2231 DCHECK(!object.is_null()); 2232 HeapObject* heap_object = HeapObject::cast(*object); 2233 info->push_back(heap_object); 2234 in_groups_.Insert(heap_object); 2235 } 2236 } 2237 2238 // Record objects that are not in ObjectGroups, but have class ID. 2239 GlobalHandlesExtractor extractor(this); 2240 isolate_->global_handles()->IterateAllRootsWithClassIds(&extractor); 2241 2242 edges_ = std::move(infos.edges); 2243 embedder_queried_ = true; 2244 } 2245 2246 void NativeObjectsExplorer::FillEdges() { 2247 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_)); 2248 // Fill in actual edges found. 2249 for (auto& pair : edges_) { 2250 if (pair.first->IsEmpty() || pair.second->IsEmpty()) continue; 2251 2252 Handle<Object> parent_object = v8::Utils::OpenHandle( 2253 *pair.first->Get(reinterpret_cast<v8::Isolate*>(isolate_))); 2254 HeapObject* parent = HeapObject::cast(*parent_object); 2255 int parent_entry = 2256 filler_->FindOrAddEntry(parent, native_entries_allocator_.get()) 2257 ->index(); 2258 DCHECK_NE(parent_entry, HeapEntry::kNoEntry); 2259 Handle<Object> child_object = v8::Utils::OpenHandle( 2260 *pair.second->Get(reinterpret_cast<v8::Isolate*>(isolate_))); 2261 HeapObject* child = HeapObject::cast(*child_object); 2262 HeapEntry* child_entry = 2263 filler_->FindOrAddEntry(child, native_entries_allocator_.get()); 2264 filler_->SetNamedReference(HeapGraphEdge::kInternal, parent_entry, "native", 2265 child_entry); 2266 } 2267 edges_.clear(); 2268 } 2269 2270 std::vector<HeapObject*>* NativeObjectsExplorer::GetVectorMaybeDisposeInfo( 2271 v8::RetainedObjectInfo* info) { 2272 auto map_entry = objects_by_info_.find(info); 2273 if (map_entry != objects_by_info_.end()) { 2274 info->Dispose(); 2275 } else { 2276 objects_by_info_[info] = new std::vector<HeapObject*>(); 2277 } 2278 return objects_by_info_[info]; 2279 } 2280 2281 HeapEntry* NativeObjectsExplorer::EntryForEmbedderGraphNode( 2282 EmbedderGraphImpl::Node* node) { 2283 EmbedderGraphImpl::Node* wrapper = node->WrapperNode(); 2284 if (wrapper) { 2285 node = wrapper; 2286 } 2287 if (node->IsEmbedderNode()) { 2288 return filler_->FindOrAddEntry(node, 2289 embedder_graph_entries_allocator_.get()); 2290 } else { 2291 EmbedderGraphImpl::V8NodeImpl* v8_node = 2292 static_cast<EmbedderGraphImpl::V8NodeImpl*>(node); 2293 Object* object = v8_node->GetObject(); 2294 if (object->IsSmi()) return nullptr; 2295 HeapEntry* entry = filler_->FindEntry(HeapObject::cast(object)); 2296 return entry; 2297 } 2298 } 2299 2300 bool NativeObjectsExplorer::IterateAndExtractReferences( 2301 SnapshotFiller* filler) { 2302 filler_ = filler; 2303 2304 if (FLAG_heap_profiler_use_embedder_graph && 2305 snapshot_->profiler()->HasBuildEmbedderGraphCallback()) { 2306 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_)); 2307 DisallowHeapAllocation no_allocation; 2308 EmbedderGraphImpl graph; 2309 snapshot_->profiler()->BuildEmbedderGraph(isolate_, &graph); 2310 for (const auto& node : graph.nodes()) { 2311 if (node->IsRootNode()) { 2312 filler_->SetIndexedAutoIndexReference( 2313 HeapGraphEdge::kElement, snapshot_->root()->index(), 2314 EntryForEmbedderGraphNode(node.get())); 2315 } 2316 // Adjust the name and the type of the V8 wrapper node. 2317 auto wrapper = node->WrapperNode(); 2318 if (wrapper) { 2319 HeapEntry* wrapper_entry = EntryForEmbedderGraphNode(wrapper); 2320 wrapper_entry->set_name( 2321 MergeNames(names_, EmbedderGraphNodeName(names_, node.get()), 2322 wrapper_entry->name())); 2323 wrapper_entry->set_type(EmbedderGraphNodeType(node.get())); 2324 } 2325 } 2326 // Fill edges of the graph. 2327 for (const auto& edge : graph.edges()) { 2328 HeapEntry* from = EntryForEmbedderGraphNode(edge.from); 2329 // The |from| and |to| can nullptr if the corrsponding node is a V8 node 2330 // pointing to a Smi. 2331 if (!from) continue; 2332 // Adding an entry for |edge.to| can invalidate the |from| entry because 2333 // it is an address in std::vector. Use index instead of pointer. 2334 int from_index = from->index(); 2335 HeapEntry* to = EntryForEmbedderGraphNode(edge.to); 2336 if (to) { 2337 if (edge.name == nullptr) { 2338 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, 2339 from_index, to); 2340 } else { 2341 filler_->SetNamedReference(HeapGraphEdge::kInternal, from_index, 2342 edge.name, to); 2343 } 2344 } 2345 } 2346 } else { 2347 FillRetainedObjects(); 2348 FillEdges(); 2349 if (EstimateObjectsCount() > 0) { 2350 for (auto map_entry : objects_by_info_) { 2351 v8::RetainedObjectInfo* info = map_entry.first; 2352 SetNativeRootReference(info); 2353 std::vector<HeapObject*>* objects = map_entry.second; 2354 for (HeapObject* object : *objects) { 2355 SetWrapperNativeReferences(object, info); 2356 } 2357 } 2358 SetRootNativeRootsReference(); 2359 } 2360 } 2361 filler_ = nullptr; 2362 return true; 2363 } 2364 2365 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo( 2366 const char* label) { 2367 const char* label_copy = names_->GetCopy(label); 2368 auto map_entry = native_groups_.find(label_copy); 2369 if (map_entry == native_groups_.end()) { 2370 native_groups_[label_copy] = new NativeGroupRetainedObjectInfo(label); 2371 } 2372 return native_groups_[label_copy]; 2373 } 2374 2375 void NativeObjectsExplorer::SetNativeRootReference( 2376 v8::RetainedObjectInfo* info) { 2377 HeapEntry* child_entry = 2378 filler_->FindOrAddEntry(info, native_entries_allocator_.get()); 2379 DCHECK_NOT_NULL(child_entry); 2380 NativeGroupRetainedObjectInfo* group_info = 2381 FindOrAddGroupInfo(info->GetGroupLabel()); 2382 HeapEntry* group_entry = 2383 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_.get()); 2384 // |FindOrAddEntry| can move and resize the entries backing store. Reload 2385 // potentially-stale pointer. 2386 child_entry = filler_->FindEntry(info); 2387 filler_->SetNamedAutoIndexReference( 2388 HeapGraphEdge::kInternal, group_entry->index(), nullptr, child_entry); 2389 } 2390 2391 2392 void NativeObjectsExplorer::SetWrapperNativeReferences( 2393 HeapObject* wrapper, v8::RetainedObjectInfo* info) { 2394 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper); 2395 DCHECK_NOT_NULL(wrapper_entry); 2396 HeapEntry* info_entry = 2397 filler_->FindOrAddEntry(info, native_entries_allocator_.get()); 2398 DCHECK_NOT_NULL(info_entry); 2399 filler_->SetNamedReference(HeapGraphEdge::kInternal, 2400 wrapper_entry->index(), 2401 "native", 2402 info_entry); 2403 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, 2404 info_entry->index(), 2405 wrapper_entry); 2406 } 2407 2408 2409 void NativeObjectsExplorer::SetRootNativeRootsReference() { 2410 for (auto map_entry : native_groups_) { 2411 NativeGroupRetainedObjectInfo* group_info = map_entry.second; 2412 HeapEntry* group_entry = 2413 filler_->FindOrAddEntry(group_info, native_entries_allocator_.get()); 2414 DCHECK_NOT_NULL(group_entry); 2415 filler_->SetIndexedAutoIndexReference( 2416 HeapGraphEdge::kElement, 2417 snapshot_->root()->index(), 2418 group_entry); 2419 } 2420 } 2421 2422 2423 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) { 2424 if (in_groups_.Contains(*p)) return; 2425 Isolate* isolate = isolate_; 2426 v8::RetainedObjectInfo* info = 2427 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p); 2428 if (info == nullptr) return; 2429 GetVectorMaybeDisposeInfo(info)->push_back(HeapObject::cast(*p)); 2430 } 2431 2432 2433 HeapSnapshotGenerator::HeapSnapshotGenerator( 2434 HeapSnapshot* snapshot, 2435 v8::ActivityControl* control, 2436 v8::HeapProfiler::ObjectNameResolver* resolver, 2437 Heap* heap) 2438 : snapshot_(snapshot), 2439 control_(control), 2440 v8_heap_explorer_(snapshot_, this, resolver), 2441 dom_explorer_(snapshot_, this), 2442 heap_(heap) { 2443 } 2444 2445 namespace { 2446 class NullContextScope { 2447 public: 2448 explicit NullContextScope(Isolate* isolate) 2449 : isolate_(isolate), prev_(isolate->context()) { 2450 isolate_->set_context(nullptr); 2451 } 2452 ~NullContextScope() { isolate_->set_context(prev_); } 2453 2454 private: 2455 Isolate* isolate_; 2456 Context* prev_; 2457 }; 2458 } // namespace 2459 2460 bool HeapSnapshotGenerator::GenerateSnapshot() { 2461 v8_heap_explorer_.TagGlobalObjects(); 2462 2463 // TODO(1562) Profiler assumes that any object that is in the heap after 2464 // full GC is reachable from the root when computing dominators. 2465 // This is not true for weakly reachable objects. 2466 // As a temporary solution we call GC twice. 2467 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask, 2468 GarbageCollectionReason::kHeapProfiler); 2469 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask, 2470 GarbageCollectionReason::kHeapProfiler); 2471 2472 NullContextScope null_context_scope(heap_->isolate()); 2473 2474 #ifdef VERIFY_HEAP 2475 Heap* debug_heap = heap_; 2476 if (FLAG_verify_heap) { 2477 debug_heap->Verify(); 2478 } 2479 #endif 2480 2481 InitProgressCounter(); 2482 2483 #ifdef VERIFY_HEAP 2484 if (FLAG_verify_heap) { 2485 debug_heap->Verify(); 2486 } 2487 #endif 2488 2489 snapshot_->AddSyntheticRootEntries(); 2490 2491 if (!FillReferences()) return false; 2492 2493 snapshot_->FillChildren(); 2494 snapshot_->RememberLastJSObjectId(); 2495 2496 progress_counter_ = progress_total_; 2497 if (!ProgressReport(true)) return false; 2498 return true; 2499 } 2500 2501 void HeapSnapshotGenerator::ProgressStep() { 2502 ++progress_counter_; 2503 } 2504 2505 bool HeapSnapshotGenerator::ProgressReport(bool force) { 2506 const int kProgressReportGranularity = 10000; 2507 if (control_ != nullptr && 2508 (force || progress_counter_ % kProgressReportGranularity == 0)) { 2509 return control_->ReportProgressValue(progress_counter_, progress_total_) == 2510 v8::ActivityControl::kContinue; 2511 } 2512 return true; 2513 } 2514 2515 void HeapSnapshotGenerator::InitProgressCounter() { 2516 if (control_ == nullptr) return; 2517 // The +1 ensures that intermediate ProgressReport calls will never signal 2518 // that the work is finished (i.e. progress_counter_ == progress_total_). 2519 // Only the forced ProgressReport() at the end of GenerateSnapshot() 2520 // should signal that the work is finished because signalling finished twice 2521 // breaks the DevTools frontend. 2522 progress_total_ = v8_heap_explorer_.EstimateObjectsCount() + 2523 dom_explorer_.EstimateObjectsCount() + 1; 2524 progress_counter_ = 0; 2525 } 2526 2527 bool HeapSnapshotGenerator::FillReferences() { 2528 SnapshotFiller filler(snapshot_, &entries_); 2529 return v8_heap_explorer_.IterateAndExtractReferences(&filler) && 2530 dom_explorer_.IterateAndExtractReferences(&filler); 2531 } 2532 2533 2534 template<int bytes> struct MaxDecimalDigitsIn; 2535 template<> struct MaxDecimalDigitsIn<4> { 2536 static const int kSigned = 11; 2537 static const int kUnsigned = 10; 2538 }; 2539 template<> struct MaxDecimalDigitsIn<8> { 2540 static const int kSigned = 20; 2541 static const int kUnsigned = 20; 2542 }; 2543 2544 2545 class OutputStreamWriter { 2546 public: 2547 explicit OutputStreamWriter(v8::OutputStream* stream) 2548 : stream_(stream), 2549 chunk_size_(stream->GetChunkSize()), 2550 chunk_(chunk_size_), 2551 chunk_pos_(0), 2552 aborted_(false) { 2553 DCHECK_GT(chunk_size_, 0); 2554 } 2555 bool aborted() { return aborted_; } 2556 void AddCharacter(char c) { 2557 DCHECK_NE(c, '\0'); 2558 DCHECK(chunk_pos_ < chunk_size_); 2559 chunk_[chunk_pos_++] = c; 2560 MaybeWriteChunk(); 2561 } 2562 void AddString(const char* s) { 2563 AddSubstring(s, StrLength(s)); 2564 } 2565 void AddSubstring(const char* s, int n) { 2566 if (n <= 0) return; 2567 DCHECK(static_cast<size_t>(n) <= strlen(s)); 2568 const char* s_end = s + n; 2569 while (s < s_end) { 2570 int s_chunk_size = 2571 Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s)); 2572 DCHECK_GT(s_chunk_size, 0); 2573 MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size); 2574 s += s_chunk_size; 2575 chunk_pos_ += s_chunk_size; 2576 MaybeWriteChunk(); 2577 } 2578 } 2579 void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); } 2580 void Finalize() { 2581 if (aborted_) return; 2582 DCHECK(chunk_pos_ < chunk_size_); 2583 if (chunk_pos_ != 0) { 2584 WriteChunk(); 2585 } 2586 stream_->EndOfStream(); 2587 } 2588 2589 private: 2590 template<typename T> 2591 void AddNumberImpl(T n, const char* format) { 2592 // Buffer for the longest value plus trailing \0 2593 static const int kMaxNumberSize = 2594 MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1; 2595 if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) { 2596 int result = SNPrintF( 2597 chunk_.SubVector(chunk_pos_, chunk_size_), format, n); 2598 DCHECK_NE(result, -1); 2599 chunk_pos_ += result; 2600 MaybeWriteChunk(); 2601 } else { 2602 EmbeddedVector<char, kMaxNumberSize> buffer; 2603 int result = SNPrintF(buffer, format, n); 2604 USE(result); 2605 DCHECK_NE(result, -1); 2606 AddString(buffer.start()); 2607 } 2608 } 2609 void MaybeWriteChunk() { 2610 DCHECK(chunk_pos_ <= chunk_size_); 2611 if (chunk_pos_ == chunk_size_) { 2612 WriteChunk(); 2613 } 2614 } 2615 void WriteChunk() { 2616 if (aborted_) return; 2617 if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) == 2618 v8::OutputStream::kAbort) aborted_ = true; 2619 chunk_pos_ = 0; 2620 } 2621 2622 v8::OutputStream* stream_; 2623 int chunk_size_; 2624 ScopedVector<char> chunk_; 2625 int chunk_pos_; 2626 bool aborted_; 2627 }; 2628 2629 2630 // type, name|index, to_node. 2631 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3; 2632 // type, name, id, self_size, edge_count, trace_node_id. 2633 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6; 2634 2635 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) { 2636 if (AllocationTracker* allocation_tracker = 2637 snapshot_->profiler()->allocation_tracker()) { 2638 allocation_tracker->PrepareForSerialization(); 2639 } 2640 DCHECK_NULL(writer_); 2641 writer_ = new OutputStreamWriter(stream); 2642 SerializeImpl(); 2643 delete writer_; 2644 writer_ = nullptr; 2645 } 2646 2647 2648 void HeapSnapshotJSONSerializer::SerializeImpl() { 2649 DCHECK_EQ(0, snapshot_->root()->index()); 2650 writer_->AddCharacter('{'); 2651 writer_->AddString("\"snapshot\":{"); 2652 SerializeSnapshot(); 2653 if (writer_->aborted()) return; 2654 writer_->AddString("},\n"); 2655 writer_->AddString("\"nodes\":["); 2656 SerializeNodes(); 2657 if (writer_->aborted()) return; 2658 writer_->AddString("],\n"); 2659 writer_->AddString("\"edges\":["); 2660 SerializeEdges(); 2661 if (writer_->aborted()) return; 2662 writer_->AddString("],\n"); 2663 2664 writer_->AddString("\"trace_function_infos\":["); 2665 SerializeTraceNodeInfos(); 2666 if (writer_->aborted()) return; 2667 writer_->AddString("],\n"); 2668 writer_->AddString("\"trace_tree\":["); 2669 SerializeTraceTree(); 2670 if (writer_->aborted()) return; 2671 writer_->AddString("],\n"); 2672 2673 writer_->AddString("\"samples\":["); 2674 SerializeSamples(); 2675 if (writer_->aborted()) return; 2676 writer_->AddString("],\n"); 2677 2678 writer_->AddString("\"locations\":["); 2679 SerializeLocations(); 2680 if (writer_->aborted()) return; 2681 writer_->AddString("],\n"); 2682 2683 writer_->AddString("\"strings\":["); 2684 SerializeStrings(); 2685 if (writer_->aborted()) return; 2686 writer_->AddCharacter(']'); 2687 writer_->AddCharacter('}'); 2688 writer_->Finalize(); 2689 } 2690 2691 2692 int HeapSnapshotJSONSerializer::GetStringId(const char* s) { 2693 base::HashMap::Entry* cache_entry = 2694 strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s)); 2695 if (cache_entry->value == nullptr) { 2696 cache_entry->value = reinterpret_cast<void*>(next_string_id_++); 2697 } 2698 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value)); 2699 } 2700 2701 2702 namespace { 2703 2704 template<size_t size> struct ToUnsigned; 2705 2706 template<> struct ToUnsigned<4> { 2707 typedef uint32_t Type; 2708 }; 2709 2710 template<> struct ToUnsigned<8> { 2711 typedef uint64_t Type; 2712 }; 2713 2714 } // namespace 2715 2716 2717 template<typename T> 2718 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) { 2719 STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned 2720 int number_of_digits = 0; 2721 T t = value; 2722 do { 2723 ++number_of_digits; 2724 } while (t /= 10); 2725 2726 buffer_pos += number_of_digits; 2727 int result = buffer_pos; 2728 do { 2729 int last_digit = static_cast<int>(value % 10); 2730 buffer[--buffer_pos] = '0' + last_digit; 2731 value /= 10; 2732 } while (value); 2733 return result; 2734 } 2735 2736 2737 template<typename T> 2738 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) { 2739 typename ToUnsigned<sizeof(value)>::Type unsigned_value = value; 2740 STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value)); 2741 return utoa_impl(unsigned_value, buffer, buffer_pos); 2742 } 2743 2744 2745 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge, 2746 bool first_edge) { 2747 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0 2748 static const int kBufferSize = 2749 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT 2750 EmbeddedVector<char, kBufferSize> buffer; 2751 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement 2752 || edge->type() == HeapGraphEdge::kHidden 2753 ? edge->index() : GetStringId(edge->name()); 2754 int buffer_pos = 0; 2755 if (!first_edge) { 2756 buffer[buffer_pos++] = ','; 2757 } 2758 buffer_pos = utoa(edge->type(), buffer, buffer_pos); 2759 buffer[buffer_pos++] = ','; 2760 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos); 2761 buffer[buffer_pos++] = ','; 2762 buffer_pos = utoa(to_node_index(edge->to()), buffer, buffer_pos); 2763 buffer[buffer_pos++] = '\n'; 2764 buffer[buffer_pos++] = '\0'; 2765 writer_->AddString(buffer.start()); 2766 } 2767 2768 2769 void HeapSnapshotJSONSerializer::SerializeEdges() { 2770 std::deque<HeapGraphEdge*>& edges = snapshot_->children(); 2771 for (size_t i = 0; i < edges.size(); ++i) { 2772 DCHECK(i == 0 || 2773 edges[i - 1]->from()->index() <= edges[i]->from()->index()); 2774 SerializeEdge(edges[i], i == 0); 2775 if (writer_->aborted()) return; 2776 } 2777 } 2778 2779 void HeapSnapshotJSONSerializer::SerializeNode(const HeapEntry* entry) { 2780 // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0 2781 static const int kBufferSize = 2782 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT 2783 + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT 2784 + 6 + 1 + 1; 2785 EmbeddedVector<char, kBufferSize> buffer; 2786 int buffer_pos = 0; 2787 if (to_node_index(entry) != 0) { 2788 buffer[buffer_pos++] = ','; 2789 } 2790 buffer_pos = utoa(entry->type(), buffer, buffer_pos); 2791 buffer[buffer_pos++] = ','; 2792 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos); 2793 buffer[buffer_pos++] = ','; 2794 buffer_pos = utoa(entry->id(), buffer, buffer_pos); 2795 buffer[buffer_pos++] = ','; 2796 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos); 2797 buffer[buffer_pos++] = ','; 2798 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos); 2799 buffer[buffer_pos++] = ','; 2800 buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos); 2801 buffer[buffer_pos++] = '\n'; 2802 buffer[buffer_pos++] = '\0'; 2803 writer_->AddString(buffer.start()); 2804 } 2805 2806 2807 void HeapSnapshotJSONSerializer::SerializeNodes() { 2808 std::vector<HeapEntry>& entries = snapshot_->entries(); 2809 for (const HeapEntry& entry : entries) { 2810 SerializeNode(&entry); 2811 if (writer_->aborted()) return; 2812 } 2813 } 2814 2815 2816 void HeapSnapshotJSONSerializer::SerializeSnapshot() { 2817 writer_->AddString("\"meta\":"); 2818 // The object describing node serialization layout. 2819 // We use a set of macros to improve readability. 2820 2821 // clang-format off 2822 #define JSON_A(s) "[" s "]" 2823 #define JSON_O(s) "{" s "}" 2824 #define JSON_S(s) "\"" s "\"" 2825 writer_->AddString(JSON_O( 2826 JSON_S("node_fields") ":" JSON_A( 2827 JSON_S("type") "," 2828 JSON_S("name") "," 2829 JSON_S("id") "," 2830 JSON_S("self_size") "," 2831 JSON_S("edge_count") "," 2832 JSON_S("trace_node_id")) "," 2833 JSON_S("node_types") ":" JSON_A( 2834 JSON_A( 2835 JSON_S("hidden") "," 2836 JSON_S("array") "," 2837 JSON_S("string") "," 2838 JSON_S("object") "," 2839 JSON_S("code") "," 2840 JSON_S("closure") "," 2841 JSON_S("regexp") "," 2842 JSON_S("number") "," 2843 JSON_S("native") "," 2844 JSON_S("synthetic") "," 2845 JSON_S("concatenated string") "," 2846 JSON_S("sliced string") "," 2847 JSON_S("symbol") "," 2848 JSON_S("bigint")) "," 2849 JSON_S("string") "," 2850 JSON_S("number") "," 2851 JSON_S("number") "," 2852 JSON_S("number") "," 2853 JSON_S("number") "," 2854 JSON_S("number")) "," 2855 JSON_S("edge_fields") ":" JSON_A( 2856 JSON_S("type") "," 2857 JSON_S("name_or_index") "," 2858 JSON_S("to_node")) "," 2859 JSON_S("edge_types") ":" JSON_A( 2860 JSON_A( 2861 JSON_S("context") "," 2862 JSON_S("element") "," 2863 JSON_S("property") "," 2864 JSON_S("internal") "," 2865 JSON_S("hidden") "," 2866 JSON_S("shortcut") "," 2867 JSON_S("weak")) "," 2868 JSON_S("string_or_number") "," 2869 JSON_S("node")) "," 2870 JSON_S("trace_function_info_fields") ":" JSON_A( 2871 JSON_S("function_id") "," 2872 JSON_S("name") "," 2873 JSON_S("script_name") "," 2874 JSON_S("script_id") "," 2875 JSON_S("line") "," 2876 JSON_S("column")) "," 2877 JSON_S("trace_node_fields") ":" JSON_A( 2878 JSON_S("id") "," 2879 JSON_S("function_info_index") "," 2880 JSON_S("count") "," 2881 JSON_S("size") "," 2882 JSON_S("children")) "," 2883 JSON_S("sample_fields") ":" JSON_A( 2884 JSON_S("timestamp_us") "," 2885 JSON_S("last_assigned_id")) "," 2886 JSON_S("location_fields") ":" JSON_A( 2887 JSON_S("object_index") "," 2888 JSON_S("script_id") "," 2889 JSON_S("line") "," 2890 JSON_S("column")))); 2891 // clang-format on 2892 #undef JSON_S 2893 #undef JSON_O 2894 #undef JSON_A 2895 writer_->AddString(",\"node_count\":"); 2896 writer_->AddNumber(static_cast<unsigned>(snapshot_->entries().size())); 2897 writer_->AddString(",\"edge_count\":"); 2898 writer_->AddNumber(static_cast<double>(snapshot_->edges().size())); 2899 writer_->AddString(",\"trace_function_count\":"); 2900 uint32_t count = 0; 2901 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); 2902 if (tracker) { 2903 count = static_cast<uint32_t>(tracker->function_info_list().size()); 2904 } 2905 writer_->AddNumber(count); 2906 } 2907 2908 2909 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) { 2910 static const char hex_chars[] = "0123456789ABCDEF"; 2911 w->AddString("\\u"); 2912 w->AddCharacter(hex_chars[(u >> 12) & 0xF]); 2913 w->AddCharacter(hex_chars[(u >> 8) & 0xF]); 2914 w->AddCharacter(hex_chars[(u >> 4) & 0xF]); 2915 w->AddCharacter(hex_chars[u & 0xF]); 2916 } 2917 2918 2919 void HeapSnapshotJSONSerializer::SerializeTraceTree() { 2920 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); 2921 if (!tracker) return; 2922 AllocationTraceTree* traces = tracker->trace_tree(); 2923 SerializeTraceNode(traces->root()); 2924 } 2925 2926 2927 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) { 2928 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0 2929 const int kBufferSize = 2930 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT 2931 + 4 + 1 + 1; 2932 EmbeddedVector<char, kBufferSize> buffer; 2933 int buffer_pos = 0; 2934 buffer_pos = utoa(node->id(), buffer, buffer_pos); 2935 buffer[buffer_pos++] = ','; 2936 buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos); 2937 buffer[buffer_pos++] = ','; 2938 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos); 2939 buffer[buffer_pos++] = ','; 2940 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos); 2941 buffer[buffer_pos++] = ','; 2942 buffer[buffer_pos++] = '['; 2943 buffer[buffer_pos++] = '\0'; 2944 writer_->AddString(buffer.start()); 2945 2946 int i = 0; 2947 for (AllocationTraceNode* child : node->children()) { 2948 if (i++ > 0) { 2949 writer_->AddCharacter(','); 2950 } 2951 SerializeTraceNode(child); 2952 } 2953 writer_->AddCharacter(']'); 2954 } 2955 2956 2957 // 0-based position is converted to 1-based during the serialization. 2958 static int SerializePosition(int position, const Vector<char>& buffer, 2959 int buffer_pos) { 2960 if (position == -1) { 2961 buffer[buffer_pos++] = '0'; 2962 } else { 2963 DCHECK_GE(position, 0); 2964 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos); 2965 } 2966 return buffer_pos; 2967 } 2968 2969 2970 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() { 2971 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); 2972 if (!tracker) return; 2973 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0 2974 const int kBufferSize = 2975 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT 2976 + 6 + 1 + 1; 2977 EmbeddedVector<char, kBufferSize> buffer; 2978 int i = 0; 2979 for (AllocationTracker::FunctionInfo* info : tracker->function_info_list()) { 2980 int buffer_pos = 0; 2981 if (i++ > 0) { 2982 buffer[buffer_pos++] = ','; 2983 } 2984 buffer_pos = utoa(info->function_id, buffer, buffer_pos); 2985 buffer[buffer_pos++] = ','; 2986 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos); 2987 buffer[buffer_pos++] = ','; 2988 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos); 2989 buffer[buffer_pos++] = ','; 2990 // The cast is safe because script id is a non-negative Smi. 2991 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer, 2992 buffer_pos); 2993 buffer[buffer_pos++] = ','; 2994 buffer_pos = SerializePosition(info->line, buffer, buffer_pos); 2995 buffer[buffer_pos++] = ','; 2996 buffer_pos = SerializePosition(info->column, buffer, buffer_pos); 2997 buffer[buffer_pos++] = '\n'; 2998 buffer[buffer_pos++] = '\0'; 2999 writer_->AddString(buffer.start()); 3000 } 3001 } 3002 3003 3004 void HeapSnapshotJSONSerializer::SerializeSamples() { 3005 const std::vector<HeapObjectsMap::TimeInterval>& samples = 3006 snapshot_->profiler()->heap_object_map()->samples(); 3007 if (samples.empty()) return; 3008 base::TimeTicks start_time = samples[0].timestamp; 3009 // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0 3010 const int kBufferSize = MaxDecimalDigitsIn<sizeof( 3011 base::TimeDelta().InMicroseconds())>::kUnsigned + 3012 MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned + 3013 2 + 1 + 1; 3014 EmbeddedVector<char, kBufferSize> buffer; 3015 int i = 0; 3016 for (const HeapObjectsMap::TimeInterval& sample : samples) { 3017 int buffer_pos = 0; 3018 if (i++ > 0) { 3019 buffer[buffer_pos++] = ','; 3020 } 3021 base::TimeDelta time_delta = sample.timestamp - start_time; 3022 buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos); 3023 buffer[buffer_pos++] = ','; 3024 buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos); 3025 buffer[buffer_pos++] = '\n'; 3026 buffer[buffer_pos++] = '\0'; 3027 writer_->AddString(buffer.start()); 3028 } 3029 } 3030 3031 3032 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) { 3033 writer_->AddCharacter('\n'); 3034 writer_->AddCharacter('\"'); 3035 for ( ; *s != '\0'; ++s) { 3036 switch (*s) { 3037 case '\b': 3038 writer_->AddString("\\b"); 3039 continue; 3040 case '\f': 3041 writer_->AddString("\\f"); 3042 continue; 3043 case '\n': 3044 writer_->AddString("\\n"); 3045 continue; 3046 case '\r': 3047 writer_->AddString("\\r"); 3048 continue; 3049 case '\t': 3050 writer_->AddString("\\t"); 3051 continue; 3052 case '\"': 3053 case '\\': 3054 writer_->AddCharacter('\\'); 3055 writer_->AddCharacter(*s); 3056 continue; 3057 default: 3058 if (*s > 31 && *s < 128) { 3059 writer_->AddCharacter(*s); 3060 } else if (*s <= 31) { 3061 // Special character with no dedicated literal. 3062 WriteUChar(writer_, *s); 3063 } else { 3064 // Convert UTF-8 into \u UTF-16 literal. 3065 size_t length = 1, cursor = 0; 3066 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { } 3067 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor); 3068 if (c != unibrow::Utf8::kBadChar) { 3069 WriteUChar(writer_, c); 3070 DCHECK_NE(cursor, 0); 3071 s += cursor - 1; 3072 } else { 3073 writer_->AddCharacter('?'); 3074 } 3075 } 3076 } 3077 } 3078 writer_->AddCharacter('\"'); 3079 } 3080 3081 3082 void HeapSnapshotJSONSerializer::SerializeStrings() { 3083 ScopedVector<const unsigned char*> sorted_strings( 3084 strings_.occupancy() + 1); 3085 for (base::HashMap::Entry* entry = strings_.Start(); entry != nullptr; 3086 entry = strings_.Next(entry)) { 3087 int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value)); 3088 sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key); 3089 } 3090 writer_->AddString("\"<dummy>\""); 3091 for (int i = 1; i < sorted_strings.length(); ++i) { 3092 writer_->AddCharacter(','); 3093 SerializeString(sorted_strings[i]); 3094 if (writer_->aborted()) return; 3095 } 3096 } 3097 3098 void HeapSnapshotJSONSerializer::SerializeLocation( 3099 const SourceLocation& location) { 3100 // The buffer needs space for 4 unsigned ints, 3 commas, \n and \0 3101 static const int kBufferSize = 3102 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 4 + 3 + 2; 3103 EmbeddedVector<char, kBufferSize> buffer; 3104 int buffer_pos = 0; 3105 buffer_pos = utoa(to_node_index(location.entry_index), buffer, buffer_pos); 3106 buffer[buffer_pos++] = ','; 3107 buffer_pos = utoa(location.scriptId, buffer, buffer_pos); 3108 buffer[buffer_pos++] = ','; 3109 buffer_pos = utoa(location.line, buffer, buffer_pos); 3110 buffer[buffer_pos++] = ','; 3111 buffer_pos = utoa(location.col, buffer, buffer_pos); 3112 buffer[buffer_pos++] = '\n'; 3113 buffer[buffer_pos++] = '\0'; 3114 writer_->AddString(buffer.start()); 3115 } 3116 3117 void HeapSnapshotJSONSerializer::SerializeLocations() { 3118 const std::vector<SourceLocation>& locations = snapshot_->locations(); 3119 for (size_t i = 0; i < locations.size(); i++) { 3120 if (i > 0) writer_->AddCharacter(','); 3121 SerializeLocation(locations[i]); 3122 if (writer_->aborted()) return; 3123 } 3124 } 3125 3126 } // namespace internal 3127 } // namespace v8 3128