// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap-snapshot-generator-inl.h"

#include "src/allocation-tracker.h"
#include "src/code-stubs.h"
#include "src/conversions.h"
#include "src/debug.h"
#include "src/heap-profiler.h"
#include "src/types.h"

namespace v8 {
namespace internal {


HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}


HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  ASSERT(type == kElement || type == kHidden);
}


void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}


const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }


void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ",
            self_size(), id(), indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}


const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    case kSymbol: return "/symbol/";
    default: return "???";
  }
}


// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};

}  // namespace


HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
                           const char* title,
                           unsigned uid)
    : profiler_(profiler),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}


void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
  delete this;
}


void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}


HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty());  // Root entry must be the first one.
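  // The synthetic entries form the top of the snapshot graph. A sketch of
  // the layout produced by this function and the two helpers below: the
  // nameless root comes first (entry 0), followed by "(GC roots)" and one
  // subroot per VisitorSynchronization sync tag.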
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0,
                              0);
  root_index_ = entry->index();
  ASSERT(root_index_ == 0);
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
  HeapEntry* entry = AddEntry(
      HeapEntry::kSynthetic,
      VisitorSynchronization::kTagNames[tag],
      HeapObjectsMap::GetNthGcSubrootId(tag),
      0,
      0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  HeapEntry entry(this, type, name, id, size, trace_node_id);
  entries_.Add(entry);
  return &entries_.last();
}


void HeapSnapshot::FillChildren() {
  ASSERT(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  ASSERT(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}


class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1)
    return NULL;
  return entries_by_id->at(index);
}


template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}


List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}


void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}


size_t HeapSnapshot::RawSnapshotSize() const {
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}


// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
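// A sketch of the resulting ID layout (assuming kObjectIdStep == 2, as
// declared in the header): the reserved odd IDs are root = 1, GC roots = 3,
// then one ID per GC subroot tag; ordinary heap objects continue from
// kFirstAvailableObjectId in steps of two, while GenerateId() below shifts
// its hash left by one bit, so embedder objects always receive even IDs and
// the two ranges never collide.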
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;


static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}


HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in HashMap we see no difference between two cases:
  // it has an entry with NULL as the value, or it has created
  // a new entry on the fly with NULL as the default value.
  // With such a dummy element we have a guarantee that all entries_map_
  // entries will have a value field greater than 0. This fact is used in
  // the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}


bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
                                                   true);
    if (to_entry->value != NULL) {
      // We found an existing entry with the to address for an old object.
      // Without this operation we would have two EntryInfos with the same
      // value in the addr field. That would be bad, because later, in
      // RemoveDeadEntries, one of these entries would be removed together
      // with the corresponding entries_map_ entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // Size of an object can change during its life, so to keep information
    // about the object in entries_ consistent, we have to adjust size when the
    // object is migrated.
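    // In effect the EntryInfo index recorded for |from| is re-keyed under
    // |to| below: the entries_ slot keeps its SnapshotObjectId, and only its
    // addr and size fields change, so an object keeps a stable ID across
    // moves.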
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != NULL;
}


void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}


SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}


SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}


void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}


namespace {


struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
      : obj(obj),
        expected_size(expected_size) {
  }

  HeapObject* obj;
  int expected_size;

  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size,
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object : %p %6d. Next address is %p\n",
             obj->address(),
             expected_size,
             obj->address() + obj->Size());
    }
  }
};


static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}


}  // namespace


int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry = entries_map_.Lookup(
        obj->address(), ComputePointerHash(obj->address()), false);
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}


SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int preferred_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= preferred_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}


void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}


SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}


size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}


HeapEntriesMap::HeapEntriesMap()
    : entries_(HashMap::PointersMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}


HeapObjectsSet::HeapObjectsSet()
    : entries_(HashMap::PointersMatch) {
}


void HeapObjectsSet::Clear() {
  entries_.Clear();
}


bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}


HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));


V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}


V8HeapExplorer::~V8HeapExplorer() {
}


HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject &&
             object < kLastGcSubrootObject) {
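    // GC subroot "objects" are not real heap objects: kFirstGcSubrootObject
    // and kLastGcSubrootObject are reserved IDs cast to HeapObject* (see the
    // constants above), so a plain pointer range check identifies them here.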
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}


HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}


class SnapshotFiller {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        names_->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};


class GcSubrootsEnumerator : public ObjectVisitor {
 public:
  GcSubrootsEnumerator(
      SnapshotFiller* filler, V8HeapExplorer* explorer)
      : filler_(filler),
        explorer_(explorer),
        previous_object_count_(0),
        object_count_(0) {
  }
  void VisitPointers(Object** start, Object** end) {
    object_count_ += end - start;
  }
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Skip empty subroots.
    if (previous_object_count_ != object_count_) {
      previous_object_count_ = object_count_;
      filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
    }
  }
 private:
  SnapshotFiller* filler_;
  V8HeapExplorer* explorer_;
  intptr_t previous_object_count_;
  intptr_t object_count_;
};


void V8HeapExplorer::AddRootEntries(SnapshotFiller* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}


const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case CELL_TYPE: return "system / Cell";
    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}


int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}


class IndexedReferencesExtractor : public ObjectVisitor {
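  // Tags fields that have already been reported as named references so that
  // the indexed pass in VisitPointers() skips them: MarkVisitedField() sets
  // the two low pointer bits to kTag, and CheckVisitedAndUnmark() restores
  // ordinary heap-object tagging. A sketch, assuming the usual kHeapObjectTag
  // of 1:
  //   ...01 (tagged pointer) --mark--> ...11 --unmark--> ...01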
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(0) {
  }
  void VisitCodeEntry(Address entry_address) {
    Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
    generator_->SetInternalReference(parent_obj_, parent_, "code", code);
    generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field));
    ASSERT(!IsMarked(p));
    intptr_t p_tagged = p | kTag;
    Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged);
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    intptr_t p = reinterpret_cast<intptr_t>(*field);
    if (IsMarked(p)) {
      intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag;
      *field = reinterpret_cast<Object*>(p_untagged);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }

  static const intptr_t kTaggingMask = 3;
  static const intptr_t kTag = 3;

  static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; }

  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};


bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2.

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSWeakSet()) {
    ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
  } else if (obj->IsJSWeakMap()) {
    ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}


bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;

  if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else {
    ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
  }
  return true;
}


void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}


void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->proto_string(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // A JSFunction has either bindings or literals, never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset
                  == JSFunction::kNonWeakFieldsEndOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
                  == JSFunction::kSize);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
                         GlobalObject::kGlobalContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
    STATIC_ASSERT(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
                  4 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
    SetWeakReference(view, entry, "weak_next", view->weak_next(),
                     JSArrayBufferView::kWeakNextOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}


void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}


void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
  SetInternalReference(symbol, entry,
                       "name", symbol->name(),
                       Symbol::kNameOffset);
}


void V8HeapExplorer::ExtractJSWeakCollectionReferences(
    int entry, JSWeakCollection* collection) {
  MarkAsWeakContainer(collection->table());
  SetInternalReference(collection, entry,
                       "table", collection->table(),
                       JSWeakCollection::kTableOffset);
}


void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
                         FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
                     FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
                  Context::FIRST_WEAK_SLOT);
    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                  Context::NATIVE_CONTEXT_SLOTS);
    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 5 ==
                  Context::NATIVE_CONTEXT_SLOTS);
  }
}


void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    int transitions_entry = GetEntry(transitions)->index();
    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(transitions, transitions_entry,
                         "back_pointer", back_pointer);

    if (FLAG_collect_maps && map->CanTransition()) {
      if (!transitions->IsSimpleTransition()) {
        if (transitions->HasPrototypeTransitions()) {
          FixedArray* prototype_transitions =
              transitions->GetPrototypeTransitions();
          MarkAsWeakContainer(prototype_transitions);
          TagObject(prototype_transitions, "(prototype transitions)");
          SetInternalReference(transitions, transitions_entry,
                               "prototype_transitions", prototype_transitions);
        }
        // TODO(alph): transitions keys are strong links.
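        // Note: marking the array as a weak container makes
        // ExtractFixedArrayReferences() report every element of it as a weak
        // edge, even though (per the TODO above) the transition keys are in
        // fact strong links.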
        MarkAsWeakContainer(transitions);
      }
    }

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "back_pointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);

  MarkAsWeakContainer(map->code_cache());
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  TagObject(map->dependent_code(), "(dependent code)");
  MarkAsWeakContainer(map->dependent_code());
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code(),
                       Map::kDependentCodeOffset);
}


void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetInternalReference(obj, entry,
                       "feedback_vector", shared->feedback_vector(),
                       SharedFunctionInfo::kFeedbackVectorOffset);
}


void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}


void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter(),
                       AccessorPair::kGetterOffset);
  SetInternalReference(accessors, entry, "setter", accessors->setter(),
                       AccessorPair::kSetterOffset);
}


void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}


void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
  TagObject(code, names_->GetFormatted("(%s builtin)", name));
}


void V8HeapExplorer::TagCodeObject(Code* code) {
  if (code->kind() == Code::STUB) {
    TagObject(code, names_->GetFormatted(
        "(%s code)", CodeStub::MajorName(
            static_cast<CodeStub::Major>(code->major_key()), true)));
  }
}


void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
  SetInternalReference(code, entry,
                       "constant_pool", code->constant_pool(),
                       Code::kConstantPoolOffset);
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link(),
                     Code::kNextCodeLinkOffset);
  }
}


void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
  SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
}


void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
}


void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  ExtractCellReferences(entry, cell);
  SetInternalReference(cell, entry, "type", cell->type(),
                       PropertyCell::kTypeOffset);
  MarkAsWeakContainer(cell->dependent_code());
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
                       PropertyCell::kDependentCodeOffset);
}


void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
  SetInternalReference(site, entry, "nested_site", site->nested_site(),
                       AllocationSite::kNestedSiteOffset);
  MarkAsWeakContainer(site->dependent_code());
  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                       AllocationSite::kDependentCodeOffset);
  // Do not visit weak_next as it is not visited by the StaticVisitor,
  // and we're not very interested in the weak_next field here.
  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
                AllocationSite::BodyDescriptor::kEndOffset);
}


class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
 public:
  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
      : size_(size)
      , explorer_(explorer) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr) {
    return explorer_->AddEntry(
        static_cast<Address>(ptr),
        HeapEntry::kNative, "system / JSArrayBufferData", size_);
  }
 private:
  size_t size_;
  V8HeapExplorer* explorer_;
};


void V8HeapExplorer::ExtractJSArrayBufferReferences(
    int entry, JSArrayBuffer* buffer) {
  SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
                   JSArrayBuffer::kWeakNextOffset);
  SetWeakReference(buffer, entry,
                   "weak_first_view", buffer->weak_first_view(),
                   JSArrayBuffer::kWeakFirstViewOffset);
  // Set up a reference to the native memory backing_store object.
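  // The backing store lives outside the V8 heap, so it is modeled as a
  // synthetic "system / JSArrayBufferData" native entry (see the allocator
  // class above) sized from byte_length(), reachable from the buffer through
  // an internal "backing_store" edge.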
  if (!buffer->backing_store())
    return;
  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
  JSArrayBufferDataEntryAllocator allocator(data_size, this);
  HeapEntry* data_entry =
      filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             entry, "backing_store", data_entry);
}


void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
  bool is_weak = weak_containers_.Contains(array);
  for (int i = 0, l = array->length(); i < l; ++i) {
    if (is_weak) {
      SetWeakReference(array, entry,
                       i, array->get(i), array->OffsetOfElementAt(i));
    } else {
      SetInternalReference(array, entry,
                           i, array->get(i), array->OffsetOfElementAt(i));
    }
  }
}


void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  }
}


void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);

          Name* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            FieldIndex field_index =
                FieldIndex::ForDescriptor(js_obj->map(), i);
            Object* value = js_obj->RawFastPropertyAt(field_index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstant(i));
          break;
        case CALLBACKS:
          ExtractAccessorPairProperty(
              js_obj, entry,
              descs->GetKey(i), descs->GetValue(i));
          break;
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
          break;
        case NONEXISTENT:
          UNREACHABLE();
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
js_obj->property_dictionary();
1696     int length = dictionary->Capacity();
1697     for (int i = 0; i < length; ++i) {
1698       Object* k = dictionary->KeyAt(i);
1699       if (dictionary->IsKey(k)) {
1700         Object* target = dictionary->ValueAt(i);
1701         // We assume that global objects can only have slow properties.
1702         Object* value = target->IsPropertyCell()
1703             ? PropertyCell::cast(target)->value()
1704             : target;
1705         if (k == heap_->hidden_string()) {
1706           TagObject(value, "(hidden properties)");
1707           SetInternalReference(js_obj, entry, "hidden_properties", value);
1708           continue;
1709         }
1710         if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
1711         SetPropertyReference(js_obj, entry, String::cast(k), value);
1712       }
1713     }
1714   }
1715 }
1716
1717
1718 bool V8HeapExplorer::ExtractAccessorPairProperty(
1719     JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
1720   if (!callback_obj->IsAccessorPair()) return false;
1721   AccessorPair* accessors = AccessorPair::cast(callback_obj);
1722   Object* getter = accessors->getter();
1723   if (!getter->IsOddball()) {
1724     SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
1725   }
1726   Object* setter = accessors->setter();
1727   if (!setter->IsOddball()) {
1728     SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
1729   }
1730   return true;
1731 }
1732
1733
1734 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1735   if (js_obj->HasFastObjectElements()) {
1736     FixedArray* elements = FixedArray::cast(js_obj->elements());
1737     int length = js_obj->IsJSArray() ?
1738         Smi::cast(JSArray::cast(js_obj)->length())->value() :
1739         elements->length();
1740     for (int i = 0; i < length; ++i) {
1741       if (!elements->get(i)->IsTheHole()) {
1742         SetElementReference(js_obj, entry, i, elements->get(i));
1743       }
1744     }
1745   } else if (js_obj->HasDictionaryElements()) {
1746     SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1747     int length = dictionary->Capacity();
1748     for (int i = 0; i < length; ++i) {
1749       Object* k = dictionary->KeyAt(i);
1750       if (dictionary->IsKey(k)) {
1751         ASSERT(k->IsNumber());
1752         uint32_t index = static_cast<uint32_t>(k->Number());
1753         SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1754       }
1755     }
1756   }
1757 }
1758
1759
1760 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1761   int length = js_obj->GetInternalFieldCount();
1762   for (int i = 0; i < length; ++i) {
1763     Object* o = js_obj->GetInternalField(i);
1764     SetInternalReference(
1765         js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1766   }
1767 }
1768
1769
1770 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1771   Heap* heap = object->GetHeap();
1772   if (object->IsJSFunction()) return heap->closure_string();
1773   String* constructor_name = object->constructor_name();
1774   if (constructor_name == heap->Object_string()) {
1775     // Look up an immediate "constructor" property; if it is a function,
1776     // return its name. This is for instances of binding objects, which
1777     // have a prototype constructor of type "Object".
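    // (Illustration: for an object whose map reports "Object" as the
    // constructor name, an own "constructor" property pointing at a named
    // JSFunction, e.g. function Binding() {}, yields "Binding" instead.)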
1778 Object* constructor_prop = NULL; 1779 Isolate* isolate = heap->isolate(); 1780 LookupResult result(isolate); 1781 object->LookupOwnRealNamedProperty( 1782 isolate->factory()->constructor_string(), &result); 1783 if (!result.IsFound()) return object->constructor_name(); 1784 1785 constructor_prop = result.GetLazyValue(); 1786 if (constructor_prop->IsJSFunction()) { 1787 Object* maybe_name = 1788 JSFunction::cast(constructor_prop)->shared()->name(); 1789 if (maybe_name->IsString()) { 1790 String* name = String::cast(maybe_name); 1791 if (name->length() > 0) return name; 1792 } 1793 } 1794 } 1795 return object->constructor_name(); 1796 } 1797 1798 1799 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) { 1800 if (!obj->IsHeapObject()) return NULL; 1801 return filler_->FindOrAddEntry(obj, this); 1802 } 1803 1804 1805 class RootsReferencesExtractor : public ObjectVisitor { 1806 private: 1807 struct IndexTag { 1808 IndexTag(int index, VisitorSynchronization::SyncTag tag) 1809 : index(index), tag(tag) { } 1810 int index; 1811 VisitorSynchronization::SyncTag tag; 1812 }; 1813 1814 public: 1815 explicit RootsReferencesExtractor(Heap* heap) 1816 : collecting_all_references_(false), 1817 previous_reference_count_(0), 1818 heap_(heap) { 1819 } 1820 1821 void VisitPointers(Object** start, Object** end) { 1822 if (collecting_all_references_) { 1823 for (Object** p = start; p < end; p++) all_references_.Add(*p); 1824 } else { 1825 for (Object** p = start; p < end; p++) strong_references_.Add(*p); 1826 } 1827 } 1828 1829 void SetCollectingAllReferences() { collecting_all_references_ = true; } 1830 1831 void FillReferences(V8HeapExplorer* explorer) { 1832 ASSERT(strong_references_.length() <= all_references_.length()); 1833 Builtins* builtins = heap_->isolate()->builtins(); 1834 for (int i = 0; i < reference_tags_.length(); ++i) { 1835 explorer->SetGcRootsReference(reference_tags_[i].tag); 1836 } 1837 int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0; 1838 while (all_index < all_references_.length()) { 1839 bool is_strong = strong_index < strong_references_.length() 1840 && strong_references_[strong_index] == all_references_[all_index]; 1841 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag, 1842 !is_strong, 1843 all_references_[all_index]); 1844 if (reference_tags_[tags_index].tag == 1845 VisitorSynchronization::kBuiltins) { 1846 ASSERT(all_references_[all_index]->IsCode()); 1847 explorer->TagBuiltinCodeObject( 1848 Code::cast(all_references_[all_index]), 1849 builtins->name(builtin_index++)); 1850 } 1851 ++all_index; 1852 if (is_strong) ++strong_index; 1853 if (reference_tags_[tags_index].index == all_index) ++tags_index; 1854 } 1855 } 1856 1857 void Synchronize(VisitorSynchronization::SyncTag tag) { 1858 if (collecting_all_references_ && 1859 previous_reference_count_ != all_references_.length()) { 1860 previous_reference_count_ = all_references_.length(); 1861 reference_tags_.Add(IndexTag(previous_reference_count_, tag)); 1862 } 1863 } 1864 1865 private: 1866 bool collecting_all_references_; 1867 List<Object*> strong_references_; 1868 List<Object*> all_references_; 1869 int previous_reference_count_; 1870 List<IndexTag> reference_tags_; 1871 Heap* heap_; 1872 }; 1873 1874 1875 bool V8HeapExplorer::IterateAndExtractReferences( 1876 SnapshotFiller* filler) { 1877 filler_ = filler; 1878 1879 // Make sure builtin code objects get their builtin tags 1880 // first. Otherwise a particular JSFunction object could set 1881 // its custom name to a generic builtin. 
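  // The two IterateRoots() calls below feed RootsReferencesExtractor; its
  // FillReferences() then tags each builtin Code object with its builtin
  // name before the heap-wide extraction passes run.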
1882 SetRootGcRootsReference(); 1883 RootsReferencesExtractor extractor(heap_); 1884 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG); 1885 extractor.SetCollectingAllReferences(); 1886 heap_->IterateRoots(&extractor, VISIT_ALL); 1887 extractor.FillReferences(this); 1888 1889 // We have to do two passes as sometimes FixedArrays are used 1890 // to weakly hold their items, and it's impossible to distinguish 1891 // between these cases without processing the array owner first. 1892 bool interrupted = 1893 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() || 1894 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>(); 1895 1896 if (interrupted) { 1897 filler_ = NULL; 1898 return false; 1899 } 1900 1901 filler_ = NULL; 1902 return progress_->ProgressReport(true); 1903 } 1904 1905 1906 template<V8HeapExplorer::ExtractReferencesMethod extractor> 1907 bool V8HeapExplorer::IterateAndExtractSinglePass() { 1908 // Now iterate the whole heap. 1909 bool interrupted = false; 1910 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); 1911 // Heap iteration with filtering must be finished in any case. 1912 for (HeapObject* obj = iterator.next(); 1913 obj != NULL; 1914 obj = iterator.next(), progress_->ProgressStep()) { 1915 if (interrupted) continue; 1916 1917 HeapEntry* heap_entry = GetEntry(obj); 1918 int entry = heap_entry->index(); 1919 if ((this->*extractor)(entry, obj)) { 1920 SetInternalReference(obj, entry, 1921 "map", obj->map(), HeapObject::kMapOffset); 1922 // Extract unvisited fields as hidden references and restore tags 1923 // of visited fields. 1924 IndexedReferencesExtractor refs_extractor(this, obj, entry); 1925 obj->Iterate(&refs_extractor); 1926 } 1927 1928 if (!progress_->ProgressReport(false)) interrupted = true; 1929 } 1930 return interrupted; 1931 } 1932 1933 1934 bool V8HeapExplorer::IsEssentialObject(Object* object) { 1935 return object->IsHeapObject() 1936 && !object->IsOddball() 1937 && object != heap_->empty_byte_array() 1938 && object != heap_->empty_fixed_array() 1939 && object != heap_->empty_descriptor_array() 1940 && object != heap_->fixed_array_map() 1941 && object != heap_->cell_map() 1942 && object != heap_->global_property_cell_map() 1943 && object != heap_->shared_function_info_map() 1944 && object != heap_->free_space_map() 1945 && object != heap_->one_pointer_filler_map() 1946 && object != heap_->two_pointer_filler_map(); 1947 } 1948 1949 1950 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj, 1951 int parent_entry, 1952 String* reference_name, 1953 Object* child_obj, 1954 int field_offset) { 1955 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 1956 HeapEntry* child_entry = GetEntry(child_obj); 1957 if (child_entry != NULL) { 1958 filler_->SetNamedReference(HeapGraphEdge::kContextVariable, 1959 parent_entry, 1960 names_->GetName(reference_name), 1961 child_entry); 1962 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); 1963 } 1964 } 1965 1966 1967 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj, 1968 int parent_entry, 1969 const char* reference_name, 1970 Object* child_obj) { 1971 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 1972 HeapEntry* child_entry = GetEntry(child_obj); 1973 if (child_entry != NULL) { 1974 filler_->SetNamedReference(HeapGraphEdge::kShortcut, 1975 parent_entry, 1976 reference_name, 1977 child_entry); 1978 } 1979 } 1980 1981 1982 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj, 1983 int parent_entry, 1984 int 
index, 1985 Object* child_obj) { 1986 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 1987 HeapEntry* child_entry = GetEntry(child_obj); 1988 if (child_entry != NULL) { 1989 filler_->SetIndexedReference(HeapGraphEdge::kElement, 1990 parent_entry, 1991 index, 1992 child_entry); 1993 } 1994 } 1995 1996 1997 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj, 1998 int parent_entry, 1999 const char* reference_name, 2000 Object* child_obj, 2001 int field_offset) { 2002 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 2003 HeapEntry* child_entry = GetEntry(child_obj); 2004 if (child_entry == NULL) return; 2005 if (IsEssentialObject(child_obj)) { 2006 filler_->SetNamedReference(HeapGraphEdge::kInternal, 2007 parent_entry, 2008 reference_name, 2009 child_entry); 2010 } 2011 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); 2012 } 2013 2014 2015 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj, 2016 int parent_entry, 2017 int index, 2018 Object* child_obj, 2019 int field_offset) { 2020 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 2021 HeapEntry* child_entry = GetEntry(child_obj); 2022 if (child_entry == NULL) return; 2023 if (IsEssentialObject(child_obj)) { 2024 filler_->SetNamedReference(HeapGraphEdge::kInternal, 2025 parent_entry, 2026 names_->GetName(index), 2027 child_entry); 2028 } 2029 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); 2030 } 2031 2032 2033 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj, 2034 int parent_entry, 2035 int index, 2036 Object* child_obj) { 2037 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 2038 HeapEntry* child_entry = GetEntry(child_obj); 2039 if (child_entry != NULL && IsEssentialObject(child_obj)) { 2040 filler_->SetIndexedReference(HeapGraphEdge::kHidden, 2041 parent_entry, 2042 index, 2043 child_entry); 2044 } 2045 } 2046 2047 2048 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, 2049 int parent_entry, 2050 const char* reference_name, 2051 Object* child_obj, 2052 int field_offset) { 2053 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 2054 HeapEntry* child_entry = GetEntry(child_obj); 2055 if (child_entry == NULL) return; 2056 if (IsEssentialObject(child_obj)) { 2057 filler_->SetNamedReference(HeapGraphEdge::kWeak, 2058 parent_entry, 2059 reference_name, 2060 child_entry); 2061 } 2062 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); 2063 } 2064 2065 2066 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj, 2067 int parent_entry, 2068 int index, 2069 Object* child_obj, 2070 int field_offset) { 2071 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 2072 HeapEntry* child_entry = GetEntry(child_obj); 2073 if (child_entry == NULL) return; 2074 if (IsEssentialObject(child_obj)) { 2075 filler_->SetNamedReference(HeapGraphEdge::kWeak, 2076 parent_entry, 2077 names_->GetFormatted("%d", index), 2078 child_entry); 2079 } 2080 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); 2081 } 2082 2083 2084 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj, 2085 int parent_entry, 2086 Name* reference_name, 2087 Object* child_obj, 2088 const char* name_format_string, 2089 int field_offset) { 2090 ASSERT(parent_entry == GetEntry(parent_obj)->index()); 2091 HeapEntry* child_entry = GetEntry(child_obj); 2092 if (child_entry != NULL) { 2093 HeapGraphEdge::Type type = 2094 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0 2095 ? 
HeapGraphEdge::kProperty : HeapGraphEdge::kInternal; 2096 const char* name = name_format_string != NULL && reference_name->IsString() 2097 ? names_->GetFormatted( 2098 name_format_string, 2099 String::cast(reference_name)->ToCString( 2100 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) : 2101 names_->GetName(reference_name); 2102 2103 filler_->SetNamedReference(type, 2104 parent_entry, 2105 name, 2106 child_entry); 2107 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset); 2108 } 2109 } 2110 2111 2112 void V8HeapExplorer::SetRootGcRootsReference() { 2113 filler_->SetIndexedAutoIndexReference( 2114 HeapGraphEdge::kElement, 2115 snapshot_->root()->index(), 2116 snapshot_->gc_roots()); 2117 } 2118 2119 2120 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) { 2121 HeapEntry* child_entry = GetEntry(child_obj); 2122 ASSERT(child_entry != NULL); 2123 filler_->SetNamedAutoIndexReference( 2124 HeapGraphEdge::kShortcut, 2125 snapshot_->root()->index(), 2126 child_entry); 2127 } 2128 2129 2130 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) { 2131 filler_->SetIndexedAutoIndexReference( 2132 HeapGraphEdge::kElement, 2133 snapshot_->gc_roots()->index(), 2134 snapshot_->gc_subroot(tag)); 2135 } 2136 2137 2138 void V8HeapExplorer::SetGcSubrootReference( 2139 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) { 2140 HeapEntry* child_entry = GetEntry(child_obj); 2141 if (child_entry != NULL) { 2142 const char* name = GetStrongGcSubrootName(child_obj); 2143 if (name != NULL) { 2144 filler_->SetNamedReference( 2145 HeapGraphEdge::kInternal, 2146 snapshot_->gc_subroot(tag)->index(), 2147 name, 2148 child_entry); 2149 } else { 2150 if (is_weak) { 2151 filler_->SetNamedAutoIndexReference( 2152 HeapGraphEdge::kWeak, 2153 snapshot_->gc_subroot(tag)->index(), 2154 child_entry); 2155 } else { 2156 filler_->SetIndexedAutoIndexReference( 2157 HeapGraphEdge::kElement, 2158 snapshot_->gc_subroot(tag)->index(), 2159 child_entry); 2160 } 2161 } 2162 2163 // Add a shortcut to JS global object reference at snapshot root. 
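  // (The kShortcut edge added by SetUserGlobalReference() hangs directly
  // off the synthetic root entry, so consumers can reach each JS global
  // object without walking the gc_roots subtree.)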
2164     if (child_obj->IsNativeContext()) {
2165       Context* context = Context::cast(child_obj);
2166       GlobalObject* global = context->global_object();
2167       if (global->IsJSGlobalObject()) {
2168         bool is_debug_object =
2169             heap_->isolate()->debug()->IsDebugGlobal(global);
2170         if (!is_debug_object && !user_roots_.Contains(global)) {
2171           user_roots_.Insert(global);
2172           SetUserGlobalReference(global);
2173         }
2174       }
2175     }
2176   }
2177 }
2178
2179
2180 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2181   if (strong_gc_subroot_names_.is_empty()) {
2182 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2183 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2184     STRONG_ROOT_LIST(ROOT_NAME)
2185 #undef ROOT_NAME
2186 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2187     STRUCT_LIST(STRUCT_MAP_NAME)
2188 #undef STRUCT_MAP_NAME
2189 #define STRING_NAME(name, str) NAME_ENTRY(name)
2190     INTERNALIZED_STRING_LIST(STRING_NAME)
2191 #undef STRING_NAME
2192 #undef NAME_ENTRY
2193     CHECK(!strong_gc_subroot_names_.is_empty());
2194   }
2195   return strong_gc_subroot_names_.GetTag(object);
2196 }
2197
2198
2199 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2200   if (IsEssentialObject(obj)) {
2201     HeapEntry* entry = GetEntry(obj);
2202     if (entry->name()[0] == '\0') {
2203       entry->set_name(tag);
2204     }
2205   }
2206 }
2207
2208
2209 void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
2210   if (IsEssentialObject(object) && object->IsFixedArray()) {
2211     weak_containers_.Insert(object);
2212   }
2213 }
2214
2215
2216 class GlobalObjectsEnumerator : public ObjectVisitor {
2217  public:
2218   virtual void VisitPointers(Object** start, Object** end) {
2219     for (Object** p = start; p < end; p++) {
2220       if ((*p)->IsNativeContext()) {
2221         Context* context = Context::cast(*p);
2222         JSObject* proxy = context->global_proxy();
2223         if (proxy->IsJSGlobalProxy()) {
2224           Object* global = proxy->map()->prototype();
2225           if (global->IsJSGlobalObject()) {
2226             objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2227           }
2228         }
2229       }
2230     }
2231   }
2232   int count() { return objects_.length(); }
2233   Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2234
2235  private:
2236   List<Handle<JSGlobalObject> > objects_;
2237 };
2238
2239
2240 // Modifies heap. Must not be run during heap traversal.
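// (GetName() calls back into the embedder and may allocate, which is why
// all names are resolved first and the tags are only copied into
// objects_tags_ under DisallowHeapAllocation afterwards.)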
2241 void V8HeapExplorer::TagGlobalObjects() { 2242 Isolate* isolate = heap_->isolate(); 2243 HandleScope scope(isolate); 2244 GlobalObjectsEnumerator enumerator; 2245 isolate->global_handles()->IterateAllRoots(&enumerator); 2246 const char** urls = NewArray<const char*>(enumerator.count()); 2247 for (int i = 0, l = enumerator.count(); i < l; ++i) { 2248 if (global_object_name_resolver_) { 2249 HandleScope scope(isolate); 2250 Handle<JSGlobalObject> global_obj = enumerator.at(i); 2251 urls[i] = global_object_name_resolver_->GetName( 2252 Utils::ToLocal(Handle<JSObject>::cast(global_obj))); 2253 } else { 2254 urls[i] = NULL; 2255 } 2256 } 2257 2258 DisallowHeapAllocation no_allocation; 2259 for (int i = 0, l = enumerator.count(); i < l; ++i) { 2260 objects_tags_.SetTag(*enumerator.at(i), urls[i]); 2261 } 2262 2263 DeleteArray(urls); 2264 } 2265 2266 2267 class GlobalHandlesExtractor : public ObjectVisitor { 2268 public: 2269 explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer) 2270 : explorer_(explorer) {} 2271 virtual ~GlobalHandlesExtractor() {} 2272 virtual void VisitPointers(Object** start, Object** end) { 2273 UNREACHABLE(); 2274 } 2275 virtual void VisitEmbedderReference(Object** p, uint16_t class_id) { 2276 explorer_->VisitSubtreeWrapper(p, class_id); 2277 } 2278 private: 2279 NativeObjectsExplorer* explorer_; 2280 }; 2281 2282 2283 class BasicHeapEntriesAllocator : public HeapEntriesAllocator { 2284 public: 2285 BasicHeapEntriesAllocator( 2286 HeapSnapshot* snapshot, 2287 HeapEntry::Type entries_type) 2288 : snapshot_(snapshot), 2289 names_(snapshot_->profiler()->names()), 2290 heap_object_map_(snapshot_->profiler()->heap_object_map()), 2291 entries_type_(entries_type) { 2292 } 2293 virtual HeapEntry* AllocateEntry(HeapThing ptr); 2294 private: 2295 HeapSnapshot* snapshot_; 2296 StringsStorage* names_; 2297 HeapObjectsMap* heap_object_map_; 2298 HeapEntry::Type entries_type_; 2299 }; 2300 2301 2302 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) { 2303 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr); 2304 intptr_t elements = info->GetElementCount(); 2305 intptr_t size = info->GetSizeInBytes(); 2306 const char* name = elements != -1 2307 ? names_->GetFormatted( 2308 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements) 2309 : names_->GetCopy(info->GetLabel()); 2310 return snapshot_->AddEntry( 2311 entries_type_, 2312 name, 2313 heap_object_map_->GenerateId(info), 2314 size != -1 ? 
static_cast<int>(size) : 0, 2315 0); 2316 } 2317 2318 2319 NativeObjectsExplorer::NativeObjectsExplorer( 2320 HeapSnapshot* snapshot, 2321 SnapshottingProgressReportingInterface* progress) 2322 : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()), 2323 snapshot_(snapshot), 2324 names_(snapshot_->profiler()->names()), 2325 progress_(progress), 2326 embedder_queried_(false), 2327 objects_by_info_(RetainedInfosMatch), 2328 native_groups_(StringsMatch), 2329 filler_(NULL) { 2330 synthetic_entries_allocator_ = 2331 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic); 2332 native_entries_allocator_ = 2333 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative); 2334 } 2335 2336 2337 NativeObjectsExplorer::~NativeObjectsExplorer() { 2338 for (HashMap::Entry* p = objects_by_info_.Start(); 2339 p != NULL; 2340 p = objects_by_info_.Next(p)) { 2341 v8::RetainedObjectInfo* info = 2342 reinterpret_cast<v8::RetainedObjectInfo*>(p->key); 2343 info->Dispose(); 2344 List<HeapObject*>* objects = 2345 reinterpret_cast<List<HeapObject*>* >(p->value); 2346 delete objects; 2347 } 2348 for (HashMap::Entry* p = native_groups_.Start(); 2349 p != NULL; 2350 p = native_groups_.Next(p)) { 2351 v8::RetainedObjectInfo* info = 2352 reinterpret_cast<v8::RetainedObjectInfo*>(p->value); 2353 info->Dispose(); 2354 } 2355 delete synthetic_entries_allocator_; 2356 delete native_entries_allocator_; 2357 } 2358 2359 2360 int NativeObjectsExplorer::EstimateObjectsCount() { 2361 FillRetainedObjects(); 2362 return objects_by_info_.occupancy(); 2363 } 2364 2365 2366 void NativeObjectsExplorer::FillRetainedObjects() { 2367 if (embedder_queried_) return; 2368 Isolate* isolate = isolate_; 2369 const GCType major_gc_type = kGCTypeMarkSweepCompact; 2370 // Record objects that are joined into ObjectGroups. 2371 isolate->heap()->CallGCPrologueCallbacks( 2372 major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos); 2373 List<ObjectGroup*>* groups = isolate->global_handles()->object_groups(); 2374 for (int i = 0; i < groups->length(); ++i) { 2375 ObjectGroup* group = groups->at(i); 2376 if (group->info == NULL) continue; 2377 List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info); 2378 for (size_t j = 0; j < group->length; ++j) { 2379 HeapObject* obj = HeapObject::cast(*group->objects[j]); 2380 list->Add(obj); 2381 in_groups_.Insert(obj); 2382 } 2383 group->info = NULL; // Acquire info object ownership. 2384 } 2385 isolate->global_handles()->RemoveObjectGroups(); 2386 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags); 2387 // Record objects that are not in ObjectGroups, but have class ID. 
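  // IterateAllRootsWithClassIds() reports each such handle through
  // GlobalHandlesExtractor::VisitEmbedderReference(), which forwards it to
  // VisitSubtreeWrapper(); wrappers already recorded via groups are
  // skipped there using in_groups_.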
2388 GlobalHandlesExtractor extractor(this); 2389 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor); 2390 embedder_queried_ = true; 2391 } 2392 2393 2394 void NativeObjectsExplorer::FillImplicitReferences() { 2395 Isolate* isolate = isolate_; 2396 List<ImplicitRefGroup*>* groups = 2397 isolate->global_handles()->implicit_ref_groups(); 2398 for (int i = 0; i < groups->length(); ++i) { 2399 ImplicitRefGroup* group = groups->at(i); 2400 HeapObject* parent = *group->parent; 2401 int parent_entry = 2402 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index(); 2403 ASSERT(parent_entry != HeapEntry::kNoEntry); 2404 Object*** children = group->children; 2405 for (size_t j = 0; j < group->length; ++j) { 2406 Object* child = *children[j]; 2407 HeapEntry* child_entry = 2408 filler_->FindOrAddEntry(child, native_entries_allocator_); 2409 filler_->SetNamedReference( 2410 HeapGraphEdge::kInternal, 2411 parent_entry, 2412 "native", 2413 child_entry); 2414 } 2415 } 2416 isolate->global_handles()->RemoveImplicitRefGroups(); 2417 } 2418 2419 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo( 2420 v8::RetainedObjectInfo* info) { 2421 HashMap::Entry* entry = 2422 objects_by_info_.Lookup(info, InfoHash(info), true); 2423 if (entry->value != NULL) { 2424 info->Dispose(); 2425 } else { 2426 entry->value = new List<HeapObject*>(4); 2427 } 2428 return reinterpret_cast<List<HeapObject*>* >(entry->value); 2429 } 2430 2431 2432 bool NativeObjectsExplorer::IterateAndExtractReferences( 2433 SnapshotFiller* filler) { 2434 filler_ = filler; 2435 FillRetainedObjects(); 2436 FillImplicitReferences(); 2437 if (EstimateObjectsCount() > 0) { 2438 for (HashMap::Entry* p = objects_by_info_.Start(); 2439 p != NULL; 2440 p = objects_by_info_.Next(p)) { 2441 v8::RetainedObjectInfo* info = 2442 reinterpret_cast<v8::RetainedObjectInfo*>(p->key); 2443 SetNativeRootReference(info); 2444 List<HeapObject*>* objects = 2445 reinterpret_cast<List<HeapObject*>* >(p->value); 2446 for (int i = 0; i < objects->length(); ++i) { 2447 SetWrapperNativeReferences(objects->at(i), info); 2448 } 2449 } 2450 SetRootNativeRootsReference(); 2451 } 2452 filler_ = NULL; 2453 return true; 2454 } 2455 2456 2457 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo { 2458 public: 2459 explicit NativeGroupRetainedObjectInfo(const char* label) 2460 : disposed_(false), 2461 hash_(reinterpret_cast<intptr_t>(label)), 2462 label_(label) { 2463 } 2464 2465 virtual ~NativeGroupRetainedObjectInfo() {} 2466 virtual void Dispose() { 2467 CHECK(!disposed_); 2468 disposed_ = true; 2469 delete this; 2470 } 2471 virtual bool IsEquivalent(RetainedObjectInfo* other) { 2472 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel()); 2473 } 2474 virtual intptr_t GetHash() { return hash_; } 2475 virtual const char* GetLabel() { return label_; } 2476 2477 private: 2478 bool disposed_; 2479 intptr_t hash_; 2480 const char* label_; 2481 }; 2482 2483 2484 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo( 2485 const char* label) { 2486 const char* label_copy = names_->GetCopy(label); 2487 uint32_t hash = StringHasher::HashSequentialString( 2488 label_copy, 2489 static_cast<int>(strlen(label_copy)), 2490 isolate_->heap()->HashSeed()); 2491 HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy), 2492 hash, true); 2493 if (entry->value == NULL) { 2494 entry->value = new NativeGroupRetainedObjectInfo(label); 2495 } 2496 return 
static_cast<NativeGroupRetainedObjectInfo*>(entry->value); 2497 } 2498 2499 2500 void NativeObjectsExplorer::SetNativeRootReference( 2501 v8::RetainedObjectInfo* info) { 2502 HeapEntry* child_entry = 2503 filler_->FindOrAddEntry(info, native_entries_allocator_); 2504 ASSERT(child_entry != NULL); 2505 NativeGroupRetainedObjectInfo* group_info = 2506 FindOrAddGroupInfo(info->GetGroupLabel()); 2507 HeapEntry* group_entry = 2508 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_); 2509 filler_->SetNamedAutoIndexReference( 2510 HeapGraphEdge::kInternal, 2511 group_entry->index(), 2512 child_entry); 2513 } 2514 2515 2516 void NativeObjectsExplorer::SetWrapperNativeReferences( 2517 HeapObject* wrapper, v8::RetainedObjectInfo* info) { 2518 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper); 2519 ASSERT(wrapper_entry != NULL); 2520 HeapEntry* info_entry = 2521 filler_->FindOrAddEntry(info, native_entries_allocator_); 2522 ASSERT(info_entry != NULL); 2523 filler_->SetNamedReference(HeapGraphEdge::kInternal, 2524 wrapper_entry->index(), 2525 "native", 2526 info_entry); 2527 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, 2528 info_entry->index(), 2529 wrapper_entry); 2530 } 2531 2532 2533 void NativeObjectsExplorer::SetRootNativeRootsReference() { 2534 for (HashMap::Entry* entry = native_groups_.Start(); 2535 entry; 2536 entry = native_groups_.Next(entry)) { 2537 NativeGroupRetainedObjectInfo* group_info = 2538 static_cast<NativeGroupRetainedObjectInfo*>(entry->value); 2539 HeapEntry* group_entry = 2540 filler_->FindOrAddEntry(group_info, native_entries_allocator_); 2541 ASSERT(group_entry != NULL); 2542 filler_->SetIndexedAutoIndexReference( 2543 HeapGraphEdge::kElement, 2544 snapshot_->root()->index(), 2545 group_entry); 2546 } 2547 } 2548 2549 2550 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) { 2551 if (in_groups_.Contains(*p)) return; 2552 Isolate* isolate = isolate_; 2553 v8::RetainedObjectInfo* info = 2554 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p); 2555 if (info == NULL) return; 2556 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p)); 2557 } 2558 2559 2560 HeapSnapshotGenerator::HeapSnapshotGenerator( 2561 HeapSnapshot* snapshot, 2562 v8::ActivityControl* control, 2563 v8::HeapProfiler::ObjectNameResolver* resolver, 2564 Heap* heap) 2565 : snapshot_(snapshot), 2566 control_(control), 2567 v8_heap_explorer_(snapshot_, this, resolver), 2568 dom_explorer_(snapshot_, this), 2569 heap_(heap) { 2570 } 2571 2572 2573 bool HeapSnapshotGenerator::GenerateSnapshot() { 2574 v8_heap_explorer_.TagGlobalObjects(); 2575 2576 // TODO(1562) Profiler assumes that any object that is in the heap after 2577 // full GC is reachable from the root when computing dominators. 2578 // This is not true for weakly reachable objects. 2579 // As a temporary solution we call GC twice. 
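  // Both collections pass kMakeHeapIterableMask so that the HeapIterator
  // passes below see a fully iterable heap.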
2580 heap_->CollectAllGarbage( 2581 Heap::kMakeHeapIterableMask, 2582 "HeapSnapshotGenerator::GenerateSnapshot"); 2583 heap_->CollectAllGarbage( 2584 Heap::kMakeHeapIterableMask, 2585 "HeapSnapshotGenerator::GenerateSnapshot"); 2586 2587 #ifdef VERIFY_HEAP 2588 Heap* debug_heap = heap_; 2589 CHECK(!debug_heap->old_data_space()->was_swept_conservatively()); 2590 CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively()); 2591 CHECK(!debug_heap->code_space()->was_swept_conservatively()); 2592 CHECK(!debug_heap->cell_space()->was_swept_conservatively()); 2593 CHECK(!debug_heap->property_cell_space()-> 2594 was_swept_conservatively()); 2595 CHECK(!debug_heap->map_space()->was_swept_conservatively()); 2596 #endif 2597 2598 #ifdef VERIFY_HEAP 2599 debug_heap->Verify(); 2600 #endif 2601 2602 SetProgressTotal(2); // 2 passes. 2603 2604 #ifdef VERIFY_HEAP 2605 debug_heap->Verify(); 2606 #endif 2607 2608 if (!FillReferences()) return false; 2609 2610 snapshot_->FillChildren(); 2611 snapshot_->RememberLastJSObjectId(); 2612 2613 progress_counter_ = progress_total_; 2614 if (!ProgressReport(true)) return false; 2615 return true; 2616 } 2617 2618 2619 void HeapSnapshotGenerator::ProgressStep() { 2620 ++progress_counter_; 2621 } 2622 2623 2624 bool HeapSnapshotGenerator::ProgressReport(bool force) { 2625 const int kProgressReportGranularity = 10000; 2626 if (control_ != NULL 2627 && (force || progress_counter_ % kProgressReportGranularity == 0)) { 2628 return 2629 control_->ReportProgressValue(progress_counter_, progress_total_) == 2630 v8::ActivityControl::kContinue; 2631 } 2632 return true; 2633 } 2634 2635 2636 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) { 2637 if (control_ == NULL) return; 2638 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable); 2639 progress_total_ = iterations_count * ( 2640 v8_heap_explorer_.EstimateObjectsCount(&iterator) + 2641 dom_explorer_.EstimateObjectsCount()); 2642 progress_counter_ = 0; 2643 } 2644 2645 2646 bool HeapSnapshotGenerator::FillReferences() { 2647 SnapshotFiller filler(snapshot_, &entries_); 2648 v8_heap_explorer_.AddRootEntries(&filler); 2649 return v8_heap_explorer_.IterateAndExtractReferences(&filler) 2650 && dom_explorer_.IterateAndExtractReferences(&filler); 2651 } 2652 2653 2654 template<int bytes> struct MaxDecimalDigitsIn; 2655 template<> struct MaxDecimalDigitsIn<4> { 2656 static const int kSigned = 11; 2657 static const int kUnsigned = 10; 2658 }; 2659 template<> struct MaxDecimalDigitsIn<8> { 2660 static const int kSigned = 20; 2661 static const int kUnsigned = 20; 2662 }; 2663 2664 2665 class OutputStreamWriter { 2666 public: 2667 explicit OutputStreamWriter(v8::OutputStream* stream) 2668 : stream_(stream), 2669 chunk_size_(stream->GetChunkSize()), 2670 chunk_(chunk_size_), 2671 chunk_pos_(0), 2672 aborted_(false) { 2673 ASSERT(chunk_size_ > 0); 2674 } 2675 bool aborted() { return aborted_; } 2676 void AddCharacter(char c) { 2677 ASSERT(c != '\0'); 2678 ASSERT(chunk_pos_ < chunk_size_); 2679 chunk_[chunk_pos_++] = c; 2680 MaybeWriteChunk(); 2681 } 2682 void AddString(const char* s) { 2683 AddSubstring(s, StrLength(s)); 2684 } 2685 void AddSubstring(const char* s, int n) { 2686 if (n <= 0) return; 2687 ASSERT(static_cast<size_t>(n) <= strlen(s)); 2688 const char* s_end = s + n; 2689 while (s < s_end) { 2690 int s_chunk_size = 2691 Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s)); 2692 ASSERT(s_chunk_size > 0); 2693 MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size); 2694 s += s_chunk_size; 
2695 chunk_pos_ += s_chunk_size; 2696 MaybeWriteChunk(); 2697 } 2698 } 2699 void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); } 2700 void Finalize() { 2701 if (aborted_) return; 2702 ASSERT(chunk_pos_ < chunk_size_); 2703 if (chunk_pos_ != 0) { 2704 WriteChunk(); 2705 } 2706 stream_->EndOfStream(); 2707 } 2708 2709 private: 2710 template<typename T> 2711 void AddNumberImpl(T n, const char* format) { 2712 // Buffer for the longest value plus trailing \0 2713 static const int kMaxNumberSize = 2714 MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1; 2715 if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) { 2716 int result = SNPrintF( 2717 chunk_.SubVector(chunk_pos_, chunk_size_), format, n); 2718 ASSERT(result != -1); 2719 chunk_pos_ += result; 2720 MaybeWriteChunk(); 2721 } else { 2722 EmbeddedVector<char, kMaxNumberSize> buffer; 2723 int result = SNPrintF(buffer, format, n); 2724 USE(result); 2725 ASSERT(result != -1); 2726 AddString(buffer.start()); 2727 } 2728 } 2729 void MaybeWriteChunk() { 2730 ASSERT(chunk_pos_ <= chunk_size_); 2731 if (chunk_pos_ == chunk_size_) { 2732 WriteChunk(); 2733 } 2734 } 2735 void WriteChunk() { 2736 if (aborted_) return; 2737 if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) == 2738 v8::OutputStream::kAbort) aborted_ = true; 2739 chunk_pos_ = 0; 2740 } 2741 2742 v8::OutputStream* stream_; 2743 int chunk_size_; 2744 ScopedVector<char> chunk_; 2745 int chunk_pos_; 2746 bool aborted_; 2747 }; 2748 2749 2750 // type, name|index, to_node. 2751 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3; 2752 // type, name, id, self_size, edge_count, trace_node_id. 2753 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6; 2754 2755 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) { 2756 if (AllocationTracker* allocation_tracker = 2757 snapshot_->profiler()->allocation_tracker()) { 2758 allocation_tracker->PrepareForSerialization(); 2759 } 2760 ASSERT(writer_ == NULL); 2761 writer_ = new OutputStreamWriter(stream); 2762 SerializeImpl(); 2763 delete writer_; 2764 writer_ = NULL; 2765 } 2766 2767 2768 void HeapSnapshotJSONSerializer::SerializeImpl() { 2769 ASSERT(0 == snapshot_->root()->index()); 2770 writer_->AddCharacter('{'); 2771 writer_->AddString("\"snapshot\":{"); 2772 SerializeSnapshot(); 2773 if (writer_->aborted()) return; 2774 writer_->AddString("},\n"); 2775 writer_->AddString("\"nodes\":["); 2776 SerializeNodes(); 2777 if (writer_->aborted()) return; 2778 writer_->AddString("],\n"); 2779 writer_->AddString("\"edges\":["); 2780 SerializeEdges(); 2781 if (writer_->aborted()) return; 2782 writer_->AddString("],\n"); 2783 2784 writer_->AddString("\"trace_function_infos\":["); 2785 SerializeTraceNodeInfos(); 2786 if (writer_->aborted()) return; 2787 writer_->AddString("],\n"); 2788 writer_->AddString("\"trace_tree\":["); 2789 SerializeTraceTree(); 2790 if (writer_->aborted()) return; 2791 writer_->AddString("],\n"); 2792 2793 writer_->AddString("\"strings\":["); 2794 SerializeStrings(); 2795 if (writer_->aborted()) return; 2796 writer_->AddCharacter(']'); 2797 writer_->AddCharacter('}'); 2798 writer_->Finalize(); 2799 } 2800 2801 2802 int HeapSnapshotJSONSerializer::GetStringId(const char* s) { 2803 HashMap::Entry* cache_entry = strings_.Lookup( 2804 const_cast<char*>(s), StringHash(s), true); 2805 if (cache_entry->value == NULL) { 2806 cache_entry->value = reinterpret_cast<void*>(next_string_id_++); 2807 } 2808 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value)); 2809 } 2810 2811 2812 namespace { 2813 
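// Maps a byte width to the unsigned integer type of that width so that
// utoa() below can funnel any integral field (including the size_t
// self_size) through a single unsigned formatting routine.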
template<size_t size> struct ToUnsigned;
2815
2816 template<> struct ToUnsigned<4> {
2817   typedef uint32_t Type;
2818 };
2819
2820 template<> struct ToUnsigned<8> {
2821   typedef uint64_t Type;
2822 };
2823
2824 }  // namespace
2825
2826
2827 template<typename T>
2828 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2829   STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned
2830   int number_of_digits = 0;
2831   T t = value;
2832   do {
2833     ++number_of_digits;
2834   } while (t /= 10);
2835
2836   buffer_pos += number_of_digits;
2837   int result = buffer_pos;
2838   do {
2839     int last_digit = static_cast<int>(value % 10);
2840     buffer[--buffer_pos] = '0' + last_digit;
2841     value /= 10;
2842   } while (value);
2843   return result;
2844 }
2845
2846
2847 template<typename T>
2848 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2849   typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2850   STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2851   return utoa_impl(unsigned_value, buffer, buffer_pos);
2852 }
2853
2854
2855 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2856                                                bool first_edge) {
2857   // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2858   static const int kBufferSize =
2859       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
2860   EmbeddedVector<char, kBufferSize> buffer;
2861   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2862       || edge->type() == HeapGraphEdge::kHidden
2863       ? edge->index() : GetStringId(edge->name());
2864   int buffer_pos = 0;
2865   if (!first_edge) {
2866     buffer[buffer_pos++] = ',';
2867   }
2868   buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2869   buffer[buffer_pos++] = ',';
2870   buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2871   buffer[buffer_pos++] = ',';
2872   buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2873   buffer[buffer_pos++] = '\n';
2874   buffer[buffer_pos++] = '\0';
2875   writer_->AddString(buffer.start());
2876 }
2877
2878
2879 void HeapSnapshotJSONSerializer::SerializeEdges() {
2880   List<HeapGraphEdge*>& edges = snapshot_->children();
2881   for (int i = 0; i < edges.length(); ++i) {
2882     ASSERT(i == 0 ||
2883            edges[i - 1]->from()->index() <= edges[i]->from()->index());
2884     SerializeEdge(edges[i], i == 0);
2885     if (writer_->aborted()) return;
2886   }
2887 }
2888
2889
2890 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2891   // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0
2892   static const int kBufferSize =
2893       5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2894       + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned  // NOLINT
2895       + 6 + 1 + 1;
2896   EmbeddedVector<char, kBufferSize> buffer;
2897   int buffer_pos = 0;
2898   if (entry_index(entry) != 0) {
2899     buffer[buffer_pos++] = ',';
2900   }
2901   buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2902   buffer[buffer_pos++] = ',';
2903   buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2904   buffer[buffer_pos++] = ',';
2905   buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2906   buffer[buffer_pos++] = ',';
2907   buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2908   buffer[buffer_pos++] = ',';
2909   buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2910   buffer[buffer_pos++] = ',';
2911   buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2912   buffer[buffer_pos++] = '\n';
2913   buffer[buffer_pos++] = '\0';
2914
writer_->AddString(buffer.start()); 2915 } 2916 2917 2918 void HeapSnapshotJSONSerializer::SerializeNodes() { 2919 List<HeapEntry>& entries = snapshot_->entries(); 2920 for (int i = 0; i < entries.length(); ++i) { 2921 SerializeNode(&entries[i]); 2922 if (writer_->aborted()) return; 2923 } 2924 } 2925 2926 2927 void HeapSnapshotJSONSerializer::SerializeSnapshot() { 2928 writer_->AddString("\"title\":\""); 2929 writer_->AddString(snapshot_->title()); 2930 writer_->AddString("\""); 2931 writer_->AddString(",\"uid\":"); 2932 writer_->AddNumber(snapshot_->uid()); 2933 writer_->AddString(",\"meta\":"); 2934 // The object describing node serialization layout. 2935 // We use a set of macros to improve readability. 2936 #define JSON_A(s) "[" s "]" 2937 #define JSON_O(s) "{" s "}" 2938 #define JSON_S(s) "\"" s "\"" 2939 writer_->AddString(JSON_O( 2940 JSON_S("node_fields") ":" JSON_A( 2941 JSON_S("type") "," 2942 JSON_S("name") "," 2943 JSON_S("id") "," 2944 JSON_S("self_size") "," 2945 JSON_S("edge_count") "," 2946 JSON_S("trace_node_id")) "," 2947 JSON_S("node_types") ":" JSON_A( 2948 JSON_A( 2949 JSON_S("hidden") "," 2950 JSON_S("array") "," 2951 JSON_S("string") "," 2952 JSON_S("object") "," 2953 JSON_S("code") "," 2954 JSON_S("closure") "," 2955 JSON_S("regexp") "," 2956 JSON_S("number") "," 2957 JSON_S("native") "," 2958 JSON_S("synthetic") "," 2959 JSON_S("concatenated string") "," 2960 JSON_S("sliced string")) "," 2961 JSON_S("string") "," 2962 JSON_S("number") "," 2963 JSON_S("number") "," 2964 JSON_S("number") "," 2965 JSON_S("number") "," 2966 JSON_S("number")) "," 2967 JSON_S("edge_fields") ":" JSON_A( 2968 JSON_S("type") "," 2969 JSON_S("name_or_index") "," 2970 JSON_S("to_node")) "," 2971 JSON_S("edge_types") ":" JSON_A( 2972 JSON_A( 2973 JSON_S("context") "," 2974 JSON_S("element") "," 2975 JSON_S("property") "," 2976 JSON_S("internal") "," 2977 JSON_S("hidden") "," 2978 JSON_S("shortcut") "," 2979 JSON_S("weak")) "," 2980 JSON_S("string_or_number") "," 2981 JSON_S("node")) "," 2982 JSON_S("trace_function_info_fields") ":" JSON_A( 2983 JSON_S("function_id") "," 2984 JSON_S("name") "," 2985 JSON_S("script_name") "," 2986 JSON_S("script_id") "," 2987 JSON_S("line") "," 2988 JSON_S("column")) "," 2989 JSON_S("trace_node_fields") ":" JSON_A( 2990 JSON_S("id") "," 2991 JSON_S("function_info_index") "," 2992 JSON_S("count") "," 2993 JSON_S("size") "," 2994 JSON_S("children")))); 2995 #undef JSON_S 2996 #undef JSON_O 2997 #undef JSON_A 2998 writer_->AddString(",\"node_count\":"); 2999 writer_->AddNumber(snapshot_->entries().length()); 3000 writer_->AddString(",\"edge_count\":"); 3001 writer_->AddNumber(snapshot_->edges().length()); 3002 writer_->AddString(",\"trace_function_count\":"); 3003 uint32_t count = 0; 3004 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); 3005 if (tracker) { 3006 count = tracker->function_info_list().length(); 3007 } 3008 writer_->AddNumber(count); 3009 } 3010 3011 3012 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) { 3013 static const char hex_chars[] = "0123456789ABCDEF"; 3014 w->AddString("\\u"); 3015 w->AddCharacter(hex_chars[(u >> 12) & 0xf]); 3016 w->AddCharacter(hex_chars[(u >> 8) & 0xf]); 3017 w->AddCharacter(hex_chars[(u >> 4) & 0xf]); 3018 w->AddCharacter(hex_chars[u & 0xf]); 3019 } 3020 3021 3022 void HeapSnapshotJSONSerializer::SerializeTraceTree() { 3023 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); 3024 if (!tracker) return; 3025 AllocationTraceTree* traces = tracker->trace_tree(); 
3026 SerializeTraceNode(traces->root()); 3027 } 3028 3029 3030 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) { 3031 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0 3032 const int kBufferSize = 3033 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT 3034 + 4 + 1 + 1; 3035 EmbeddedVector<char, kBufferSize> buffer; 3036 int buffer_pos = 0; 3037 buffer_pos = utoa(node->id(), buffer, buffer_pos); 3038 buffer[buffer_pos++] = ','; 3039 buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos); 3040 buffer[buffer_pos++] = ','; 3041 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos); 3042 buffer[buffer_pos++] = ','; 3043 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos); 3044 buffer[buffer_pos++] = ','; 3045 buffer[buffer_pos++] = '['; 3046 buffer[buffer_pos++] = '\0'; 3047 writer_->AddString(buffer.start()); 3048 3049 Vector<AllocationTraceNode*> children = node->children(); 3050 for (int i = 0; i < children.length(); i++) { 3051 if (i > 0) { 3052 writer_->AddCharacter(','); 3053 } 3054 SerializeTraceNode(children[i]); 3055 } 3056 writer_->AddCharacter(']'); 3057 } 3058 3059 3060 // 0-based position is converted to 1-based during the serialization. 3061 static int SerializePosition(int position, const Vector<char>& buffer, 3062 int buffer_pos) { 3063 if (position == -1) { 3064 buffer[buffer_pos++] = '0'; 3065 } else { 3066 ASSERT(position >= 0); 3067 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos); 3068 } 3069 return buffer_pos; 3070 } 3071 3072 3073 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() { 3074 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker(); 3075 if (!tracker) return; 3076 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0 3077 const int kBufferSize = 3078 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT 3079 + 6 + 1 + 1; 3080 EmbeddedVector<char, kBufferSize> buffer; 3081 const List<AllocationTracker::FunctionInfo*>& list = 3082 tracker->function_info_list(); 3083 bool first_entry = true; 3084 for (int i = 0; i < list.length(); i++) { 3085 AllocationTracker::FunctionInfo* info = list[i]; 3086 int buffer_pos = 0; 3087 if (first_entry) { 3088 first_entry = false; 3089 } else { 3090 buffer[buffer_pos++] = ','; 3091 } 3092 buffer_pos = utoa(info->function_id, buffer, buffer_pos); 3093 buffer[buffer_pos++] = ','; 3094 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos); 3095 buffer[buffer_pos++] = ','; 3096 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos); 3097 buffer[buffer_pos++] = ','; 3098 // The cast is safe because script id is a non-negative Smi. 
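    // (For reference, each row serializes as
    // "function_id,name,script_name,script_id,line,column"; line and
    // column are converted to 1-based form by SerializePosition() below.)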
3099 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer, 3100 buffer_pos); 3101 buffer[buffer_pos++] = ','; 3102 buffer_pos = SerializePosition(info->line, buffer, buffer_pos); 3103 buffer[buffer_pos++] = ','; 3104 buffer_pos = SerializePosition(info->column, buffer, buffer_pos); 3105 buffer[buffer_pos++] = '\n'; 3106 buffer[buffer_pos++] = '\0'; 3107 writer_->AddString(buffer.start()); 3108 } 3109 } 3110 3111 3112 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) { 3113 writer_->AddCharacter('\n'); 3114 writer_->AddCharacter('\"'); 3115 for ( ; *s != '\0'; ++s) { 3116 switch (*s) { 3117 case '\b': 3118 writer_->AddString("\\b"); 3119 continue; 3120 case '\f': 3121 writer_->AddString("\\f"); 3122 continue; 3123 case '\n': 3124 writer_->AddString("\\n"); 3125 continue; 3126 case '\r': 3127 writer_->AddString("\\r"); 3128 continue; 3129 case '\t': 3130 writer_->AddString("\\t"); 3131 continue; 3132 case '\"': 3133 case '\\': 3134 writer_->AddCharacter('\\'); 3135 writer_->AddCharacter(*s); 3136 continue; 3137 default: 3138 if (*s > 31 && *s < 128) { 3139 writer_->AddCharacter(*s); 3140 } else if (*s <= 31) { 3141 // Special character with no dedicated literal. 3142 WriteUChar(writer_, *s); 3143 } else { 3144 // Convert UTF-8 into \u UTF-16 literal. 3145 unsigned length = 1, cursor = 0; 3146 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { } 3147 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor); 3148 if (c != unibrow::Utf8::kBadChar) { 3149 WriteUChar(writer_, c); 3150 ASSERT(cursor != 0); 3151 s += cursor - 1; 3152 } else { 3153 writer_->AddCharacter('?'); 3154 } 3155 } 3156 } 3157 } 3158 writer_->AddCharacter('\"'); 3159 } 3160 3161 3162 void HeapSnapshotJSONSerializer::SerializeStrings() { 3163 ScopedVector<const unsigned char*> sorted_strings( 3164 strings_.occupancy() + 1); 3165 for (HashMap::Entry* entry = strings_.Start(); 3166 entry != NULL; 3167 entry = strings_.Next(entry)) { 3168 int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value)); 3169 sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key); 3170 } 3171 writer_->AddString("\"<dummy>\""); 3172 for (int i = 1; i < sorted_strings.length(); ++i) { 3173 writer_->AddCharacter(','); 3174 SerializeString(sorted_strings[i]); 3175 if (writer_->aborted()) return; 3176 } 3177 } 3178 3179 3180 } } // namespace v8::internal 3181