// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 28 #include "v8.h" 29 30 #include "heap-snapshot-generator-inl.h" 31 32 #include "heap-profiler.h" 33 #include "debug.h" 34 #include "types.h" 35 36 namespace v8 { 37 namespace internal { 38 39 40 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to) 41 : type_(type), 42 from_index_(from), 43 to_index_(to), 44 name_(name) { 45 ASSERT(type == kContextVariable 46 || type == kProperty 47 || type == kInternal 48 || type == kShortcut); 49 } 50 51 52 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to) 53 : type_(type), 54 from_index_(from), 55 to_index_(to), 56 index_(index) { 57 ASSERT(type == kElement || type == kHidden || type == kWeak); 58 } 59 60 61 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) { 62 to_entry_ = &snapshot->entries()[to_index_]; 63 } 64 65 66 const int HeapEntry::kNoEntry = -1; 67 68 HeapEntry::HeapEntry(HeapSnapshot* snapshot, 69 Type type, 70 const char* name, 71 SnapshotObjectId id, 72 int self_size) 73 : type_(type), 74 children_count_(0), 75 children_index_(-1), 76 self_size_(self_size), 77 id_(id), 78 snapshot_(snapshot), 79 name_(name) { } 80 81 82 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type, 83 const char* name, 84 HeapEntry* entry) { 85 HeapGraphEdge edge(type, name, this->index(), entry->index()); 86 snapshot_->edges().Add(edge); 87 ++children_count_; 88 } 89 90 91 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type, 92 int index, 93 HeapEntry* entry) { 94 HeapGraphEdge edge(type, index, this->index(), entry->index()); 95 snapshot_->edges().Add(edge); 96 ++children_count_; 97 } 98 99 100 Handle<HeapObject> HeapEntry::GetHeapObject() { 101 return snapshot_->collection()->FindHeapObjectById(id()); 102 } 103 104 105 void HeapEntry::Print( 106 const char* prefix, const char* edge_name, int max_depth, int indent) { 107 STATIC_CHECK(sizeof(unsigned) == sizeof(id())); 108 OS::Print("%6d @%6u %*c %s%s: ", 109 self_size(), id(), indent, ' ', prefix, edge_name); 
110 if (type() != kString) { 111 OS::Print("%s %.40s\n", TypeAsString(), name_); 112 } else { 113 OS::Print("\""); 114 const char* c = name_; 115 while (*c && (c - name_) <= 40) { 116 if (*c != '\n') 117 OS::Print("%c", *c); 118 else 119 OS::Print("\\n"); 120 ++c; 121 } 122 OS::Print("\"\n"); 123 } 124 if (--max_depth == 0) return; 125 Vector<HeapGraphEdge*> ch = children(); 126 for (int i = 0; i < ch.length(); ++i) { 127 HeapGraphEdge& edge = *ch[i]; 128 const char* edge_prefix = ""; 129 EmbeddedVector<char, 64> index; 130 const char* edge_name = index.start(); 131 switch (edge.type()) { 132 case HeapGraphEdge::kContextVariable: 133 edge_prefix = "#"; 134 edge_name = edge.name(); 135 break; 136 case HeapGraphEdge::kElement: 137 OS::SNPrintF(index, "%d", edge.index()); 138 break; 139 case HeapGraphEdge::kInternal: 140 edge_prefix = "$"; 141 edge_name = edge.name(); 142 break; 143 case HeapGraphEdge::kProperty: 144 edge_name = edge.name(); 145 break; 146 case HeapGraphEdge::kHidden: 147 edge_prefix = "$"; 148 OS::SNPrintF(index, "%d", edge.index()); 149 break; 150 case HeapGraphEdge::kShortcut: 151 edge_prefix = "^"; 152 edge_name = edge.name(); 153 break; 154 case HeapGraphEdge::kWeak: 155 edge_prefix = "w"; 156 OS::SNPrintF(index, "%d", edge.index()); 157 break; 158 default: 159 OS::SNPrintF(index, "!!! 
unknown edge type: %d ", edge.type()); 160 } 161 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2); 162 } 163 } 164 165 166 const char* HeapEntry::TypeAsString() { 167 switch (type()) { 168 case kHidden: return "/hidden/"; 169 case kObject: return "/object/"; 170 case kClosure: return "/closure/"; 171 case kString: return "/string/"; 172 case kCode: return "/code/"; 173 case kArray: return "/array/"; 174 case kRegExp: return "/regexp/"; 175 case kHeapNumber: return "/number/"; 176 case kNative: return "/native/"; 177 case kSynthetic: return "/synthetic/"; 178 default: return "???"; 179 } 180 } 181 182 183 // It is very important to keep objects that form a heap snapshot 184 // as small as possible. 185 namespace { // Avoid littering the global namespace. 186 187 template <size_t ptr_size> struct SnapshotSizeConstants; 188 189 template <> struct SnapshotSizeConstants<4> { 190 static const int kExpectedHeapGraphEdgeSize = 12; 191 static const int kExpectedHeapEntrySize = 24; 192 }; 193 194 template <> struct SnapshotSizeConstants<8> { 195 static const int kExpectedHeapGraphEdgeSize = 24; 196 static const int kExpectedHeapEntrySize = 32; 197 }; 198 199 } // namespace 200 201 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, 202 const char* title, 203 unsigned uid) 204 : collection_(collection), 205 title_(title), 206 uid_(uid), 207 root_index_(HeapEntry::kNoEntry), 208 gc_roots_index_(HeapEntry::kNoEntry), 209 natives_root_index_(HeapEntry::kNoEntry), 210 max_snapshot_js_object_id_(0) { 211 STATIC_CHECK( 212 sizeof(HeapGraphEdge) == 213 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize); 214 STATIC_CHECK( 215 sizeof(HeapEntry) == 216 SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize); 217 for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) { 218 gc_subroot_indexes_[i] = HeapEntry::kNoEntry; 219 } 220 } 221 222 223 void HeapSnapshot::Delete() { 224 collection_->RemoveSnapshot(this); 225 delete this; 
226 } 227 228 229 void HeapSnapshot::RememberLastJSObjectId() { 230 max_snapshot_js_object_id_ = collection_->last_assigned_id(); 231 } 232 233 234 HeapEntry* HeapSnapshot::AddRootEntry() { 235 ASSERT(root_index_ == HeapEntry::kNoEntry); 236 ASSERT(entries_.is_empty()); // Root entry must be the first one. 237 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic, 238 "", 239 HeapObjectsMap::kInternalRootObjectId, 240 0); 241 root_index_ = entry->index(); 242 ASSERT(root_index_ == 0); 243 return entry; 244 } 245 246 247 HeapEntry* HeapSnapshot::AddGcRootsEntry() { 248 ASSERT(gc_roots_index_ == HeapEntry::kNoEntry); 249 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic, 250 "(GC roots)", 251 HeapObjectsMap::kGcRootsObjectId, 252 0); 253 gc_roots_index_ = entry->index(); 254 return entry; 255 } 256 257 258 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) { 259 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry); 260 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags); 261 HeapEntry* entry = AddEntry( 262 HeapEntry::kSynthetic, 263 VisitorSynchronization::kTagNames[tag], 264 HeapObjectsMap::GetNthGcSubrootId(tag), 265 0); 266 gc_subroot_indexes_[tag] = entry->index(); 267 return entry; 268 } 269 270 271 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type, 272 const char* name, 273 SnapshotObjectId id, 274 int size) { 275 HeapEntry entry(this, type, name, id, size); 276 entries_.Add(entry); 277 return &entries_.last(); 278 } 279 280 281 void HeapSnapshot::FillChildren() { 282 ASSERT(children().is_empty()); 283 children().Allocate(edges().length()); 284 int children_index = 0; 285 for (int i = 0; i < entries().length(); ++i) { 286 HeapEntry* entry = &entries()[i]; 287 children_index = entry->set_children_index(children_index); 288 } 289 ASSERT(edges().length() == children_index); 290 for (int i = 0; i < edges().length(); ++i) { 291 HeapGraphEdge* edge = &edges()[i]; 292 edge->ReplaceToIndexWithEntry(this); 293 edge->from()->add_child(edge); 294 } 
295 } 296 297 298 class FindEntryById { 299 public: 300 explicit FindEntryById(SnapshotObjectId id) : id_(id) { } 301 int operator()(HeapEntry* const* entry) { 302 if ((*entry)->id() == id_) return 0; 303 return (*entry)->id() < id_ ? -1 : 1; 304 } 305 private: 306 SnapshotObjectId id_; 307 }; 308 309 310 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) { 311 List<HeapEntry*>* entries_by_id = GetSortedEntriesList(); 312 // Perform a binary search by id. 313 int index = SortedListBSearch(*entries_by_id, FindEntryById(id)); 314 if (index == -1) 315 return NULL; 316 return entries_by_id->at(index); 317 } 318 319 320 template<class T> 321 static int SortByIds(const T* entry1_ptr, 322 const T* entry2_ptr) { 323 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0; 324 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1; 325 } 326 327 328 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() { 329 if (sorted_entries_.is_empty()) { 330 sorted_entries_.Allocate(entries_.length()); 331 for (int i = 0; i < entries_.length(); ++i) { 332 sorted_entries_[i] = &entries_[i]; 333 } 334 sorted_entries_.Sort(SortByIds); 335 } 336 return &sorted_entries_; 337 } 338 339 340 void HeapSnapshot::Print(int max_depth) { 341 root()->Print("", "", max_depth, 0); 342 } 343 344 345 template<typename T, class P> 346 static size_t GetMemoryUsedByList(const List<T, P>& list) { 347 return list.length() * sizeof(T) + sizeof(list); 348 } 349 350 351 size_t HeapSnapshot::RawSnapshotSize() const { 352 return 353 sizeof(*this) + 354 GetMemoryUsedByList(entries_) + 355 GetMemoryUsedByList(edges_) + 356 GetMemoryUsedByList(children_) + 357 GetMemoryUsedByList(sorted_entries_); 358 } 359 360 361 // We split IDs on evens for embedder objects (see 362 // HeapObjectsMap::GenerateId) and odds for native objects. 
363 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1; 364 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId = 365 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep; 366 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId = 367 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep; 368 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId = 369 HeapObjectsMap::kGcRootsFirstSubrootId + 370 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep; 371 372 373 static bool AddressesMatch(void* key1, void* key2) { 374 return key1 == key2; 375 } 376 377 378 HeapObjectsMap::HeapObjectsMap(Heap* heap) 379 : next_id_(kFirstAvailableObjectId), 380 entries_map_(AddressesMatch), 381 heap_(heap) { 382 // This dummy element solves a problem with entries_map_. 383 // When we do lookup in HashMap we see no difference between two cases: 384 // it has an entry with NULL as the value or it has created 385 // a new entry on the fly with NULL as the default value. 386 // With such dummy element we have a guaranty that all entries_map_ entries 387 // will have the value field grater than 0. 388 // This fact is using in MoveObject method. 389 entries_.Add(EntryInfo(0, NULL, 0)); 390 } 391 392 393 void HeapObjectsMap::SnapshotGenerationFinished() { 394 RemoveDeadEntries(); 395 } 396 397 398 void HeapObjectsMap::MoveObject(Address from, Address to) { 399 ASSERT(to != NULL); 400 ASSERT(from != NULL); 401 if (from == to) return; 402 void* from_value = entries_map_.Remove(from, ComputePointerHash(from)); 403 if (from_value == NULL) { 404 // It may occur that some untracked object moves to an address X and there 405 // is a tracked object at that address. In this case we should remove the 406 // entry as we know that the object has died. 
407 void* to_value = entries_map_.Remove(to, ComputePointerHash(to)); 408 if (to_value != NULL) { 409 int to_entry_info_index = 410 static_cast<int>(reinterpret_cast<intptr_t>(to_value)); 411 entries_.at(to_entry_info_index).addr = NULL; 412 } 413 } else { 414 HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to), 415 true); 416 if (to_entry->value != NULL) { 417 // We found the existing entry with to address for an old object. 418 // Without this operation we will have two EntryInfo's with the same 419 // value in addr field. It is bad because later at RemoveDeadEntries 420 // one of this entry will be removed with the corresponding entries_map_ 421 // entry. 422 int to_entry_info_index = 423 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value)); 424 entries_.at(to_entry_info_index).addr = NULL; 425 } 426 int from_entry_info_index = 427 static_cast<int>(reinterpret_cast<intptr_t>(from_value)); 428 entries_.at(from_entry_info_index).addr = to; 429 to_entry->value = from_value; 430 } 431 } 432 433 434 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) { 435 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr), 436 false); 437 if (entry == NULL) return 0; 438 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); 439 EntryInfo& entry_info = entries_.at(entry_index); 440 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); 441 return entry_info.id; 442 } 443 444 445 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr, 446 unsigned int size) { 447 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); 448 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr), 449 true); 450 if (entry->value != NULL) { 451 int entry_index = 452 static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); 453 EntryInfo& entry_info = entries_.at(entry_index); 454 entry_info.accessed = true; 455 entry_info.size = size; 456 return 
entry_info.id; 457 } 458 entry->value = reinterpret_cast<void*>(entries_.length()); 459 SnapshotObjectId id = next_id_; 460 next_id_ += kObjectIdStep; 461 entries_.Add(EntryInfo(id, addr, size)); 462 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); 463 return id; 464 } 465 466 467 void HeapObjectsMap::StopHeapObjectsTracking() { 468 time_intervals_.Clear(); 469 } 470 471 472 void HeapObjectsMap::UpdateHeapObjectsMap() { 473 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, 474 "HeapSnapshotsCollection::UpdateHeapObjectsMap"); 475 HeapIterator iterator(heap_); 476 for (HeapObject* obj = iterator.next(); 477 obj != NULL; 478 obj = iterator.next()) { 479 FindOrAddEntry(obj->address(), obj->Size()); 480 } 481 RemoveDeadEntries(); 482 } 483 484 485 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) { 486 UpdateHeapObjectsMap(); 487 time_intervals_.Add(TimeInterval(next_id_)); 488 int prefered_chunk_size = stream->GetChunkSize(); 489 List<v8::HeapStatsUpdate> stats_buffer; 490 ASSERT(!entries_.is_empty()); 491 EntryInfo* entry_info = &entries_.first(); 492 EntryInfo* end_entry_info = &entries_.last() + 1; 493 for (int time_interval_index = 0; 494 time_interval_index < time_intervals_.length(); 495 ++time_interval_index) { 496 TimeInterval& time_interval = time_intervals_[time_interval_index]; 497 SnapshotObjectId time_interval_id = time_interval.id; 498 uint32_t entries_size = 0; 499 EntryInfo* start_entry_info = entry_info; 500 while (entry_info < end_entry_info && entry_info->id < time_interval_id) { 501 entries_size += entry_info->size; 502 ++entry_info; 503 } 504 uint32_t entries_count = 505 static_cast<uint32_t>(entry_info - start_entry_info); 506 if (time_interval.count != entries_count || 507 time_interval.size != entries_size) { 508 stats_buffer.Add(v8::HeapStatsUpdate( 509 time_interval_index, 510 time_interval.count = entries_count, 511 time_interval.size = entries_size)); 512 if (stats_buffer.length() >= 
prefered_chunk_size) { 513 OutputStream::WriteResult result = stream->WriteHeapStatsChunk( 514 &stats_buffer.first(), stats_buffer.length()); 515 if (result == OutputStream::kAbort) return last_assigned_id(); 516 stats_buffer.Clear(); 517 } 518 } 519 } 520 ASSERT(entry_info == end_entry_info); 521 if (!stats_buffer.is_empty()) { 522 OutputStream::WriteResult result = stream->WriteHeapStatsChunk( 523 &stats_buffer.first(), stats_buffer.length()); 524 if (result == OutputStream::kAbort) return last_assigned_id(); 525 } 526 stream->EndOfStream(); 527 return last_assigned_id(); 528 } 529 530 531 void HeapObjectsMap::RemoveDeadEntries() { 532 ASSERT(entries_.length() > 0 && 533 entries_.at(0).id == 0 && 534 entries_.at(0).addr == NULL); 535 int first_free_entry = 1; 536 for (int i = 1; i < entries_.length(); ++i) { 537 EntryInfo& entry_info = entries_.at(i); 538 if (entry_info.accessed) { 539 if (first_free_entry != i) { 540 entries_.at(first_free_entry) = entry_info; 541 } 542 entries_.at(first_free_entry).accessed = false; 543 HashMap::Entry* entry = entries_map_.Lookup( 544 entry_info.addr, ComputePointerHash(entry_info.addr), false); 545 ASSERT(entry); 546 entry->value = reinterpret_cast<void*>(first_free_entry); 547 ++first_free_entry; 548 } else { 549 if (entry_info.addr) { 550 entries_map_.Remove(entry_info.addr, 551 ComputePointerHash(entry_info.addr)); 552 } 553 } 554 } 555 entries_.Rewind(first_free_entry); 556 ASSERT(static_cast<uint32_t>(entries_.length()) - 1 == 557 entries_map_.occupancy()); 558 } 559 560 561 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) { 562 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash()); 563 const char* label = info->GetLabel(); 564 id ^= StringHasher::HashSequentialString(label, 565 static_cast<int>(strlen(label)), 566 HEAP->HashSeed()); 567 intptr_t element_count = info->GetElementCount(); 568 if (element_count != -1) 569 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count), 
570 v8::internal::kZeroHashSeed); 571 return id << 1; 572 } 573 574 575 size_t HeapObjectsMap::GetUsedMemorySize() const { 576 return 577 sizeof(*this) + 578 sizeof(HashMap::Entry) * entries_map_.capacity() + 579 GetMemoryUsedByList(entries_) + 580 GetMemoryUsedByList(time_intervals_); 581 } 582 583 584 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap) 585 : is_tracking_objects_(false), 586 ids_(heap) { 587 } 588 589 590 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) { 591 delete *snapshot_ptr; 592 } 593 594 595 HeapSnapshotsCollection::~HeapSnapshotsCollection() { 596 snapshots_.Iterate(DeleteHeapSnapshot); 597 } 598 599 600 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name, 601 unsigned uid) { 602 is_tracking_objects_ = true; // Start watching for heap objects moves. 603 return new HeapSnapshot(this, name, uid); 604 } 605 606 607 void HeapSnapshotsCollection::SnapshotGenerationFinished( 608 HeapSnapshot* snapshot) { 609 ids_.SnapshotGenerationFinished(); 610 if (snapshot != NULL) { 611 snapshots_.Add(snapshot); 612 } 613 } 614 615 616 void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) { 617 snapshots_.RemoveElement(snapshot); 618 } 619 620 621 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById( 622 SnapshotObjectId id) { 623 // First perform a full GC in order to avoid dead objects. 624 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, 625 "HeapSnapshotsCollection::FindHeapObjectById"); 626 DisallowHeapAllocation no_allocation; 627 HeapObject* object = NULL; 628 HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable); 629 // Make sure that object with the given id is still reachable. 630 for (HeapObject* obj = iterator.next(); 631 obj != NULL; 632 obj = iterator.next()) { 633 if (ids_.FindEntry(obj->address()) == id) { 634 ASSERT(object == NULL); 635 object = obj; 636 // Can't break -- kFilterUnreachable requires full heap traversal. 637 } 638 } 639 return object != NULL ? 
Handle<HeapObject>(object) : Handle<HeapObject>(); 640 } 641 642 643 size_t HeapSnapshotsCollection::GetUsedMemorySize() const { 644 size_t size = sizeof(*this); 645 size += names_.GetUsedMemorySize(); 646 size += ids_.GetUsedMemorySize(); 647 size += GetMemoryUsedByList(snapshots_); 648 for (int i = 0; i < snapshots_.length(); ++i) { 649 size += snapshots_[i]->RawSnapshotSize(); 650 } 651 return size; 652 } 653 654 655 HeapEntriesMap::HeapEntriesMap() 656 : entries_(HeapThingsMatch) { 657 } 658 659 660 int HeapEntriesMap::Map(HeapThing thing) { 661 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false); 662 if (cache_entry == NULL) return HeapEntry::kNoEntry; 663 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value)); 664 } 665 666 667 void HeapEntriesMap::Pair(HeapThing thing, int entry) { 668 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true); 669 ASSERT(cache_entry->value == NULL); 670 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry)); 671 } 672 673 674 HeapObjectsSet::HeapObjectsSet() 675 : entries_(HeapEntriesMap::HeapThingsMatch) { 676 } 677 678 679 void HeapObjectsSet::Clear() { 680 entries_.Clear(); 681 } 682 683 684 bool HeapObjectsSet::Contains(Object* obj) { 685 if (!obj->IsHeapObject()) return false; 686 HeapObject* object = HeapObject::cast(obj); 687 return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL; 688 } 689 690 691 void HeapObjectsSet::Insert(Object* obj) { 692 if (!obj->IsHeapObject()) return; 693 HeapObject* object = HeapObject::cast(obj); 694 entries_.Lookup(object, HeapEntriesMap::Hash(object), true); 695 } 696 697 698 const char* HeapObjectsSet::GetTag(Object* obj) { 699 HeapObject* object = HeapObject::cast(obj); 700 HashMap::Entry* cache_entry = 701 entries_.Lookup(object, HeapEntriesMap::Hash(object), false); 702 return cache_entry != NULL 703 ? 
reinterpret_cast<const char*>(cache_entry->value) 704 : NULL; 705 } 706 707 708 void HeapObjectsSet::SetTag(Object* obj, const char* tag) { 709 if (!obj->IsHeapObject()) return; 710 HeapObject* object = HeapObject::cast(obj); 711 HashMap::Entry* cache_entry = 712 entries_.Lookup(object, HeapEntriesMap::Hash(object), true); 713 cache_entry->value = const_cast<char*>(tag); 714 } 715 716 717 HeapObject* const V8HeapExplorer::kInternalRootObject = 718 reinterpret_cast<HeapObject*>( 719 static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId)); 720 HeapObject* const V8HeapExplorer::kGcRootsObject = 721 reinterpret_cast<HeapObject*>( 722 static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId)); 723 HeapObject* const V8HeapExplorer::kFirstGcSubrootObject = 724 reinterpret_cast<HeapObject*>( 725 static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId)); 726 HeapObject* const V8HeapExplorer::kLastGcSubrootObject = 727 reinterpret_cast<HeapObject*>( 728 static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId)); 729 730 731 V8HeapExplorer::V8HeapExplorer( 732 HeapSnapshot* snapshot, 733 SnapshottingProgressReportingInterface* progress, 734 v8::HeapProfiler::ObjectNameResolver* resolver) 735 : heap_(Isolate::Current()->heap()), 736 snapshot_(snapshot), 737 collection_(snapshot_->collection()), 738 progress_(progress), 739 filler_(NULL), 740 global_object_name_resolver_(resolver) { 741 } 742 743 744 V8HeapExplorer::~V8HeapExplorer() { 745 } 746 747 748 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) { 749 return AddEntry(reinterpret_cast<HeapObject*>(ptr)); 750 } 751 752 753 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) { 754 if (object == kInternalRootObject) { 755 snapshot_->AddRootEntry(); 756 return snapshot_->root(); 757 } else if (object == kGcRootsObject) { 758 HeapEntry* entry = snapshot_->AddGcRootsEntry(); 759 return entry; 760 } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) { 761 HeapEntry* entry = 
snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object)); 762 return entry; 763 } else if (object->IsJSFunction()) { 764 JSFunction* func = JSFunction::cast(object); 765 SharedFunctionInfo* shared = func->shared(); 766 const char* name = shared->bound() ? "native_bind" : 767 collection_->names()->GetName(String::cast(shared->name())); 768 return AddEntry(object, HeapEntry::kClosure, name); 769 } else if (object->IsJSRegExp()) { 770 JSRegExp* re = JSRegExp::cast(object); 771 return AddEntry(object, 772 HeapEntry::kRegExp, 773 collection_->names()->GetName(re->Pattern())); 774 } else if (object->IsJSObject()) { 775 const char* name = collection_->names()->GetName( 776 GetConstructorName(JSObject::cast(object))); 777 if (object->IsJSGlobalObject()) { 778 const char* tag = objects_tags_.GetTag(object); 779 if (tag != NULL) { 780 name = collection_->names()->GetFormatted("%s / %s", name, tag); 781 } 782 } 783 return AddEntry(object, HeapEntry::kObject, name); 784 } else if (object->IsString()) { 785 return AddEntry(object, 786 HeapEntry::kString, 787 collection_->names()->GetName(String::cast(object))); 788 } else if (object->IsCode()) { 789 return AddEntry(object, HeapEntry::kCode, ""); 790 } else if (object->IsSharedFunctionInfo()) { 791 String* name = String::cast(SharedFunctionInfo::cast(object)->name()); 792 return AddEntry(object, 793 HeapEntry::kCode, 794 collection_->names()->GetName(name)); 795 } else if (object->IsScript()) { 796 Object* name = Script::cast(object)->name(); 797 return AddEntry(object, 798 HeapEntry::kCode, 799 name->IsString() 800 ? 
collection_->names()->GetName(String::cast(name)) 801 : ""); 802 } else if (object->IsNativeContext()) { 803 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext"); 804 } else if (object->IsContext()) { 805 return AddEntry(object, HeapEntry::kObject, "system / Context"); 806 } else if (object->IsFixedArray() || 807 object->IsFixedDoubleArray() || 808 object->IsByteArray() || 809 object->IsExternalArray()) { 810 return AddEntry(object, HeapEntry::kArray, ""); 811 } else if (object->IsHeapNumber()) { 812 return AddEntry(object, HeapEntry::kHeapNumber, "number"); 813 } 814 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object)); 815 } 816 817 818 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object, 819 HeapEntry::Type type, 820 const char* name) { 821 int object_size = object->Size(); 822 SnapshotObjectId object_id = 823 collection_->GetObjectId(object->address(), object_size); 824 return snapshot_->AddEntry(type, name, object_id, object_size); 825 } 826 827 828 class GcSubrootsEnumerator : public ObjectVisitor { 829 public: 830 GcSubrootsEnumerator( 831 SnapshotFillerInterface* filler, V8HeapExplorer* explorer) 832 : filler_(filler), 833 explorer_(explorer), 834 previous_object_count_(0), 835 object_count_(0) { 836 } 837 void VisitPointers(Object** start, Object** end) { 838 object_count_ += end - start; 839 } 840 void Synchronize(VisitorSynchronization::SyncTag tag) { 841 // Skip empty subroots. 
842 if (previous_object_count_ != object_count_) { 843 previous_object_count_ = object_count_; 844 filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_); 845 } 846 } 847 private: 848 SnapshotFillerInterface* filler_; 849 V8HeapExplorer* explorer_; 850 intptr_t previous_object_count_; 851 intptr_t object_count_; 852 }; 853 854 855 void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) { 856 filler->AddEntry(kInternalRootObject, this); 857 filler->AddEntry(kGcRootsObject, this); 858 GcSubrootsEnumerator enumerator(filler, this); 859 heap_->IterateRoots(&enumerator, VISIT_ALL); 860 } 861 862 863 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) { 864 switch (object->map()->instance_type()) { 865 case MAP_TYPE: 866 switch (Map::cast(object)->instance_type()) { 867 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \ 868 case instance_type: return "system / Map (" #Name ")"; 869 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE) 870 #undef MAKE_STRING_MAP_CASE 871 default: return "system / Map"; 872 } 873 case CELL_TYPE: return "system / Cell"; 874 case PROPERTY_CELL_TYPE: return "system / PropertyCell"; 875 case FOREIGN_TYPE: return "system / Foreign"; 876 case ODDBALL_TYPE: return "system / Oddball"; 877 #define MAKE_STRUCT_CASE(NAME, Name, name) \ 878 case NAME##_TYPE: return "system / "#Name; 879 STRUCT_LIST(MAKE_STRUCT_CASE) 880 #undef MAKE_STRUCT_CASE 881 default: return "system"; 882 } 883 } 884 885 886 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) { 887 int objects_count = 0; 888 for (HeapObject* obj = iterator->next(); 889 obj != NULL; 890 obj = iterator->next()) { 891 objects_count++; 892 } 893 return objects_count; 894 } 895 896 897 class IndexedReferencesExtractor : public ObjectVisitor { 898 public: 899 IndexedReferencesExtractor(V8HeapExplorer* generator, 900 HeapObject* parent_obj, 901 int parent) 902 : generator_(generator), 903 parent_obj_(parent_obj), 904 parent_(parent), 905 
next_index_(0) { 906 } 907 void VisitCodeEntry(Address entry_address) { 908 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address)); 909 generator_->SetInternalReference(parent_obj_, parent_, "code", code); 910 generator_->TagObject(code, "(code)"); 911 } 912 void VisitPointers(Object** start, Object** end) { 913 for (Object** p = start; p < end; p++) { 914 if (CheckVisitedAndUnmark(p)) continue; 915 generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p); 916 } 917 } 918 static void MarkVisitedField(HeapObject* obj, int offset) { 919 if (offset < 0) return; 920 Address field = obj->address() + offset; 921 ASSERT(!Memory::Object_at(field)->IsFailure()); 922 ASSERT(Memory::Object_at(field)->IsHeapObject()); 923 *field |= kFailureTag; 924 } 925 926 private: 927 bool CheckVisitedAndUnmark(Object** field) { 928 if ((*field)->IsFailure()) { 929 intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask; 930 *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag); 931 ASSERT((*field)->IsHeapObject()); 932 return true; 933 } 934 return false; 935 } 936 V8HeapExplorer* generator_; 937 HeapObject* parent_obj_; 938 int parent_; 939 int next_index_; 940 }; 941 942 943 void V8HeapExplorer::ExtractReferences(HeapObject* obj) { 944 HeapEntry* heap_entry = GetEntry(obj); 945 if (heap_entry == NULL) return; // No interest in this object. 
  int entry = heap_entry->index();

  // Dispatch to the type-specific extractor.  The most specific types are
  // tested first (e.g. a JSGlobalProxy is also a JSObject, so its test must
  // precede the JSObject test).
  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  // Every heap object references its map.
  SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);

  // Extract unvisited fields as hidden references and restore tags
  // of visited fields.
  IndexedReferencesExtractor refs_extractor(this, obj, entry);
  obj->Iterate(&refs_extractor);
}


// Adds the proxy's link to its native context as an internal reference.
void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}


// Extracts references for a plain JS object: closures/properties/elements/
// internal fields, plus the extra edges for JSFunction and GlobalObject
// instances (prototype, shared info, context, builtins, ...).
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->proto_string(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      // The slot holds either a direct prototype or the initial map;
      // when it is a map, the real prototype hangs off that map.
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    // Fields past kNonWeakFieldsEndOffset are recorded as weak references,
    // indexed by their byte offset.
    for (int i = JSFunction::kNonWeakFieldsEndOffset;
         i < JSFunction::kSize;
         i += kPointerSize) {
      SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
    }
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}


// Cons and sliced strings reference their components; other string kinds
// carry no extra references.
void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}


// Extracts named edges for context slots: context-allocated locals (named
// from the scope info), the fixed header fields, and - for native contexts -
// tagged caches plus weak references for the trailing weak slots.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  SetInternalReference(context, entry, #name, context->get(Context::index), \
                       FixedArray::OffsetOfElementAt(Context::index));
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
#undef EXTRACT_CONTEXT_FIELD
    // Slots from FIRST_WEAK_SLOT onward are weak; record them as
    // indexed weak references.
    for (int i = Context::FIRST_WEAK_SLOT;
         i < Context::NATIVE_CONTEXT_SLOTS;
         ++i) {
      SetWeakReference(context, entry, i, context->get(i),
                       FixedArray::OffsetOfElementAt(i));
    }
  }
}


// Extracts map references.  kTransitionsOrBackPointerOffset holds either a
// transition array (whose storage keeps the back pointer) or the back
// pointer directly, hence the two branches.
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    int transitions_entry = GetEntry(transitions)->index();
    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(transitions, transitions_entry,
                         "back_pointer", back_pointer);
    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "back_pointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);

  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  TagObject(map->dependent_code(), "(dependent code)");
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code(),
                       Map::kDependentCodeOffset);
}


// Extracts one internal edge per SharedFunctionInfo field; the initial map
// is recorded as a weak reference.
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  TagObject(shared->code(), "(code)");
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  TagObject(shared->construct_stub(), "(code)");
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetWeakReference(obj, entry,
                   1, shared->initial_map(),
                   SharedFunctionInfo::kInitialMapOffset);
}


// Extracts one internal edge per Script field.
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "data", script->data(),
                       Script::kDataOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}


// Records the getter/setter pair stored in an AccessorPair.
void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter(),
                       AccessorPair::kGetterOffset);
  SetInternalReference(accessors, entry, "setter", accessors->setter(),
                       AccessorPair::kSetterOffset);
}


// Records the two caches held by a CodeCache.
void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}


// Extracts the metadata objects attached to a Code object.  Type feedback
// info exists only for FUNCTION-kind code.
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
}

// A Cell only holds its value.
void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
}


// A PropertyCell is a Cell with an additional type and dependent-code field.
void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  ExtractCellReferences(entry, cell);
  SetInternalReference(cell, entry, "type", cell->type(),
                       PropertyCell::kTypeOffset);
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
                       PropertyCell::kDependentCodeOffset);
}


// Records the transition info of an AllocationSite.
void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
}


// For bound functions, adds shortcut edges to the bound this, the target
// function and each bound argument.
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = collection_->names()->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  }
}


// Extracts property edges.  Fast-mode objects are walked via their
// descriptor array (in-object and out-of-object fields, constants,
// callbacks); slow-mode objects via the property dictionary.  Hidden
// properties are recorded as internal references instead.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);

          Name* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            Object* value = js_obj->RawFastPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstant(i));
          break;
        case CALLBACKS:
          ExtractAccessorPairProperty(
              js_obj, entry,
              descs->GetKey(i), descs->GetValue(i));
          break;
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
          break;
        case TRANSITION:
        case NONEXISTENT:
          UNREACHABLE();
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsPropertyCell()
            ? PropertyCell::cast(target)->value()
            : target;
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
        SetPropertyReference(js_obj, entry, String::cast(k), value);
      }
    }
  }
}


// If the callback object is an AccessorPair, records "get %s"/"set %s"
// property edges for its non-oddball getter/setter and returns true;
// returns false otherwise.
bool V8HeapExplorer::ExtractAccessorPairProperty(
    JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
  if (!callback_obj->IsAccessorPair()) return false;
  AccessorPair* accessors = AccessorPair::cast(callback_obj);
  Object* getter = accessors->getter();
  if (!getter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
  }
  Object* setter = accessors->setter();
  if (!setter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
  }
  return true;
}


// Extracts indexed element edges for fast (skipping holes) and dictionary
// backing stores.
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}


// Records each internal (embedder) field as an indexed internal reference.
void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
  }
}


// Returns a display name for the object's constructor.  Functions are shown
// as the closure string; for a generic "Object" constructor name, tries the
// object's own "constructor" property for a more specific function name.
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // Look up an immediate "constructor" property, if it is a function,
    // return its name. This is for instances of binding objects, which
    // have prototype constructor type "Object".
    Object* constructor_prop = NULL;
    LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
    if (!result.IsFound()) return object->constructor_name();

    constructor_prop = result.GetLazyValue();
    if (constructor_prop->IsJSFunction()) {
      Object* maybe_name =
          JSFunction::cast(constructor_prop)->shared()->name();
      if (maybe_name->IsString()) {
        String* name = String::cast(maybe_name);
        if (name->length() > 0) return name;
      }
    }
  }
  return object->constructor_name();
}


// Maps a heap object to its snapshot entry (creating one on demand);
// returns NULL for non-heap values such as Smis.
HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}


// Visitor that collects GC roots in two passes (strong-only, then all)
// and later replays them as GC subroot references.  Synchronize() tags
// record which root list each reference range came from.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  RootsReferencesExtractor()
      : collecting_all_references_(false),
        previous_reference_count_(0) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    // Walk both lists in lockstep: a reference present only in
    // all_references_ is weak.  Assumes strong_references_ appear in the
    // same relative order within all_references_.
    int strong_index = 0, all_index = 0, tags_index = 0;
    while (all_index < all_references_.length()) {
      if (strong_index < strong_references_.length() &&
          strong_references_[strong_index] == all_references_[all_index]) {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        false,
                                        all_references_[all_index++]);
        ++strong_index;
      } else {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        true,
                                        all_references_[all_index++]);
      }
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
};


// Walks the whole heap extracting references from every object, then adds
// the GC root edges.  Returns false if the embedder aborted via the
// progress callback.
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  SetRootGcRootsReference();
  RootsReferencesExtractor extractor;
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);
  filler_ = NULL;
  return progress_->ProgressReport(true);
}


// Filters out objects that would only add noise to the snapshot: non-heap
// values, oddballs, and well-known shared singletons/maps.
bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->empty_byte_array()
      && object != heap_->empty_fixed_array()
      && object != heap_->empty_descriptor_array()
      && object != heap_->fixed_array_map()
      && object != heap_->cell_map()
      && object != heap_->global_property_cell_map()
      && object != heap_->shared_function_info_map()
      && object != heap_->free_space_map()
      && object != heap_->one_pointer_filler_map()
      && object != heap_->two_pointer_filler_map();
}


// Adds a context-variable edge and marks the field as visited so the
// indexed extractor skips it.
void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj,
                                         int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
                               parent_entry,
                               collection_->names()->GetName(reference_name),
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


// Adds a shortcut edge for a bound-function binding (no field to mark).
void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
}


// Adds an indexed element edge.
void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}


// Adds a named internal edge (essential children only) and always marks
// the field as visited.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


// Same as above, but the edge name is derived from a numeric index.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               collection_->names()->GetName(index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


// Adds a hidden indexed edge for fields not covered by a named extractor.
void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
                                        int parent_entry,
                                        int index,
                                        Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kHidden,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}


// Adds a weak indexed edge and marks the field as visited.
void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      int index,
                                      Object* child_obj,
                                      int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kWeak,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


// Adds a property edge.  Empty string names degrade to internal edges;
// name_format_string (e.g. "get %s") lets accessors produce decorated
// names.
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL && reference_name->IsString()
        ? collection_->names()->GetFormatted(
              name_format_string,
              *String::cast(reference_name)->ToCString(
                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)) :
        collection_->names()->GetName(reference_name);

    filler_->SetNamedReference(type,
                               parent_entry,
                               name,
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


// Links the snapshot root to the synthetic (GC roots) entry.
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}


// Adds a shortcut from the snapshot root to a user global object.
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}


// Links the (GC roots) entry to the per-root-list subroot entry.
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}


// Adds an edge from a GC subroot to a root object: named for known strong
// roots, otherwise indexed (weak or element).  Also shortcuts JS global
// objects found via native contexts to the snapshot root.
void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(),
          name,
          child_entry);
    } else {
      filler_->SetIndexedAutoIndexReference(
          is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
          snapshot_->gc_subroot(tag)->index(),
          child_entry);
    }

    // Add a shortcut to JS global object reference at snapshot root.
    if (child_obj->IsNativeContext()) {
      Context* context = Context::cast(child_obj);
      GlobalObject* global = context->global_object();
      if (global->IsJSGlobalObject()) {
        bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
        is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
#endif
        if (!is_debug_object && !user_roots_.Contains(global)) {
          user_roots_.Insert(global);
          SetUserGlobalReference(global);
        }
      }
    }
  }
}


// Lazily builds and queries the strong-root object -> name table.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}


// Names an essential object's entry with the given tag unless it already
// has a non-empty name.
void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
  if (IsEssentialObject(obj)) {
    HeapEntry* entry = GetEntry(obj);
    if (entry->name()[0] == '\0') {
      entry->set_name(tag);
    }
  }
}


// Visitor that collects JS global objects reachable through native-context
// global proxies.
class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsNativeContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }

 private:
  List<Handle<JSGlobalObject> > objects_;
};


// Modifies heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = Isolate::Current();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  // Query the embedder-supplied resolver (may allocate) before entering the
  // no-allocation section where the tags are applied.
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  DisallowHeapAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}


// Forwards embedder-reference visits (wrapper objects with a class id) to
// the native objects explorer.
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};


// Allocates snapshot entries of a fixed type (synthetic or native) from
// embedder-provided RetainedObjectInfo.
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntry::Type entries_type_;
};


// Builds an entry from the embedder info; -1 element count / size mean
// "unknown" and are omitted / reported as 0.
HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
  intptr_t elements = info->GetElementCount();
  intptr_t size = info->GetSizeInBytes();
  const char* name = elements != -1
      ? collection_->names()->GetFormatted(
            "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
      : collection_->names()->GetCopy(info->GetLabel());
  return snapshot_->AddEntry(
      entries_type_,
      name,
      HeapObjectsMap::GenerateId(info),
      size != -1 ? static_cast<int>(size) : 0);
}


NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}


// Disposes all retained-object infos owned through the two hash maps and
// releases the object lists and allocators.
NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();
  }
  delete
synthetic_entries_allocator_; 1964 delete native_entries_allocator_; 1965 } 1966 1967 1968 int NativeObjectsExplorer::EstimateObjectsCount() { 1969 FillRetainedObjects(); 1970 return objects_by_info_.occupancy(); 1971 } 1972 1973 1974 void NativeObjectsExplorer::FillRetainedObjects() { 1975 if (embedder_queried_) return; 1976 Isolate* isolate = Isolate::Current(); 1977 const GCType major_gc_type = kGCTypeMarkSweepCompact; 1978 // Record objects that are joined into ObjectGroups. 1979 isolate->heap()->CallGCPrologueCallbacks( 1980 major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos); 1981 List<ObjectGroup*>* groups = isolate->global_handles()->object_groups(); 1982 for (int i = 0; i < groups->length(); ++i) { 1983 ObjectGroup* group = groups->at(i); 1984 if (group->info == NULL) continue; 1985 List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info); 1986 for (size_t j = 0; j < group->length; ++j) { 1987 HeapObject* obj = HeapObject::cast(*group->objects[j]); 1988 list->Add(obj); 1989 in_groups_.Insert(obj); 1990 } 1991 group->info = NULL; // Acquire info object ownership. 1992 } 1993 isolate->global_handles()->RemoveObjectGroups(); 1994 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type); 1995 // Record objects that are not in ObjectGroups, but have class ID. 
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}


// Emits a "native" internal edge from each ImplicitRefGroup's parent to
// every child, then drops the groups from the global-handles list.
void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = Isolate::Current();
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    ASSERT(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children;
    for (size_t j = 0; j < group->length; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
  isolate->global_handles()->RemoveImplicitRefGroups();
}

// Returns the object list keyed by |info| in objects_by_info_, creating it
// on first use. If an equivalent info is already present, the caller's
// duplicate is disposed right away -- the map retains only one owner.
List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
    v8::RetainedObjectInfo* info) {
  HashMap::Entry* entry =
      objects_by_info_.Lookup(info, InfoHash(info), true);
  if (entry->value != NULL) {
    info->Dispose();
  } else {
    entry->value = new List<HeapObject*>(4);
  }
  return reinterpret_cast<List<HeapObject*>* >(entry->value);
}


// Entry point of the native-objects pass: gathers retained objects and
// implicit references from the embedder and emits all corresponding edges
// through |filler|. Always reports success.
bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;
  return true;
}


// Synthetic RetainedObjectInfo representing a named group of native
// objects. The hash is the label *pointer*; IsEquivalent compares label
// text too. NOTE(review): equal label strings at different addresses hash
// differently -- presumably callers always pass deduplicated label
// pointers (see FindOrAddGroupInfo's GetCopy keying); confirm.
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};


// Returns the group info registered under |label|, creating one on first
// use. NOTE(review): the new info stores the caller's |label| pointer,
// while the interned |label_copy| is only used as the hash-map key --
// verify the caller's label outlives the snapshot.
NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = collection_->names()->GetCopy(label);
  uint32_t hash = StringHasher::HashSequentialString(
      label_copy,
      static_cast<int>(strlen(label_copy)),
      HEAP->HashSeed());
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                hash, true);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}


// Links the (synthetic) group entry to the native entry for |info| with an
// auto-indexed internal edge.
void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}


// Ties a JS wrapper and its native counterpart together with a pair of
// edges: wrapper --"native"--> info and info --[element]--> wrapper.
void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  ASSERT(wrapper_entry != NULL);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(),
                             "native",
                             info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(),
                                        wrapper_entry);
}


// Attaches every native group entry to the snapshot root.
// NOTE(review): passes native_entries_allocator_ although
// SetNativeRootReference registers the same group infos with
// synthetic_entries_allocator_; the allocator is only consulted on a map
// miss and the entries normally already exist, so this looks benign but
// inconsistent -- confirm whether synthetic was intended here.
void NativeObjectsExplorer::SetRootNativeRootsReference() {
  for (HashMap::Entry* entry = native_groups_.Start();
       entry;
       entry = native_groups_.Next(entry)) {
    NativeGroupRetainedObjectInfo* group_info =
        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
    HeapEntry* group_entry =
        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
    ASSERT(group_entry != NULL);
    filler_->SetIndexedAutoIndexReference(
        HeapGraphEdge::kElement,
        snapshot_->root()->index(),
        group_entry);
  }
}


// Callback from GlobalHandlesExtractor: asks the embedder for retained
// info for a wrapper with the given class id and files the wrapper under
// it -- unless the wrapper was already covered by an ObjectGroup.
void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
  if (in_groups_.Contains(*p)) return;
  Isolate* isolate = Isolate::Current();
  v8::RetainedObjectInfo* info =
      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
  if (info == NULL) return;
  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
}


// Concrete SnapshotFillerInterface: maintains the HeapThing -> entry-index
// map and forwards reference-setting calls to the entries themselves.
class SnapshotFiller : public SnapshotFillerInterface {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        collection_(snapshot->collection()),
        entries_(entries) { }
  // Allocates a fresh entry for |ptr| via |allocator| and records the
  // pairing in the entries map.
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  // Returns the already-registered entry for |ptr|, or NULL.
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  // Auto-index variants number children 1..N in insertion order.
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  // Like the indexed variant, but the auto-index is rendered as a name.
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        collection_->names()->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntriesMap* entries_;
};


HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}


// Produces a complete snapshot: tags global objects, forces two full GCs
// so the heap is iterable, then extracts all references with allocation
// disabled. Returns false if the embedder aborted via the ActivityControl
// progress callback.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = Isolate::Current()->heap();
  CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
  CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
  CHECK(!debug_heap->code_space()->was_swept_conservatively());
  CHECK(!debug_heap->cell_space()->was_swept_conservatively());
  CHECK(!debug_heap->property_cell_space()->
        was_swept_conservatively());
  CHECK(!debug_heap->map_space()->was_swept_conservatively());
#endif

  // The following code uses heap iterators, so we want the heap to be
  // stable. It should follow TagGlobalObjects as that can allocate.
  DisallowHeapAllocation no_alloc;

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  SetProgressTotal(1);  // 1 pass.

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}


void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}


// Forwards progress to the embedder's ActivityControl every
// kProgressReportGranularity steps (or when |force| is set); returns
// false when the embedder requests an abort.
bool HeapSnapshotGenerator::ProgressReport(bool force) {
  const int kProgressReportGranularity = 10000;
  if (control_ != NULL
      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
    return
        control_->ReportProgressValue(progress_counter_, progress_total_) ==
        v8::ActivityControl::kContinue;
  }
  return true;
}


// Estimates total work for progress reporting; skipped entirely when no
// ActivityControl was supplied.
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}


// Runs the V8-heap pass, then the DOM/native pass, through one shared
// SnapshotFiller. Either pass may abort the generation.
bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
  v8_heap_explorer_.AddRootEntries(&filler);
  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
      && dom_explorer_.IterateAndExtractReferences(&filler);
}


// Maximum number of decimal digits (including sign) an integer of the
// given byte width may need; used to size stack buffers for formatting.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};


// Buffers serializer output and hands it to a v8::OutputStream in chunks
// of the stream's preferred size. Once the stream aborts, all further
// output is silently dropped.
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Copies the first |n| bytes of |s| into the chunk, flushing each time
  // the chunk fills up.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size = Min(
          chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      OS::MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // Flushes any buffered bytes and signals end-of-stream, unless a prior
  // write was aborted by the embedder.
  void Finalize() {
    if (aborted_) return;
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      // Fast path: print straight into the chunk.
      int result = OS::SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      ASSERT(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      // Not enough room left in the chunk: format to a stack buffer and
      // let AddString split it across chunks.
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = OS::SNPrintF(buffer, format, n);
      USE(result);
      ASSERT(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};


// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// type, name, id, self_size, children_index.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;

void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  ASSERT(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = NULL;
}


// Emits the top-level JSON object section by section, bailing out as soon
// as the writer reports an abort from the embedder stream.
void HeapSnapshotJSONSerializer::SerializeImpl() {
  ASSERT(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}


// Interns |s| in the serializer's string table and returns its id,
// assigning the next id on first sight.
int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry = strings_.Lookup(
      const_cast<char*>(s), ObjectHash(s), true);
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


// Writes the decimal digits of |value| into |buffer| starting at
// |buffer_pos| and returns the position just past the last digit. No
// terminating NUL is written.
static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
  // First pass: count digits so we can write them back-to-front.
  int number_of_digits = 0;
  unsigned t = value;
  do {
    ++number_of_digits;
  } while (t /= 10);

  buffer_pos += number_of_digits;
  int result = buffer_pos;
  do {
    int last_digit = value % 10;
    buffer[--buffer_pos] = '0' + last_digit;
    value /= 10;
  } while (value);
  return result;
}


// Serializes one edge as "type,name_or_index,to_node"; indexed edge types
// store a number, the rest store a string-table id. A leading comma
// separates it from the previous edge.
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      || edge->type() == HeapGraphEdge::kWeak
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}


// Edges are expected already grouped by source node, as the ASSERT checks.
void HeapSnapshotJSONSerializer::SerializeEdges() {
  List<HeapGraphEdge*>& edges = snapshot_->children();
  for (int i = 0; i < edges.length(); ++i) {
    ASSERT(i == 0 ||
           edges[i - 1]->from()->index() <= edges[i]->from()->index());
    SerializeEdge(edges[i], i == 0);
    if (writer_->aborted()) return;
  }
}


// Serializes one node as "type,name,id,self_size,edge_count".
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 5 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}


void HeapSnapshotJSONSerializer::SerializeNodes() {
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    SerializeNode(&entries[i]);
    if (writer_->aborted()) return;
  }
}


// Writes the "snapshot" section: title, uid, the meta description of node
// and edge field layouts, and the node/edge counts.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
      JSON_S("node_fields") ":" JSON_A(
          JSON_S("type") ","
          JSON_S("name") ","
          JSON_S("id") ","
          JSON_S("self_size") ","
          JSON_S("edge_count")) ","
      JSON_S("node_types") ":" JSON_A(
          JSON_A(
              JSON_S("hidden") ","
              JSON_S("array") ","
              JSON_S("string") ","
              JSON_S("object") ","
              JSON_S("code") ","
              JSON_S("closure") ","
              JSON_S("regexp") ","
              JSON_S("number") ","
              JSON_S("native") ","
              JSON_S("synthetic")) ","
          JSON_S("string") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number") ","
          JSON_S("number")) ","
      JSON_S("edge_fields") ":" JSON_A(
          JSON_S("type") ","
          JSON_S("name_or_index") ","
          JSON_S("to_node")) ","
      JSON_S("edge_types") ":" JSON_A(
          JSON_A(
              JSON_S("context") ","
              JSON_S("element") ","
              JSON_S("property") ","
              JSON_S("internal") ","
              JSON_S("hidden") ","
              JSON_S("shortcut") ","
              JSON_S("weak")) ","
          JSON_S("string_or_number") ","
          JSON_S("node"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
}


// Emits |u| as a JSON \uXXXX escape (low 16 bits only).
static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
  static const char hex_chars[] = "0123456789ABCDEF";
  w->AddString("\\u");
  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
  w->AddCharacter(hex_chars[u & 0xf]);
}


// Writes |s| as a double-quoted JSON string: standard escapes for control
// characters, quote and backslash; printable ASCII verbatim; non-ASCII
// UTF-8 sequences re-encoded as \uXXXX, or '?' if malformed.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            ASSERT(cursor != 0);
            // cursor is how many input bytes the decoder consumed; the
            // loop's ++s accounts for one of them.
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}


// Emits the string table ordered by string id (SortUsingEntryValue sorts
// on the id stored in the hash-map value). Slot 0 is the "<dummy>"
// placeholder -- presumably real ids start above 0; confirm against
// next_string_id_'s initial value.
void HeapSnapshotJSONSerializer::SerializeStrings() {
  List<HashMap::Entry*> sorted_strings;
  SortHashMap(&strings_, &sorted_strings);
  writer_->AddString("\"<dummy>\"");
  for (int i = 0; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(
        reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
    if (writer_->aborted()) return;
  }
}


// Three-way comparison of hash-map entries by their value, treated as an
// unsigned integer.
template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
  uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
  if (x_uint > y_uint) {
    return 1;
  } else if (x_uint == y_uint) {
    return 0;
  } else {
    return -1;
  }
}


// Copies all entries of |map| into |sorted_entries| and sorts them by
// value (see SortUsingEntryValue).
void HeapSnapshotJSONSerializer::SortHashMap(
    HashMap* map, List<HashMap::Entry*>* sorted_entries) {
  for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
    sorted_entries->Add(p);
  sorted_entries->Sort(SortUsingEntryValue);
}

} }  // namespace v8::internal