// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 28 #ifndef V8_PROFILE_GENERATOR_H_ 29 #define V8_PROFILE_GENERATOR_H_ 30 31 #include "allocation.h" 32 #include "hashmap.h" 33 #include "../include/v8-profiler.h" 34 35 namespace v8 { 36 namespace internal { 37 38 typedef uint32_t SnapshotObjectId; 39 40 class TokenEnumerator { 41 public: 42 TokenEnumerator(); 43 ~TokenEnumerator(); 44 int GetTokenId(Object* token); 45 46 static const int kNoSecurityToken = -1; 47 static const int kInheritsSecurityToken = -2; 48 49 private: 50 static void TokenRemovedCallback(v8::Persistent<v8::Value> handle, 51 void* parameter); 52 void TokenRemoved(Object** token_location); 53 54 List<Object**> token_locations_; 55 List<bool> token_removed_; 56 57 friend class TokenEnumeratorTester; 58 59 DISALLOW_COPY_AND_ASSIGN(TokenEnumerator); 60 }; 61 62 63 // Provides a storage of strings allocated in C++ heap, to hold them 64 // forever, even if they disappear from JS heap or external storage. 65 class StringsStorage { 66 public: 67 StringsStorage(); 68 ~StringsStorage(); 69 70 const char* GetCopy(const char* src); 71 const char* GetFormatted(const char* format, ...); 72 const char* GetVFormatted(const char* format, va_list args); 73 const char* GetName(String* name); 74 const char* GetName(int index); 75 inline const char* GetFunctionName(String* name); 76 inline const char* GetFunctionName(const char* name); 77 78 private: 79 static const int kMaxNameSize = 1024; 80 81 INLINE(static bool StringsMatch(void* key1, void* key2)) { 82 return strcmp(reinterpret_cast<char*>(key1), 83 reinterpret_cast<char*>(key2)) == 0; 84 } 85 const char* AddOrDisposeString(char* str, uint32_t hash); 86 87 // Mapping of strings by String::Hash to const char* strings. 88 HashMap names_; 89 90 DISALLOW_COPY_AND_ASSIGN(StringsStorage); 91 }; 92 93 94 class CodeEntry { 95 public: 96 // CodeEntry doesn't own name strings, just references them. 
97 INLINE(CodeEntry(Logger::LogEventsAndTags tag, 98 const char* name_prefix, 99 const char* name, 100 const char* resource_name, 101 int line_number, 102 int security_token_id)); 103 104 INLINE(bool is_js_function() const) { return is_js_function_tag(tag_); } 105 INLINE(const char* name_prefix() const) { return name_prefix_; } 106 INLINE(bool has_name_prefix() const) { return name_prefix_[0] != '\0'; } 107 INLINE(const char* name() const) { return name_; } 108 INLINE(const char* resource_name() const) { return resource_name_; } 109 INLINE(int line_number() const) { return line_number_; } 110 INLINE(int shared_id() const) { return shared_id_; } 111 INLINE(void set_shared_id(int shared_id)) { shared_id_ = shared_id; } 112 INLINE(int security_token_id() const) { return security_token_id_; } 113 114 INLINE(static bool is_js_function_tag(Logger::LogEventsAndTags tag)); 115 116 void CopyData(const CodeEntry& source); 117 uint32_t GetCallUid() const; 118 bool IsSameAs(CodeEntry* entry) const; 119 120 static const char* const kEmptyNamePrefix; 121 122 private: 123 Logger::LogEventsAndTags tag_; 124 const char* name_prefix_; 125 const char* name_; 126 const char* resource_name_; 127 int line_number_; 128 int shared_id_; 129 int security_token_id_; 130 131 DISALLOW_COPY_AND_ASSIGN(CodeEntry); 132 }; 133 134 135 class ProfileTree; 136 137 class ProfileNode { 138 public: 139 INLINE(ProfileNode(ProfileTree* tree, CodeEntry* entry)); 140 141 ProfileNode* FindChild(CodeEntry* entry); 142 ProfileNode* FindOrAddChild(CodeEntry* entry); 143 INLINE(void IncrementSelfTicks()) { ++self_ticks_; } 144 INLINE(void IncreaseSelfTicks(unsigned amount)) { self_ticks_ += amount; } 145 INLINE(void IncreaseTotalTicks(unsigned amount)) { total_ticks_ += amount; } 146 147 INLINE(CodeEntry* entry() const) { return entry_; } 148 INLINE(unsigned self_ticks() const) { return self_ticks_; } 149 INLINE(unsigned total_ticks() const) { return total_ticks_; } 150 INLINE(const List<ProfileNode*>* 
children() const) { return &children_list_; } 151 double GetSelfMillis() const; 152 double GetTotalMillis() const; 153 154 void Print(int indent); 155 156 private: 157 INLINE(static bool CodeEntriesMatch(void* entry1, void* entry2)) { 158 return reinterpret_cast<CodeEntry*>(entry1)->IsSameAs( 159 reinterpret_cast<CodeEntry*>(entry2)); 160 } 161 162 INLINE(static uint32_t CodeEntryHash(CodeEntry* entry)) { 163 return entry->GetCallUid(); 164 } 165 166 ProfileTree* tree_; 167 CodeEntry* entry_; 168 unsigned total_ticks_; 169 unsigned self_ticks_; 170 // Mapping from CodeEntry* to ProfileNode* 171 HashMap children_; 172 List<ProfileNode*> children_list_; 173 174 DISALLOW_COPY_AND_ASSIGN(ProfileNode); 175 }; 176 177 178 class ProfileTree { 179 public: 180 ProfileTree(); 181 ~ProfileTree(); 182 183 void AddPathFromEnd(const Vector<CodeEntry*>& path); 184 void AddPathFromStart(const Vector<CodeEntry*>& path); 185 void CalculateTotalTicks(); 186 void FilteredClone(ProfileTree* src, int security_token_id); 187 188 double TicksToMillis(unsigned ticks) const { 189 return ticks * ms_to_ticks_scale_; 190 } 191 ProfileNode* root() const { return root_; } 192 void SetTickRatePerMs(double ticks_per_ms); 193 194 void ShortPrint(); 195 void Print() { 196 root_->Print(0); 197 } 198 199 private: 200 template <typename Callback> 201 void TraverseDepthFirst(Callback* callback); 202 203 CodeEntry root_entry_; 204 ProfileNode* root_; 205 double ms_to_ticks_scale_; 206 207 DISALLOW_COPY_AND_ASSIGN(ProfileTree); 208 }; 209 210 211 class CpuProfile { 212 public: 213 CpuProfile(const char* title, unsigned uid) 214 : title_(title), uid_(uid) { } 215 216 // Add pc -> ... -> main() call path to the profile. 
217 void AddPath(const Vector<CodeEntry*>& path); 218 void CalculateTotalTicks(); 219 void SetActualSamplingRate(double actual_sampling_rate); 220 CpuProfile* FilteredClone(int security_token_id); 221 222 INLINE(const char* title() const) { return title_; } 223 INLINE(unsigned uid() const) { return uid_; } 224 INLINE(const ProfileTree* top_down() const) { return &top_down_; } 225 INLINE(const ProfileTree* bottom_up() const) { return &bottom_up_; } 226 227 void UpdateTicksScale(); 228 229 void ShortPrint(); 230 void Print(); 231 232 private: 233 const char* title_; 234 unsigned uid_; 235 ProfileTree top_down_; 236 ProfileTree bottom_up_; 237 238 DISALLOW_COPY_AND_ASSIGN(CpuProfile); 239 }; 240 241 242 class CodeMap { 243 public: 244 CodeMap() : next_shared_id_(1) { } 245 void AddCode(Address addr, CodeEntry* entry, unsigned size); 246 void MoveCode(Address from, Address to); 247 CodeEntry* FindEntry(Address addr); 248 int GetSharedId(Address addr); 249 250 void Print(); 251 252 private: 253 struct CodeEntryInfo { 254 CodeEntryInfo(CodeEntry* an_entry, unsigned a_size) 255 : entry(an_entry), size(a_size) { } 256 CodeEntry* entry; 257 unsigned size; 258 }; 259 260 struct CodeTreeConfig { 261 typedef Address Key; 262 typedef CodeEntryInfo Value; 263 static const Key kNoKey; 264 static const Value NoValue() { return CodeEntryInfo(NULL, 0); } 265 static int Compare(const Key& a, const Key& b) { 266 return a < b ? -1 : (a > b ? 1 : 0); 267 } 268 }; 269 typedef SplayTree<CodeTreeConfig> CodeTree; 270 271 class CodeTreePrinter { 272 public: 273 void Call(const Address& key, const CodeEntryInfo& value); 274 }; 275 276 void DeleteAllCoveredCode(Address start, Address end); 277 278 // Fake CodeEntry pointer to distinguish shared function entries. 
279 static CodeEntry* const kSharedFunctionCodeEntry; 280 281 CodeTree tree_; 282 int next_shared_id_; 283 284 DISALLOW_COPY_AND_ASSIGN(CodeMap); 285 }; 286 287 288 class CpuProfilesCollection { 289 public: 290 CpuProfilesCollection(); 291 ~CpuProfilesCollection(); 292 293 bool StartProfiling(const char* title, unsigned uid); 294 bool StartProfiling(String* title, unsigned uid); 295 CpuProfile* StopProfiling(int security_token_id, 296 const char* title, 297 double actual_sampling_rate); 298 List<CpuProfile*>* Profiles(int security_token_id); 299 const char* GetName(String* name) { 300 return function_and_resource_names_.GetName(name); 301 } 302 const char* GetName(int args_count) { 303 return function_and_resource_names_.GetName(args_count); 304 } 305 CpuProfile* GetProfile(int security_token_id, unsigned uid); 306 bool IsLastProfile(const char* title); 307 void RemoveProfile(CpuProfile* profile); 308 bool HasDetachedProfiles() { return detached_profiles_.length() > 0; } 309 310 CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, 311 String* name, String* resource_name, int line_number); 312 CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, const char* name); 313 CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, 314 const char* name_prefix, String* name); 315 CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, int args_count); 316 CodeEntry* NewCodeEntry(int security_token_id); 317 318 // Called from profile generator thread. 319 void AddPathToCurrentProfiles(const Vector<CodeEntry*>& path); 320 321 // Limits the number of profiles that can be simultaneously collected. 
322 static const int kMaxSimultaneousProfiles = 100; 323 324 private: 325 const char* GetFunctionName(String* name) { 326 return function_and_resource_names_.GetFunctionName(name); 327 } 328 const char* GetFunctionName(const char* name) { 329 return function_and_resource_names_.GetFunctionName(name); 330 } 331 int GetProfileIndex(unsigned uid); 332 List<CpuProfile*>* GetProfilesList(int security_token_id); 333 int TokenToIndex(int security_token_id); 334 335 INLINE(static bool UidsMatch(void* key1, void* key2)) { 336 return key1 == key2; 337 } 338 339 StringsStorage function_and_resource_names_; 340 List<CodeEntry*> code_entries_; 341 List<List<CpuProfile*>* > profiles_by_token_; 342 // Mapping from profiles' uids to indexes in the second nested list 343 // of profiles_by_token_. 344 HashMap profiles_uids_; 345 List<CpuProfile*> detached_profiles_; 346 347 // Accessed by VM thread and profile generator thread. 348 List<CpuProfile*> current_profiles_; 349 Semaphore* current_profiles_semaphore_; 350 351 DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection); 352 }; 353 354 355 class SampleRateCalculator { 356 public: 357 SampleRateCalculator() 358 : result_(Logger::kSamplingIntervalMs * kResultScale), 359 ticks_per_ms_(Logger::kSamplingIntervalMs), 360 measurements_count_(0), 361 wall_time_query_countdown_(1) { 362 } 363 364 double ticks_per_ms() { 365 return result_ / static_cast<double>(kResultScale); 366 } 367 void Tick(); 368 void UpdateMeasurements(double current_time); 369 370 // Instead of querying current wall time each tick, 371 // we use this constant to control query intervals. 372 static const unsigned kWallTimeQueryIntervalMs = 100; 373 374 private: 375 // As the result needs to be accessed from a different thread, we 376 // use type that guarantees atomic writes to memory. There should 377 // be <= 1000 ticks per second, thus storing a value of a 10 ** 5 378 // order should provide enough precision while keeping away from a 379 // potential overflow. 
380 static const int kResultScale = 100000; 381 382 AtomicWord result_; 383 // All other fields are accessed only from the sampler thread. 384 double ticks_per_ms_; 385 unsigned measurements_count_; 386 unsigned wall_time_query_countdown_; 387 double last_wall_time_; 388 389 DISALLOW_COPY_AND_ASSIGN(SampleRateCalculator); 390 }; 391 392 393 class ProfileGenerator { 394 public: 395 explicit ProfileGenerator(CpuProfilesCollection* profiles); 396 397 INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, 398 String* name, 399 String* resource_name, 400 int line_number)) { 401 return profiles_->NewCodeEntry(tag, name, resource_name, line_number); 402 } 403 404 INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, 405 const char* name)) { 406 return profiles_->NewCodeEntry(tag, name); 407 } 408 409 INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, 410 const char* name_prefix, 411 String* name)) { 412 return profiles_->NewCodeEntry(tag, name_prefix, name); 413 } 414 415 INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, 416 int args_count)) { 417 return profiles_->NewCodeEntry(tag, args_count); 418 } 419 420 INLINE(CodeEntry* NewCodeEntry(int security_token_id)) { 421 return profiles_->NewCodeEntry(security_token_id); 422 } 423 424 void RecordTickSample(const TickSample& sample); 425 426 INLINE(CodeMap* code_map()) { return &code_map_; } 427 428 INLINE(void Tick()) { sample_rate_calc_.Tick(); } 429 INLINE(double actual_sampling_rate()) { 430 return sample_rate_calc_.ticks_per_ms(); 431 } 432 433 static const char* const kAnonymousFunctionName; 434 static const char* const kProgramEntryName; 435 static const char* const kGarbageCollectorEntryName; 436 437 private: 438 INLINE(CodeEntry* EntryForVMState(StateTag tag)); 439 440 CpuProfilesCollection* profiles_; 441 CodeMap code_map_; 442 CodeEntry* program_entry_; 443 CodeEntry* gc_entry_; 444 SampleRateCalculator sample_rate_calc_; 445 446 DISALLOW_COPY_AND_ASSIGN(ProfileGenerator); 447 }; 
448 449 450 class HeapEntry; 451 452 class HeapGraphEdge BASE_EMBEDDED { 453 public: 454 enum Type { 455 kContextVariable = v8::HeapGraphEdge::kContextVariable, 456 kElement = v8::HeapGraphEdge::kElement, 457 kProperty = v8::HeapGraphEdge::kProperty, 458 kInternal = v8::HeapGraphEdge::kInternal, 459 kHidden = v8::HeapGraphEdge::kHidden, 460 kShortcut = v8::HeapGraphEdge::kShortcut, 461 kWeak = v8::HeapGraphEdge::kWeak 462 }; 463 464 HeapGraphEdge() { } 465 void Init(int child_index, Type type, const char* name, HeapEntry* to); 466 void Init(int child_index, Type type, int index, HeapEntry* to); 467 void Init(int child_index, int index, HeapEntry* to); 468 469 Type type() { return static_cast<Type>(type_); } 470 int index() { 471 ASSERT(type_ == kElement || type_ == kHidden || type_ == kWeak); 472 return index_; 473 } 474 const char* name() { 475 ASSERT(type_ == kContextVariable 476 || type_ == kProperty 477 || type_ == kInternal 478 || type_ == kShortcut); 479 return name_; 480 } 481 HeapEntry* to() { return to_; } 482 483 HeapEntry* From(); 484 485 private: 486 int child_index_ : 29; 487 unsigned type_ : 3; 488 union { 489 int index_; 490 const char* name_; 491 }; 492 HeapEntry* to_; 493 494 DISALLOW_COPY_AND_ASSIGN(HeapGraphEdge); 495 }; 496 497 498 class HeapSnapshot; 499 500 // HeapEntry instances represent an entity from the heap (or a special 501 // virtual node, e.g. root). To make heap snapshots more compact, 502 // HeapEntries has a special memory layout (no Vectors or Lists used): 503 // 504 // +-----------------+ 505 // HeapEntry 506 // +-----------------+ 507 // HeapGraphEdge | 508 // ... } children_count 509 // HeapGraphEdge | 510 // +-----------------+ 511 // HeapGraphEdge* | 512 // ... } retainers_count 513 // HeapGraphEdge* | 514 // +-----------------+ 515 // 516 // In a HeapSnapshot, all entries are hand-allocated in a continuous array 517 // of raw bytes. 
518 // 519 class HeapEntry BASE_EMBEDDED { 520 public: 521 enum Type { 522 kHidden = v8::HeapGraphNode::kHidden, 523 kArray = v8::HeapGraphNode::kArray, 524 kString = v8::HeapGraphNode::kString, 525 kObject = v8::HeapGraphNode::kObject, 526 kCode = v8::HeapGraphNode::kCode, 527 kClosure = v8::HeapGraphNode::kClosure, 528 kRegExp = v8::HeapGraphNode::kRegExp, 529 kHeapNumber = v8::HeapGraphNode::kHeapNumber, 530 kNative = v8::HeapGraphNode::kNative, 531 kSynthetic = v8::HeapGraphNode::kSynthetic 532 }; 533 534 HeapEntry() { } 535 void Init(HeapSnapshot* snapshot, 536 Type type, 537 const char* name, 538 SnapshotObjectId id, 539 int self_size, 540 int children_count, 541 int retainers_count); 542 543 HeapSnapshot* snapshot() { return snapshot_; } 544 Type type() { return static_cast<Type>(type_); } 545 const char* name() { return name_; } 546 void set_name(const char* name) { name_ = name; } 547 inline SnapshotObjectId id() { return id_; } 548 int self_size() { return self_size_; } 549 int retained_size() { return retained_size_; } 550 void add_retained_size(int size) { retained_size_ += size; } 551 void set_retained_size(int value) { retained_size_ = value; } 552 int ordered_index() { return ordered_index_; } 553 void set_ordered_index(int value) { ordered_index_ = value; } 554 555 Vector<HeapGraphEdge> children() { 556 return Vector<HeapGraphEdge>(children_arr(), children_count_); } 557 Vector<HeapGraphEdge*> retainers() { 558 return Vector<HeapGraphEdge*>(retainers_arr(), retainers_count_); } 559 HeapEntry* dominator() { return dominator_; } 560 void set_dominator(HeapEntry* entry) { 561 ASSERT(entry != NULL); 562 dominator_ = entry; 563 } 564 void clear_paint() { painted_ = false; } 565 bool painted() { return painted_; } 566 void paint() { painted_ = true; } 567 568 void SetIndexedReference(HeapGraphEdge::Type type, 569 int child_index, 570 int index, 571 HeapEntry* entry, 572 int retainer_index); 573 void SetNamedReference(HeapGraphEdge::Type type, 574 int 
child_index, 575 const char* name, 576 HeapEntry* entry, 577 int retainer_index); 578 void SetUnidirElementReference(int child_index, int index, HeapEntry* entry); 579 580 size_t EntrySize() { 581 return EntriesSize(1, children_count_, retainers_count_); 582 } 583 584 void Print( 585 const char* prefix, const char* edge_name, int max_depth, int indent); 586 587 Handle<HeapObject> GetHeapObject(); 588 589 static size_t EntriesSize(int entries_count, 590 int children_count, 591 int retainers_count); 592 593 private: 594 HeapGraphEdge* children_arr() { 595 return reinterpret_cast<HeapGraphEdge*>(this + 1); 596 } 597 HeapGraphEdge** retainers_arr() { 598 return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_); 599 } 600 const char* TypeAsString(); 601 602 unsigned painted_: 1; 603 unsigned type_: 4; 604 int children_count_: 27; 605 int retainers_count_; 606 int self_size_; 607 union { 608 int ordered_index_; // Used during dominator tree building. 609 int retained_size_; // At that moment, there is no retained size yet. 610 }; 611 SnapshotObjectId id_; 612 HeapEntry* dominator_; 613 HeapSnapshot* snapshot_; 614 const char* name_; 615 616 DISALLOW_COPY_AND_ASSIGN(HeapEntry); 617 }; 618 619 620 class HeapSnapshotsCollection; 621 622 // HeapSnapshot represents a single heap snapshot. It is stored in 623 // HeapSnapshotsCollection, which is also a factory for 624 // HeapSnapshots. All HeapSnapshots share strings copied from JS heap 625 // to be able to return them even if they were collected. 626 // HeapSnapshotGenerator fills in a HeapSnapshot. 
627 class HeapSnapshot { 628 public: 629 enum Type { 630 kFull = v8::HeapSnapshot::kFull 631 }; 632 633 HeapSnapshot(HeapSnapshotsCollection* collection, 634 Type type, 635 const char* title, 636 unsigned uid); 637 ~HeapSnapshot(); 638 void Delete(); 639 640 HeapSnapshotsCollection* collection() { return collection_; } 641 Type type() { return type_; } 642 const char* title() { return title_; } 643 unsigned uid() { return uid_; } 644 HeapEntry* root() { return root_entry_; } 645 HeapEntry* gc_roots() { return gc_roots_entry_; } 646 HeapEntry* natives_root() { return natives_root_entry_; } 647 HeapEntry* gc_subroot(int index) { return gc_subroot_entries_[index]; } 648 List<HeapEntry*>* entries() { return &entries_; } 649 size_t raw_entries_size() { return raw_entries_size_; } 650 651 void AllocateEntries( 652 int entries_count, int children_count, int retainers_count); 653 HeapEntry* AddEntry(HeapEntry::Type type, 654 const char* name, 655 SnapshotObjectId id, 656 int size, 657 int children_count, 658 int retainers_count); 659 HeapEntry* AddRootEntry(int children_count); 660 HeapEntry* AddGcRootsEntry(int children_count, int retainers_count); 661 HeapEntry* AddGcSubrootEntry(int tag, 662 int children_count, 663 int retainers_count); 664 HeapEntry* AddNativesRootEntry(int children_count, int retainers_count); 665 void ClearPaint(); 666 HeapEntry* GetEntryById(SnapshotObjectId id); 667 List<HeapEntry*>* GetSortedEntriesList(); 668 template<class Visitor> 669 void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); } 670 void SetDominatorsToSelf(); 671 672 void Print(int max_depth); 673 void PrintEntriesSize(); 674 675 private: 676 HeapEntry* GetNextEntryToInit(); 677 678 HeapSnapshotsCollection* collection_; 679 Type type_; 680 const char* title_; 681 unsigned uid_; 682 HeapEntry* root_entry_; 683 HeapEntry* gc_roots_entry_; 684 HeapEntry* natives_root_entry_; 685 HeapEntry* gc_subroot_entries_[VisitorSynchronization::kNumberOfSyncTags]; 686 char* 
raw_entries_; 687 List<HeapEntry*> entries_; 688 bool entries_sorted_; 689 size_t raw_entries_size_; 690 691 friend class HeapSnapshotTester; 692 693 DISALLOW_COPY_AND_ASSIGN(HeapSnapshot); 694 }; 695 696 697 class HeapObjectsMap { 698 public: 699 HeapObjectsMap(); 700 ~HeapObjectsMap(); 701 702 void SnapshotGenerationFinished(); 703 SnapshotObjectId FindObject(Address addr); 704 void MoveObject(Address from, Address to); 705 706 static SnapshotObjectId GenerateId(v8::RetainedObjectInfo* info); 707 static inline SnapshotObjectId GetNthGcSubrootId(int delta); 708 709 static const int kObjectIdStep = 2; 710 static const SnapshotObjectId kInternalRootObjectId; 711 static const SnapshotObjectId kGcRootsObjectId; 712 static const SnapshotObjectId kNativesRootObjectId; 713 static const SnapshotObjectId kGcRootsFirstSubrootId; 714 static const SnapshotObjectId kFirstAvailableObjectId; 715 716 private: 717 struct EntryInfo { 718 explicit EntryInfo(SnapshotObjectId id) : id(id), accessed(true) { } 719 EntryInfo(SnapshotObjectId id, bool accessed) 720 : id(id), 721 accessed(accessed) { } 722 SnapshotObjectId id; 723 bool accessed; 724 }; 725 726 void AddEntry(Address addr, SnapshotObjectId id); 727 SnapshotObjectId FindEntry(Address addr); 728 void RemoveDeadEntries(); 729 730 static bool AddressesMatch(void* key1, void* key2) { 731 return key1 == key2; 732 } 733 734 static uint32_t AddressHash(Address addr) { 735 return ComputeIntegerHash( 736 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)), 737 v8::internal::kZeroHashSeed); 738 } 739 740 bool initial_fill_mode_; 741 SnapshotObjectId next_id_; 742 HashMap entries_map_; 743 List<EntryInfo>* entries_; 744 745 DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap); 746 }; 747 748 749 class HeapSnapshotsCollection { 750 public: 751 HeapSnapshotsCollection(); 752 ~HeapSnapshotsCollection(); 753 754 bool is_tracking_objects() { return is_tracking_objects_; } 755 756 HeapSnapshot* NewSnapshot( 757 HeapSnapshot::Type type, const char* 
name, unsigned uid); 758 void SnapshotGenerationFinished(HeapSnapshot* snapshot); 759 List<HeapSnapshot*>* snapshots() { return &snapshots_; } 760 HeapSnapshot* GetSnapshot(unsigned uid); 761 void RemoveSnapshot(HeapSnapshot* snapshot); 762 763 StringsStorage* names() { return &names_; } 764 TokenEnumerator* token_enumerator() { return token_enumerator_; } 765 766 SnapshotObjectId GetObjectId(Address addr) { return ids_.FindObject(addr); } 767 Handle<HeapObject> FindHeapObjectById(SnapshotObjectId id); 768 void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); } 769 770 private: 771 INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) { 772 return key1 == key2; 773 } 774 775 bool is_tracking_objects_; // Whether tracking object moves is needed. 776 List<HeapSnapshot*> snapshots_; 777 // Mapping from snapshots' uids to HeapSnapshot* pointers. 778 HashMap snapshots_uids_; 779 StringsStorage names_; 780 TokenEnumerator* token_enumerator_; 781 // Mapping from HeapObject addresses to objects' uids. 782 HeapObjectsMap ids_; 783 784 DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection); 785 }; 786 787 788 // A typedef for referencing anything that can be snapshotted living 789 // in any kind of heap memory. 790 typedef void* HeapThing; 791 792 793 // An interface that creates HeapEntries by HeapThings. 794 class HeapEntriesAllocator { 795 public: 796 virtual ~HeapEntriesAllocator() { } 797 virtual HeapEntry* AllocateEntry( 798 HeapThing ptr, int children_count, int retainers_count) = 0; 799 }; 800 801 802 // The HeapEntriesMap instance is used to track a mapping between 803 // real heap objects and their representations in heap snapshots. 
804 class HeapEntriesMap { 805 public: 806 HeapEntriesMap(); 807 ~HeapEntriesMap(); 808 809 void AllocateEntries(); 810 HeapEntry* Map(HeapThing thing); 811 void Pair(HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry); 812 void CountReference(HeapThing from, HeapThing to, 813 int* prev_children_count = NULL, 814 int* prev_retainers_count = NULL); 815 816 int entries_count() { return entries_count_; } 817 int total_children_count() { return total_children_count_; } 818 int total_retainers_count() { return total_retainers_count_; } 819 820 static HeapEntry* const kHeapEntryPlaceholder; 821 822 private: 823 struct EntryInfo { 824 EntryInfo(HeapEntry* entry, HeapEntriesAllocator* allocator) 825 : entry(entry), 826 allocator(allocator), 827 children_count(0), 828 retainers_count(0) { 829 } 830 HeapEntry* entry; 831 HeapEntriesAllocator* allocator; 832 int children_count; 833 int retainers_count; 834 }; 835 836 static uint32_t Hash(HeapThing thing) { 837 return ComputeIntegerHash( 838 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)), 839 v8::internal::kZeroHashSeed); 840 } 841 static bool HeapThingsMatch(HeapThing key1, HeapThing key2) { 842 return key1 == key2; 843 } 844 845 HashMap entries_; 846 int entries_count_; 847 int total_children_count_; 848 int total_retainers_count_; 849 850 friend class HeapObjectsSet; 851 852 DISALLOW_COPY_AND_ASSIGN(HeapEntriesMap); 853 }; 854 855 856 class HeapObjectsSet { 857 public: 858 HeapObjectsSet(); 859 void Clear(); 860 bool Contains(Object* object); 861 void Insert(Object* obj); 862 const char* GetTag(Object* obj); 863 void SetTag(Object* obj, const char* tag); 864 865 private: 866 HashMap entries_; 867 868 DISALLOW_COPY_AND_ASSIGN(HeapObjectsSet); 869 }; 870 871 872 // An interface used to populate a snapshot with nodes and edges. 
873 class SnapshotFillerInterface { 874 public: 875 virtual ~SnapshotFillerInterface() { } 876 virtual HeapEntry* AddEntry(HeapThing ptr, 877 HeapEntriesAllocator* allocator) = 0; 878 virtual HeapEntry* FindEntry(HeapThing ptr) = 0; 879 virtual HeapEntry* FindOrAddEntry(HeapThing ptr, 880 HeapEntriesAllocator* allocator) = 0; 881 virtual void SetIndexedReference(HeapGraphEdge::Type type, 882 HeapThing parent_ptr, 883 HeapEntry* parent_entry, 884 int index, 885 HeapThing child_ptr, 886 HeapEntry* child_entry) = 0; 887 virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, 888 HeapThing parent_ptr, 889 HeapEntry* parent_entry, 890 HeapThing child_ptr, 891 HeapEntry* child_entry) = 0; 892 virtual void SetNamedReference(HeapGraphEdge::Type type, 893 HeapThing parent_ptr, 894 HeapEntry* parent_entry, 895 const char* reference_name, 896 HeapThing child_ptr, 897 HeapEntry* child_entry) = 0; 898 virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type, 899 HeapThing parent_ptr, 900 HeapEntry* parent_entry, 901 HeapThing child_ptr, 902 HeapEntry* child_entry) = 0; 903 }; 904 905 906 class SnapshottingProgressReportingInterface { 907 public: 908 virtual ~SnapshottingProgressReportingInterface() { } 909 virtual void ProgressStep() = 0; 910 virtual bool ProgressReport(bool force) = 0; 911 }; 912 913 914 // An implementation of V8 heap graph extractor. 
915 class V8HeapExplorer : public HeapEntriesAllocator { 916 public: 917 V8HeapExplorer(HeapSnapshot* snapshot, 918 SnapshottingProgressReportingInterface* progress); 919 virtual ~V8HeapExplorer(); 920 virtual HeapEntry* AllocateEntry( 921 HeapThing ptr, int children_count, int retainers_count); 922 void AddRootEntries(SnapshotFillerInterface* filler); 923 int EstimateObjectsCount(HeapIterator* iterator); 924 bool IterateAndExtractReferences(SnapshotFillerInterface* filler); 925 bool IterateAndSetObjectNames(SnapshotFillerInterface* filler); 926 void TagGlobalObjects(); 927 928 static String* GetConstructorName(JSObject* object); 929 930 static HeapObject* const kInternalRootObject; 931 932 private: 933 HeapEntry* AddEntry( 934 HeapObject* object, int children_count, int retainers_count); 935 HeapEntry* AddEntry(HeapObject* object, 936 HeapEntry::Type type, 937 const char* name, 938 int children_count, 939 int retainers_count); 940 const char* GetSystemEntryName(HeapObject* object); 941 void ExtractReferences(HeapObject* obj); 942 void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry); 943 void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry); 944 void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry); 945 void ExtractInternalReferences(JSObject* js_obj, HeapEntry* entry); 946 void SetClosureReference(HeapObject* parent_obj, 947 HeapEntry* parent, 948 String* reference_name, 949 Object* child); 950 void SetNativeBindReference(HeapObject* parent_obj, 951 HeapEntry* parent, 952 const char* reference_name, 953 Object* child); 954 void SetElementReference(HeapObject* parent_obj, 955 HeapEntry* parent, 956 int index, 957 Object* child); 958 void SetInternalReference(HeapObject* parent_obj, 959 HeapEntry* parent, 960 const char* reference_name, 961 Object* child, 962 int field_offset = -1); 963 void SetInternalReference(HeapObject* parent_obj, 964 HeapEntry* parent, 965 int index, 966 Object* child, 967 int field_offset = -1); 968 void 
SetHiddenReference(HeapObject* parent_obj, 969 HeapEntry* parent, 970 int index, 971 Object* child); 972 void SetWeakReference(HeapObject* parent_obj, 973 HeapEntry* parent_entry, 974 int index, 975 Object* child_obj, 976 int field_offset); 977 void SetPropertyReference(HeapObject* parent_obj, 978 HeapEntry* parent, 979 String* reference_name, 980 Object* child, 981 const char* name_format_string = NULL, 982 int field_offset = -1); 983 void SetPropertyShortcutReference(HeapObject* parent_obj, 984 HeapEntry* parent, 985 String* reference_name, 986 Object* child); 987 void SetRootShortcutReference(Object* child); 988 void SetRootGcRootsReference(); 989 void SetGcRootsReference(VisitorSynchronization::SyncTag tag); 990 void SetGcSubrootReference( 991 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child); 992 void SetObjectName(HeapObject* object); 993 void TagObject(Object* obj, const char* tag); 994 995 HeapEntry* GetEntry(Object* obj); 996 997 static inline HeapObject* GetNthGcSubrootObject(int delta); 998 static inline int GetGcSubrootOrder(HeapObject* subroot); 999 1000 Heap* heap_; 1001 HeapSnapshot* snapshot_; 1002 HeapSnapshotsCollection* collection_; 1003 SnapshottingProgressReportingInterface* progress_; 1004 SnapshotFillerInterface* filler_; 1005 HeapObjectsSet objects_tags_; 1006 1007 static HeapObject* const kGcRootsObject; 1008 static HeapObject* const kFirstGcSubrootObject; 1009 static HeapObject* const kLastGcSubrootObject; 1010 1011 friend class IndexedReferencesExtractor; 1012 friend class GcSubrootsEnumerator; 1013 friend class RootsReferencesExtractor; 1014 1015 DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer); 1016 }; 1017 1018 1019 class NativeGroupRetainedObjectInfo; 1020 1021 1022 // An implementation of retained native objects extractor. 
// Adds embedder-retained (native) objects and their references to the
// snapshot, using the v8::RetainedObjectInfo API to query the embedder.
class NativeObjectsExplorer {
 public:
  NativeObjectsExplorer(HeapSnapshot* snapshot,
                        SnapshottingProgressReportingInterface* progress);
  virtual ~NativeObjectsExplorer();
  void AddRootEntries(SnapshotFillerInterface* filler);
  int EstimateObjectsCount();
  bool IterateAndExtractReferences(SnapshotFillerInterface* filler);

 private:
  void FillRetainedObjects();
  void FillImplicitReferences();
  // Returns the object list keyed by |info| in objects_by_info_.
  // NOTE(review): the name suggests |info| is disposed when an equivalent
  // key already exists — confirm in the .cc before relying on ownership.
  List<HeapObject*>* GetListMaybeDisposeInfo(v8::RetainedObjectInfo* info);
  void SetNativeRootReference(v8::RetainedObjectInfo* info);
  void SetRootNativeRootsReference();
  void SetWrapperNativeReferences(HeapObject* wrapper,
                                  v8::RetainedObjectInfo* info);
  void VisitSubtreeWrapper(Object** p, uint16_t class_id);

  // HashMap hash for RetainedObjectInfo keys, derived from the
  // embedder-provided hash value.
  static uint32_t InfoHash(v8::RetainedObjectInfo* info) {
    return ComputeIntegerHash(static_cast<uint32_t>(info->GetHash()),
                              v8::internal::kZeroHashSeed);
  }
  // Two infos match when they are the same pointer or the embedder
  // reports them equivalent.
  static bool RetainedInfosMatch(void* key1, void* key2) {
    return key1 == key2 ||
        (reinterpret_cast<v8::RetainedObjectInfo*>(key1))->IsEquivalent(
            reinterpret_cast<v8::RetainedObjectInfo*>(key2));
  }
  // HashMap match callback for C-string keys (same as
  // StringsStorage::StringsMatch).
  INLINE(static bool StringsMatch(void* key1, void* key2)) {
    return strcmp(reinterpret_cast<char*>(key1),
                  reinterpret_cast<char*>(key2)) == 0;
  }

  NativeGroupRetainedObjectInfo* FindOrAddGroupInfo(const char* label);

  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  SnapshottingProgressReportingInterface* progress_;
  bool embedder_queried_;
  HeapObjectsSet in_groups_;
  // RetainedObjectInfo* -> List<HeapObject*>*
  HashMap objects_by_info_;
  // Group label (const char*) -> NativeGroupRetainedObjectInfo*
  // (see FindOrAddGroupInfo).
  HashMap native_groups_;
  HeapEntriesAllocator* synthetic_entries_allocator_;
  HeapEntriesAllocator* native_entries_allocator_;
  // Used during references extraction.
  SnapshotFillerInterface* filler_;

  // Sentinel for the synthetic natives root entry (defined in the .cc).
  static HeapThing const kNativesRootObject;

  friend class GlobalHandlesExtractor;

  DISALLOW_COPY_AND_ASSIGN(NativeObjectsExplorer);
};


// Drives whole-snapshot generation: counts entries, fills references via
// the two explorers above, then computes dominators and retained sizes.
// Implements the progress-reporting interface so the explorers can report
// through it to |control_|.
class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
 public:
  HeapSnapshotGenerator(HeapSnapshot* snapshot,
                        v8::ActivityControl* control);
  // Returns false if generation was aborted
  // (NOTE(review): inferred from the bool results below — confirm in .cc).
  bool GenerateSnapshot();

 private:
  bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
                          Vector<int>* dominators);
  bool CalculateRetainedSizes();
  bool CountEntriesAndReferences();
  bool FillReferences();
  void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
  // SnapshottingProgressReportingInterface implementation.
  void ProgressStep();
  bool ProgressReport(bool force = false);
  bool SetEntriesDominators();
  void SetProgressTotal(int iterations_count);

  HeapSnapshot* snapshot_;
  v8::ActivityControl* control_;
  V8HeapExplorer v8_heap_explorer_;
  NativeObjectsExplorer dom_explorer_;
  // Mapping from HeapThing pointers to HeapEntry* pointers.
  HeapEntriesMap entries_;
  // Used during snapshot generation.
  int progress_counter_;
  int progress_total_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
};

class OutputStreamWriter;

// Serializes a completed heap snapshot into JSON, writing the result
// through a v8::OutputStream (buffered by OutputStreamWriter).
class HeapSnapshotJSONSerializer {
 public:
  explicit HeapSnapshotJSONSerializer(HeapSnapshot* snapshot)
      : snapshot_(snapshot),
        nodes_(ObjectsMatch),
        strings_(ObjectsMatch),
        next_node_id_(1),     // Ids start at 1; 0 is presumably reserved.
        next_string_id_(1),
        writer_(NULL) {       // Bound to the stream inside Serialize().
  }
  void Serialize(v8::OutputStream* stream);

 private:
  // Keys are raw pointers, so identity comparison suffices.
  INLINE(static bool ObjectsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  // Hashes a pointer key by its (truncated) address.
  INLINE(static uint32_t ObjectHash(const void* key)) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)),
        v8::internal::kZeroHashSeed);
  }

  void EnumerateNodes();
  HeapSnapshot* CreateFakeSnapshot();
  int GetNodeId(HeapEntry* entry);
  int GetStringId(const char* s);
  void SerializeEdge(HeapGraphEdge* edge);
  void SerializeImpl();
  void SerializeNode(HeapEntry* entry);
  void SerializeNodes();
  void SerializeSnapshot();
  void SerializeString(const unsigned char* s);
  void SerializeStrings();
  void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);

  static const int kMaxSerializableSnapshotRawSize;

  HeapSnapshot* snapshot_;
  // NOTE(review): presumably HeapEntry* -> node id and const char* ->
  // string id, keyed via GetNodeId/GetStringId — confirm in the .cc.
  HashMap nodes_;
  HashMap strings_;
  int next_node_id_;
  int next_string_id_;
  OutputStreamWriter* writer_;

  friend class HeapSnapshotJSONSerializerEnumerator;
  friend class HeapSnapshotJSONSerializerIterator;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotJSONSerializer);
};

} }  // namespace v8::internal

#endif  // V8_PROFILE_GENERATOR_H_