1 // Copyright 2010 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#ifndef V8_PROFILE_GENERATOR_H_
#define V8_PROFILE_GENERATOR_H_

#ifdef ENABLE_LOGGING_AND_PROFILING

#include "hashmap.h"
#include "../include/v8-profiler.h"

namespace v8 {
namespace internal {

// Assigns small integer ids to security token objects.  Token handles
// are tracked via weak persistent handles (TokenRemovedCallback), so
// holding an id does not keep the token object alive.
class TokenEnumerator {
 public:
  TokenEnumerator();
  ~TokenEnumerator();
  int GetTokenId(Object* token);

  // Pseudo-ids used instead of a real token id.
  static const int kNoSecurityToken = -1;
  static const int kInheritsSecurityToken = -2;

 private:
  // Weak-handle callback invoked when a token object is collected;
  // |parameter| identifies which tracked token it was.
  static void TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                   void* parameter);
  void TokenRemoved(Object** token_location);

  List<Object**> token_locations_;
  List<bool> token_removed_;

  friend class TokenEnumeratorTester;

  DISALLOW_COPY_AND_ASSIGN(TokenEnumerator);
};


// Provides a storage of strings allocated in C++ heap, to hold them
// forever, even if they disappear from JS heap or external storage.
class StringsStorage {
 public:
  StringsStorage();
  ~StringsStorage();

  const char* GetCopy(const char* src);
  const char* GetFormatted(const char* format, ...);
  const char* GetVFormatted(const char* format, va_list args);
  const char* GetName(String* name);
  const char* GetName(int index);
  inline const char* GetFunctionName(String* name);
  inline const char* GetFunctionName(const char* name);

 private:
  // HashMap match function: keys are NUL-terminated C strings
  // compared by content, not by pointer.
  INLINE(static bool StringsMatch(void* key1, void* key2)) {
    return strcmp(reinterpret_cast<char*>(key1),
                  reinterpret_cast<char*>(key2)) == 0;
  }
  // Takes ownership of |str|; presumably disposes it when an equal
  // string is already stored under |hash| — confirm in the .cc file.
  const char* AddOrDisposeString(char* str, uint32_t hash);

  // Mapping of strings by String::Hash to const char* strings.
  HashMap names_;

  DISALLOW_COPY_AND_ASSIGN(StringsStorage);
};


// Describes one unit of profiled code: log event tag, name (with an
// optional prefix), script resource name, line number and security
// token.
class CodeEntry {
 public:
  // CodeEntry doesn't own name strings, just references them.
  INLINE(CodeEntry(Logger::LogEventsAndTags tag,
                   const char* name_prefix,
                   const char* name,
                   const char* resource_name,
                   int line_number,
                   int security_token_id));

  INLINE(bool is_js_function() const) { return is_js_function_tag(tag_); }
  INLINE(const char* name_prefix() const) { return name_prefix_; }
  INLINE(bool has_name_prefix() const) { return name_prefix_[0] != '\0'; }
  INLINE(const char* name() const) { return name_; }
  INLINE(const char* resource_name() const) { return resource_name_; }
  INLINE(int line_number() const) { return line_number_; }
  INLINE(int shared_id() const) { return shared_id_; }
  INLINE(void set_shared_id(int shared_id)) { shared_id_ = shared_id; }
  INLINE(int security_token_id() const) { return security_token_id_; }

  INLINE(static bool is_js_function_tag(Logger::LogEventsAndTags tag));

  void CopyData(const CodeEntry& source);
  // Hash / equality used when entries act as hash keys (see
  // ProfileNode::CodeEntryHash and ProfileNode::CodeEntriesMatch).
  uint32_t GetCallUid() const;
  bool IsSameAs(CodeEntry* entry) const;

  static const char* const kEmptyNamePrefix;

 private:
  Logger::LogEventsAndTags tag_;
  const char* name_prefix_;
  const char* name_;
  const char* resource_name_;
  int line_number_;
  int shared_id_;
  int security_token_id_;

  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
};


class ProfileTree;

// A node of a call tree: a CodeEntry plus self/total tick counters
// and children looked up by CodeEntry identity.
class ProfileNode {
 public:
  INLINE(ProfileNode(ProfileTree* tree, CodeEntry* entry));

  ProfileNode* FindChild(CodeEntry* entry);
  ProfileNode* FindOrAddChild(CodeEntry* entry);
  INLINE(void IncrementSelfTicks()) { ++self_ticks_; }
  INLINE(void IncreaseSelfTicks(unsigned amount)) { self_ticks_ += amount; }
  INLINE(void IncreaseTotalTicks(unsigned amount)) { total_ticks_ += amount; }

  INLINE(CodeEntry* entry() const) { return entry_; }
  INLINE(unsigned self_ticks() const) { return self_ticks_; }
  INLINE(unsigned total_ticks() const) { return total_ticks_; }
  INLINE(const List<ProfileNode*>* children() const) {
    return &children_list_;
  }
  double GetSelfMillis() const;
  double GetTotalMillis() const;

  void Print(int indent);

 private:
  // HashMap match function for children_: compares entries by
  // content via CodeEntry::IsSameAs, not by pointer.
  INLINE(static bool CodeEntriesMatch(void* entry1, void* entry2)) {
    return reinterpret_cast<CodeEntry*>(entry1)->IsSameAs(
        reinterpret_cast<CodeEntry*>(entry2));
  }

  INLINE(static uint32_t CodeEntryHash(CodeEntry* entry)) {
    return entry->GetCallUid();
  }

  ProfileTree* tree_;
  CodeEntry* entry_;
  unsigned total_ticks_;
  unsigned self_ticks_;
  // Mapping from CodeEntry* to ProfileNode*
  HashMap children_;
  List<ProfileNode*> children_list_;

  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};


// A call tree of ProfileNodes hanging off a synthetic root entry,
// together with the tick-to-milliseconds scale used for reporting.
class ProfileTree {
 public:
  ProfileTree();
  ~ProfileTree();

  void AddPathFromEnd(const Vector<CodeEntry*>& path);
  void AddPathFromStart(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void FilteredClone(ProfileTree* src, int security_token_id);

  double TicksToMillis(unsigned ticks) const {
    return ticks * ms_to_ticks_scale_;
  }
  ProfileNode* root() const { return root_; }
  void SetTickRatePerMs(double ticks_per_ms);

  void ShortPrint();
  void Print() {
    root_->Print(0);
  }

 private:
  template <typename Callback>
  void TraverseDepthFirst(Callback* callback);

  CodeEntry root_entry_;
  ProfileNode* root_;
  double ms_to_ticks_scale_;

  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};


// A single CPU profile: a title/uid pair plus top-down and bottom-up
// call-tree views built from the same sampled paths.
class CpuProfile {
 public:
  CpuProfile(const char* title, unsigned uid)
      : title_(title), uid_(uid) { }

  // Add pc -> ... -> main() call path to the profile.
  void AddPath(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void SetActualSamplingRate(double actual_sampling_rate);
  CpuProfile* FilteredClone(int security_token_id);

  INLINE(const char* title() const) { return title_; }
  INLINE(unsigned uid() const) { return uid_; }
  INLINE(const ProfileTree* top_down() const) { return &top_down_; }
  INLINE(const ProfileTree* bottom_up() const) { return &bottom_up_; }

  void UpdateTicksScale();

  void ShortPrint();
  void Print();

 private:
  const char* title_;
  unsigned uid_;
  ProfileTree top_down_;
  ProfileTree bottom_up_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};


// Maps code object addresses to CodeEntry objects, tracking code
// moves and deletions.  Backed by a splay tree keyed by address.
class CodeMap {
 public:
  CodeMap() : next_shared_id_(1) { }
  INLINE(void AddCode(Address addr, CodeEntry* entry, unsigned size));
  INLINE(void MoveCode(Address from, Address to));
  INLINE(void DeleteCode(Address addr));
  CodeEntry* FindEntry(Address addr);
  int GetSharedId(Address addr);

  void Print();

 private:
  struct CodeEntryInfo {
    CodeEntryInfo(CodeEntry* an_entry, unsigned a_size)
        : entry(an_entry), size(a_size) { }
    CodeEntry* entry;
    unsigned size;
  };

  struct CodeTreeConfig {
    typedef Address Key;
    typedef CodeEntryInfo Value;
    static const Key kNoKey;
    static const Value kNoValue;
    static int Compare(const Key& a, const Key& b) {
      return a < b ? -1 : (a > b ? 1 : 0);
    }
  };
  typedef SplayTree<CodeTreeConfig> CodeTree;

  class CodeTreePrinter {
   public:
    void Call(const Address& key, const CodeEntryInfo& value);
  };

  // Fake CodeEntry pointer to distinguish shared function entries.
  static CodeEntry* const kSharedFunctionCodeEntry;

  CodeTree tree_;
  int next_shared_id_;

  DISALLOW_COPY_AND_ASSIGN(CodeMap);
};


// Owns every CPU profile (running, finished, and detached) and the
// CodeEntry objects they reference, grouped per security token.
class CpuProfilesCollection {
 public:
  CpuProfilesCollection();
  ~CpuProfilesCollection();

  bool StartProfiling(const char* title, unsigned uid);
  bool StartProfiling(String* title, unsigned uid);
  CpuProfile* StopProfiling(int security_token_id,
                            const char* title,
                            double actual_sampling_rate);
  List<CpuProfile*>* Profiles(int security_token_id);
  const char* GetName(String* name) {
    return function_and_resource_names_.GetName(name);
  }
  const char* GetName(int args_count) {
    return function_and_resource_names_.GetName(args_count);
  }
  CpuProfile* GetProfile(int security_token_id, unsigned uid);
  bool IsLastProfile(const char* title);
  void RemoveProfile(CpuProfile* profile);
  bool HasDetachedProfiles() { return detached_profiles_.length() > 0; }

  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          String* name, String* resource_name, int line_number);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, const char* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          const char* name_prefix, String* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, int args_count);
  CodeEntry* NewCodeEntry(int security_token_id);

  // Called from profile generator thread.
  void AddPathToCurrentProfiles(const Vector<CodeEntry*>& path);

  // Limits the number of profiles that can be simultaneously collected.
  static const int kMaxSimultaneousProfiles = 100;

 private:
  const char* GetFunctionName(String* name) {
    return function_and_resource_names_.GetFunctionName(name);
  }
  const char* GetFunctionName(const char* name) {
    return function_and_resource_names_.GetFunctionName(name);
  }
  int GetProfileIndex(unsigned uid);
  List<CpuProfile*>* GetProfilesList(int security_token_id);
  int TokenToIndex(int security_token_id);

  // HashMap match function for profiles_uids_: uids compared as
  // raw key pointers.
  INLINE(static bool UidsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  StringsStorage function_and_resource_names_;
  List<CodeEntry*> code_entries_;
  List<List<CpuProfile*>* > profiles_by_token_;
  // Mapping from profiles' uids to indexes in the second nested list
  // of profiles_by_token_.
  HashMap profiles_uids_;
  List<CpuProfile*> detached_profiles_;

  // Accessed by VM thread and profile generator thread.
  List<CpuProfile*> current_profiles_;
  Semaphore* current_profiles_semaphore_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};


// Estimates the actual sampling rate (ticks per millisecond) by
// periodically comparing tick counts against wall-clock time.
class SampleRateCalculator {
 public:
  SampleRateCalculator()
      : result_(Logger::kSamplingIntervalMs * kResultScale),
        ticks_per_ms_(Logger::kSamplingIntervalMs),
        measurements_count_(0),
        wall_time_query_countdown_(1) {
  }

  double ticks_per_ms() {
    return result_ / static_cast<double>(kResultScale);
  }
  void Tick();
  void UpdateMeasurements(double current_time);

  // Instead of querying current wall time each tick,
  // we use this constant to control query intervals.
  static const unsigned kWallTimeQueryIntervalMs = 100;

 private:
  // As the result needs to be accessed from a different thread, we
  // use type that guarantees atomic writes to memory.  There should
  // be <= 1000 ticks per second, thus storing a value of a 10 ** 5
  // order should provide enough precision while keeping away from a
  // potential overflow.
  static const int kResultScale = 100000;

  AtomicWord result_;
  // All other fields are accessed only from the sampler thread.
  double ticks_per_ms_;
  unsigned measurements_count_;
  unsigned wall_time_query_countdown_;
  double last_wall_time_;

  DISALLOW_COPY_AND_ASSIGN(SampleRateCalculator);
};


// Translates tick samples into CpuProfile call paths; owns the
// CodeMap used to resolve sampled addresses to code entries.  The
// NewCodeEntry overloads simply forward to the profiles collection.
class ProfileGenerator {
 public:
  explicit ProfileGenerator(CpuProfilesCollection* profiles);

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 String* name,
                                 String* resource_name,
                                 int line_number)) {
    return profiles_->NewCodeEntry(tag, name, resource_name, line_number);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name)) {
    return profiles_->NewCodeEntry(tag, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name_prefix,
                                 String* name)) {
    return profiles_->NewCodeEntry(tag, name_prefix, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 int args_count)) {
    return profiles_->NewCodeEntry(tag, args_count);
  }

  INLINE(CodeEntry* NewCodeEntry(int security_token_id)) {
    return profiles_->NewCodeEntry(security_token_id);
  }

  void RecordTickSample(const TickSample& sample);

  INLINE(CodeMap* code_map()) { return &code_map_; }

  INLINE(void Tick()) { sample_rate_calc_.Tick(); }
  INLINE(double actual_sampling_rate()) {
    return sample_rate_calc_.ticks_per_ms();
  }

  static const char* const kAnonymousFunctionName;
  static const char* const kProgramEntryName;
  static const char* const kGarbageCollectorEntryName;

 private:
  INLINE(CodeEntry* EntryForVMState(StateTag tag));

  CpuProfilesCollection* profiles_;
  CodeMap code_map_;
  CodeEntry* program_entry_;
  CodeEntry* gc_entry_;
  SampleRateCalculator sample_rate_calc_;

  DISALLOW_COPY_AND_ASSIGN(ProfileGenerator);
};


class HeapEntry;

// An edge of the heap graph.  Packed for snapshot compactness:
// child_index_ and type_ share one 32-bit word, and index_/name_
// overlay each other in a union — which member is valid depends on
// type() (see the ASSERTs in index() and name()).
class HeapGraphEdge BASE_EMBEDDED {
 public:
  enum Type {
    kContextVariable = v8::HeapGraphEdge::kContextVariable,
    kElement = v8::HeapGraphEdge::kElement,
    kProperty = v8::HeapGraphEdge::kProperty,
    kInternal = v8::HeapGraphEdge::kInternal,
    kHidden = v8::HeapGraphEdge::kHidden,
    kShortcut = v8::HeapGraphEdge::kShortcut
  };

  HeapGraphEdge() { }
  void Init(int child_index, Type type, const char* name, HeapEntry* to);
  void Init(int child_index, Type type, int index, HeapEntry* to);
  void Init(int child_index, int index, HeapEntry* to);

  Type type() { return static_cast<Type>(type_); }
  // Valid only for indexed edge types.
  int index() {
    ASSERT(type_ == kElement || type_ == kHidden);
    return index_;
  }
  // Valid only for named edge types.
  const char* name() {
    ASSERT(type_ == kContextVariable
           || type_ == kProperty
           || type_ == kInternal
           || type_ == kShortcut);
    return name_;
  }
  HeapEntry* to() { return to_; }

  HeapEntry* From();

 private:
  int child_index_ : 29;
  unsigned type_ : 3;  // 3 bits are enough for all six Type values.
  union {
    int index_;
    const char* name_;
  };
  HeapEntry* to_;

  DISALLOW_COPY_AND_ASSIGN(HeapGraphEdge);
};


class HeapSnapshot;

// HeapEntry instances represent an entity from the heap (or a special
// virtual node, e.g. root). To make heap snapshots more compact,
// HeapEntries has a special memory layout (no Vectors or Lists used):
//
//   +-----------------+
//       HeapEntry
//   +-----------------+
//     HeapGraphEdge    |
//         ...          } children_count
//     HeapGraphEdge    |
//   +-----------------+
//     HeapGraphEdge*   |
//         ...          } retainers_count
//     HeapGraphEdge*   |
//   +-----------------+
//
// In a HeapSnapshot, all entries are hand-allocated in a continuous array
// of raw bytes.
//
class HeapEntry BASE_EMBEDDED {
 public:
  enum Type {
    kHidden = v8::HeapGraphNode::kHidden,
    kArray = v8::HeapGraphNode::kArray,
    kString = v8::HeapGraphNode::kString,
    kObject = v8::HeapGraphNode::kObject,
    kCode = v8::HeapGraphNode::kCode,
    kClosure = v8::HeapGraphNode::kClosure,
    kRegExp = v8::HeapGraphNode::kRegExp,
    kHeapNumber = v8::HeapGraphNode::kHeapNumber,
    kNative = v8::HeapGraphNode::kNative
  };

  HeapEntry() { }
  // Two-phase construction: entries are raw-allocated by the
  // snapshot and initialized afterwards.
  void Init(HeapSnapshot* snapshot,
            Type type,
            const char* name,
            uint64_t id,
            int self_size,
            int children_count,
            int retainers_count);

  HeapSnapshot* snapshot() { return snapshot_; }
  Type type() { return static_cast<Type>(type_); }
  const char* name() { return name_; }
  inline uint64_t id();
  int self_size() { return self_size_; }
  // retained_size / ordered_index share storage; only one is
  // meaningful at a time (see the union below).
  int retained_size() { return retained_size_; }
  void add_retained_size(int size) { retained_size_ += size; }
  void set_retained_size(int value) { retained_size_ = value; }
  int ordered_index() { return ordered_index_; }
  void set_ordered_index(int value) { ordered_index_ = value; }

  // Children edges and retainer pointers live in raw memory directly
  // after this object (see the class comment); these wrap them in
  // Vectors.
  Vector<HeapGraphEdge> children() {
    return Vector<HeapGraphEdge>(children_arr(), children_count_); }
  Vector<HeapGraphEdge*> retainers() {
    return Vector<HeapGraphEdge*>(retainers_arr(), retainers_count_); }
  HeapEntry* dominator() { return dominator_; }
  void set_dominator(HeapEntry* entry) { dominator_ = entry; }

  // "Painting" marks entries during reachability traversals.
  void clear_paint() { painted_ = kUnpainted; }
  bool painted_reachable() { return painted_ == kPainted; }
  void paint_reachable() {
    ASSERT(painted_ == kUnpainted);
    painted_ = kPainted;
  }
  bool not_painted_reachable_from_others() {
    return painted_ != kPaintedReachableFromOthers;
  }
  void paint_reachable_from_others() {
    painted_ = kPaintedReachableFromOthers;
  }
  template<class Visitor>
  void ApplyAndPaintAllReachable(Visitor* visitor);
  void PaintAllReachable();

  void SetIndexedReference(HeapGraphEdge::Type type,
                           int child_index,
                           int index,
                           HeapEntry* entry,
                           int retainer_index);
  void SetNamedReference(HeapGraphEdge::Type type,
                         int child_index,
                         const char* name,
                         HeapEntry* entry,
                         int retainer_index);
  void SetUnidirElementReference(int child_index, int index, HeapEntry* entry);

  int EntrySize() { return EntriesSize(1, children_count_, retainers_count_); }
  int RetainedSize(bool exact);

  void Print(int max_depth, int indent);

  // Byte size of |entries_count| entries including their trailing
  // children/retainer arrays (see the class comment's layout).
  static int EntriesSize(int entries_count,
                         int children_count,
                         int retainers_count);

 private:
  // The children edge array starts immediately after this object.
  HeapGraphEdge* children_arr() {
    return reinterpret_cast<HeapGraphEdge*>(this + 1);
  }
  // The retainers pointer array follows the children array.
  HeapGraphEdge** retainers_arr() {
    return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_);
  }
  void CalculateExactRetainedSize();
  const char* TypeAsString();

  unsigned painted_: 2;
  unsigned type_: 4;
  int children_count_: 26;
  int retainers_count_;
  int self_size_;
  union {
    int ordered_index_;  // Used during dominator tree building.
    int retained_size_;  // At that moment, there is no retained size yet.
  };
  HeapEntry* dominator_;
  HeapSnapshot* snapshot_;
  struct Id {
    uint32_t id1_;
    uint32_t id2_;
  } id_;  // This is to avoid extra padding of 64-bit value.
  const char* name_;

  // Paints used for exact retained sizes calculation.
  static const unsigned kUnpainted = 0;
  static const unsigned kPainted = 1;
  static const unsigned kPaintedReachableFromOthers = 2;

  static const int kExactRetainedSizeTag = 1;

  DISALLOW_COPY_AND_ASSIGN(HeapEntry);
};


class HeapSnapshotsCollection;

// HeapSnapshot represents a single heap snapshot. It is stored in
// HeapSnapshotsCollection, which is also a factory for
// HeapSnapshots.
// All HeapSnapshots share strings copied from JS heap
// to be able to return them even if they were collected.
// HeapSnapshotGenerator fills in a HeapSnapshot.
class HeapSnapshot {
 public:
  enum Type {
    kFull = v8::HeapSnapshot::kFull,
    kAggregated = v8::HeapSnapshot::kAggregated
  };

  HeapSnapshot(HeapSnapshotsCollection* collection,
               Type type,
               const char* title,
               unsigned uid);
  ~HeapSnapshot();
  void Delete();

  HeapSnapshotsCollection* collection() { return collection_; }
  Type type() { return type_; }
  const char* title() { return title_; }
  unsigned uid() { return uid_; }
  HeapEntry* root() { return root_entry_; }
  HeapEntry* gc_roots() { return gc_roots_entry_; }
  HeapEntry* natives_root() { return natives_root_entry_; }
  List<HeapEntry*>* entries() { return &entries_; }

  // Pre-allocates raw storage (raw_entries_) for all entries and
  // their edge arrays; entries are then handed out one by one via
  // the AddEntry family (see HeapEntry's memory layout).
  void AllocateEntries(
      int entries_count, int children_count, int retainers_count);
  HeapEntry* AddEntry(HeapEntry::Type type,
                      const char* name,
                      uint64_t id,
                      int size,
                      int children_count,
                      int retainers_count);
  HeapEntry* AddRootEntry(int children_count);
  HeapEntry* AddGcRootsEntry(int children_count, int retainers_count);
  HeapEntry* AddNativesRootEntry(int children_count, int retainers_count);
  void ClearPaint();
  HeapEntry* GetEntryById(uint64_t id);
  List<HeapEntry*>* GetSortedEntriesList();
  template<class Visitor>
  void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); }
  void SetDominatorsToSelf();

  void Print(int max_depth);
  void PrintEntriesSize();

 private:
  HeapEntry* GetNextEntryToInit();

  HeapSnapshotsCollection* collection_;
  Type type_;
  const char* title_;
  unsigned uid_;
  HeapEntry* root_entry_;
  HeapEntry* gc_roots_entry_;
  HeapEntry* natives_root_entry_;
  char* raw_entries_;
  List<HeapEntry*> entries_;
  bool entries_sorted_;
#ifdef DEBUG
  int raw_entries_size_;
#endif

  friend class HeapSnapshotTester;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshot);
};


// Assigns uids to heap object addresses and updates the mapping when
// objects move, so an object keeps its id across GCs.
class HeapObjectsMap {
 public:
  HeapObjectsMap();
  ~HeapObjectsMap();

  void SnapshotGenerationFinished();
  uint64_t FindObject(Address addr);
  void MoveObject(Address from, Address to);

  static uint64_t GenerateId(v8::RetainedObjectInfo* info);

  // Reserved ids for the synthetic root entries.
  static const uint64_t kInternalRootObjectId;
  static const uint64_t kGcRootsObjectId;
  static const uint64_t kNativesRootObjectId;
  static const uint64_t kFirstAvailableObjectId;

 private:
  struct EntryInfo {
    explicit EntryInfo(uint64_t id) : id(id), accessed(true) { }
    EntryInfo(uint64_t id, bool accessed) : id(id), accessed(accessed) { }
    uint64_t id;
    bool accessed;
  };

  void AddEntry(Address addr, uint64_t id);
  uint64_t FindEntry(Address addr);
  void RemoveDeadEntries();

  // HashMap match function: addresses compared as raw pointers.
  static bool AddressesMatch(void* key1, void* key2) {
    return key1 == key2;
  }

  static uint32_t AddressHash(Address addr) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)));
  }

  bool initial_fill_mode_;
  uint64_t next_id_;
  HashMap entries_map_;
  List<EntryInfo>* entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
};


// Owns all heap snapshots, the shared string storage, the token
// enumerator, and the address-to-uid mapping.
class HeapSnapshotsCollection {
 public:
  HeapSnapshotsCollection();
  ~HeapSnapshotsCollection();

  bool is_tracking_objects() { return is_tracking_objects_; }

  HeapSnapshot* NewSnapshot(
      HeapSnapshot::Type type, const char* name, unsigned uid);
  void SnapshotGenerationFinished(HeapSnapshot* snapshot);
  List<HeapSnapshot*>* snapshots() { return &snapshots_; }
  HeapSnapshot* GetSnapshot(unsigned uid);
  void RemoveSnapshot(HeapSnapshot* snapshot);

  StringsStorage* names() { return &names_; }
  TokenEnumerator* token_enumerator() { return token_enumerator_; }

  uint64_t GetObjectId(Address addr) { return ids_.FindObject(addr); }
  void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }

 private:
  INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  bool is_tracking_objects_;  // Whether tracking object moves is needed.
  List<HeapSnapshot*> snapshots_;
  // Mapping from snapshots' uids to HeapSnapshot* pointers.
  HashMap snapshots_uids_;
  StringsStorage names_;
  TokenEnumerator* token_enumerator_;
  // Mapping from HeapObject addresses to objects' uids.
  HeapObjectsMap ids_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection);
};


// A typedef for referencing anything that can be snapshotted living
// in any kind of heap memory.
typedef void* HeapThing;


// An interface that creates HeapEntries by HeapThings.
class HeapEntriesAllocator {
 public:
  virtual ~HeapEntriesAllocator() { }
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count) = 0;
};


// The HeapEntriesMap instance is used to track a mapping between
// real heap objects and their representations in heap snapshots.
class HeapEntriesMap {
 public:
  HeapEntriesMap();
  ~HeapEntriesMap();

  void AllocateEntries();
  HeapEntry* Map(HeapThing thing);
  void Pair(HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry);
  // Records a reference from |from| to |to|; the optional out
  // parameters receive the counts prior to this call.
  void CountReference(HeapThing from, HeapThing to,
                      int* prev_children_count = NULL,
                      int* prev_retainers_count = NULL);

  // Totals accumulated so far — used to size the snapshot's raw
  // entry storage.
  int entries_count() { return entries_count_; }
  int total_children_count() { return total_children_count_; }
  int total_retainers_count() { return total_retainers_count_; }

  // Placeholder stored until real entries are created by
  // AllocateEntries().
  static HeapEntry *const kHeapEntryPlaceholder;

 private:
  // Per-thing bookkeeping: the entry, the allocator that will create
  // it, and the reference counts gathered so far.
  struct EntryInfo {
    EntryInfo(HeapEntry* entry, HeapEntriesAllocator* allocator)
        : entry(entry),
          allocator(allocator),
          children_count(0),
          retainers_count(0) {
    }
    HeapEntry* entry;
    HeapEntriesAllocator* allocator;
    int children_count;
    int retainers_count;
  };

  static uint32_t Hash(HeapThing thing) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)));
  }
  // HashMap match function: things compared as raw pointers.
  static bool HeapThingsMatch(HeapThing key1, HeapThing key2) {
    return key1 == key2;
  }

  HashMap entries_;
  int entries_count_;
  int total_children_count_;
  int total_retainers_count_;

  friend class HeapObjectsSet;

  DISALLOW_COPY_AND_ASSIGN(HeapEntriesMap);
};


// A simple set of heap objects, backed by a pointer-keyed HashMap.
class HeapObjectsSet {
 public:
  HeapObjectsSet();
  void Clear();
  bool Contains(Object* object);
  void Insert(Object* obj);

 private:
  HashMap entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsSet);
};


// An interface used to populate a snapshot with nodes and edges.
class SnapshotFillerInterface {
 public:
  virtual ~SnapshotFillerInterface() { }
  virtual HeapEntry* AddEntry(HeapThing ptr,
                              HeapEntriesAllocator* allocator) = 0;
  virtual HeapEntry* FindEntry(HeapThing ptr) = 0;
  virtual HeapEntry* FindOrAddEntry(HeapThing ptr,
                                    HeapEntriesAllocator* allocator) = 0;
  // Adds an edge identified by an explicit index.
  virtual void SetIndexedReference(HeapGraphEdge::Type type,
                                   HeapThing parent_ptr,
                                   HeapEntry* parent_entry,
                                   int index,
                                   HeapThing child_ptr,
                                   HeapEntry* child_entry) = 0;
  // Adds an indexed edge, letting the filler choose the index.
  virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                            HeapThing parent_ptr,
                                            HeapEntry* parent_entry,
                                            HeapThing child_ptr,
                                            HeapEntry* child_entry) = 0;
  // Adds an edge identified by a name string.
  virtual void SetNamedReference(HeapGraphEdge::Type type,
                                 HeapThing parent_ptr,
                                 HeapEntry* parent_entry,
                                 const char* reference_name,
                                 HeapThing child_ptr,
                                 HeapEntry* child_entry) = 0;
  // Adds a named edge, letting the filler choose the name.
  virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                          HeapThing parent_ptr,
                                          HeapEntry* parent_entry,
                                          HeapThing child_ptr,
                                          HeapEntry* child_entry) = 0;
};


// Callback interface for reporting snapshotting progress; the bool
// result of ProgressReport allows the operation to be aborted.
class SnapshottingProgressReportingInterface {
 public:
  virtual ~SnapshottingProgressReportingInterface() { }
  virtual void ProgressStep() = 0;
  virtual bool ProgressReport(bool force) = 0;
};


// An implementation of V8 heap graph extractor.
class V8HeapExplorer : public HeapEntriesAllocator {
 public:
  V8HeapExplorer(HeapSnapshot* snapshot,
                 SnapshottingProgressReportingInterface* progress);
  virtual ~V8HeapExplorer();
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count);
  void AddRootEntries(SnapshotFillerInterface* filler);
  int EstimateObjectsCount();
  bool IterateAndExtractReferences(SnapshotFillerInterface* filler);

  static HeapObject* const kInternalRootObject;

 private:
  HeapEntry* AddEntry(
      HeapObject* object, int children_count, int retainers_count);
  HeapEntry* AddEntry(HeapObject* object,
                      HeapEntry::Type type,
                      const char* name,
                      int children_count,
                      int retainers_count);
  const char* GetSystemEntryName(HeapObject* object);
  // Extracts outgoing references of |obj|, dispatching on the kind
  // of object (closures, properties, elements, internal fields).
  void ExtractReferences(HeapObject* obj);
  void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractInternalReferences(JSObject* js_obj, HeapEntry* entry);
  void SetClosureReference(HeapObject* parent_obj,
                           HeapEntry* parent,
                           String* reference_name,
                           Object* child);
  void SetElementReference(HeapObject* parent_obj,
                           HeapEntry* parent,
                           int index,
                           Object* child);
  void SetInternalReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            const char* reference_name,
                            Object* child,
                            int field_offset = -1);
  void SetInternalReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            int index,
                            Object* child,
                            int field_offset = -1);
  void SetHiddenReference(HeapObject* parent_obj,
                          HeapEntry* parent,
                          int index,
                          Object* child);
  void SetPropertyReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            String* reference_name,
                            Object* child,
                            int field_offset = -1);
  void SetPropertyShortcutReference(HeapObject* parent_obj,
                                    HeapEntry* parent,
                                    String* reference_name,
                                    Object* child);
  void SetRootShortcutReference(Object* child);
  void SetRootGcRootsReference();
  void SetGcRootsReference(Object* child);

  HeapEntry* GetEntry(Object* obj);

  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  SnapshottingProgressReportingInterface* progress_;
  // Valid only during references extraction; not owned.
  SnapshotFillerInterface* filler_;

  static HeapObject* const kGcRootsObject;

  friend class IndexedReferencesExtractor;
  friend class RootsReferencesExtractor;

  DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
};


// An implementation of retained native objects extractor.
class NativeObjectsExplorer : public HeapEntriesAllocator {
 public:
  NativeObjectsExplorer(HeapSnapshot* snapshot,
                        SnapshottingProgressReportingInterface* progress);
  virtual ~NativeObjectsExplorer();
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count);
  void AddRootEntries(SnapshotFillerInterface* filler);
  int EstimateObjectsCount();
  bool IterateAndExtractReferences(SnapshotFillerInterface* filler);

 private:
  void FillRetainedObjects();
  List<HeapObject*>* GetListMaybeDisposeInfo(v8::RetainedObjectInfo* info);
  void SetNativeRootReference(v8::RetainedObjectInfo* info);
  void SetRootNativesRootReference();
  void SetWrapperNativeReferences(HeapObject* wrapper,
                                  v8::RetainedObjectInfo* info);
  void VisitSubtreeWrapper(Object** p, uint16_t class_id);

  static uint32_t InfoHash(v8::RetainedObjectInfo* info) {
    return ComputeIntegerHash(static_cast<uint32_t>(info->GetHash()));
  }
  // Keys match if identical, or if the embedder-provided info objects
  // report themselves equivalent.
  static bool RetainedInfosMatch(void* key1, void* key2) {
    return key1 == key2 ||
        (reinterpret_cast<v8::RetainedObjectInfo*>(key1))->IsEquivalent(
            reinterpret_cast<v8::RetainedObjectInfo*>(key2));
  }

  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  SnapshottingProgressReportingInterface* progress_;
  bool embedder_queried_;
  HeapObjectsSet in_groups_;
  // RetainedObjectInfo* -> List<HeapObject*>*
  HashMap objects_by_info_;
  // Used during references extraction.
  SnapshotFillerInterface* filler_;

  static HeapThing const kNativesRootObject;

  friend class GlobalHandlesExtractor;

  DISALLOW_COPY_AND_ASSIGN(NativeObjectsExplorer);
};


// Drives snapshot generation: counts entries and references, fills
// them in via the two explorers, then builds the dominator tree and
// approximates retained sizes.
class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
 public:
  HeapSnapshotGenerator(HeapSnapshot* snapshot,
                        v8::ActivityControl* control);
  bool GenerateSnapshot();

 private:
  bool ApproximateRetainedSizes();
  bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
                          Vector<HeapEntry*>* dominators);
  bool CountEntriesAndReferences();
  bool FillReferences();
  void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
  void ProgressStep();
  bool ProgressReport(bool force = false);
  bool SetEntriesDominators();
  void SetProgressTotal(int iterations_count);

  HeapSnapshot* snapshot_;
  v8::ActivityControl* control_;
  V8HeapExplorer v8_heap_explorer_;
  NativeObjectsExplorer dom_explorer_;
  // Mapping from HeapThing pointers to HeapEntry* pointers.
  HeapEntriesMap entries_;
  // Used during snapshot generation.
  int progress_counter_;
  int progress_total_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
};

class OutputStreamWriter;

// Serializes a HeapSnapshot as JSON, interning node and string ids
// in HashMaps and writing through an OutputStreamWriter.
class HeapSnapshotJSONSerializer {
 public:
  explicit HeapSnapshotJSONSerializer(HeapSnapshot* snapshot)
      : snapshot_(snapshot),
        nodes_(ObjectsMatch),
        strings_(ObjectsMatch),
        next_node_id_(1),
        next_string_id_(1),
        writer_(NULL) {
  }
  void Serialize(v8::OutputStream* stream);

 private:
  // HashMap match function: keys compared as raw pointers.
  INLINE(static bool ObjectsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  INLINE(static uint32_t ObjectHash(const void* key)) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)));
  }

  void EnumerateNodes();
  int GetNodeId(HeapEntry* entry);
  int GetStringId(const char* s);
  void SerializeEdge(HeapGraphEdge* edge);
  void SerializeImpl();
  void SerializeNode(HeapEntry* entry);
  void SerializeNodes();
  void SerializeSnapshot();
  void SerializeString(const unsigned char* s);
  void SerializeStrings();
  void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);

  HeapSnapshot* snapshot_;
  HashMap nodes_;
  HashMap strings_;
  int next_node_id_;
  int next_string_id_;
  OutputStreamWriter* writer_;

  friend class HeapSnapshotJSONSerializerEnumerator;
  friend class HeapSnapshotJSONSerializerIterator;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotJSONSerializer);
};


String* GetConstructorNameForHeapProfile(JSObject* object);

} }  // namespace v8::internal

#endif  // ENABLE_LOGGING_AND_PROFILING

#endif  // V8_PROFILE_GENERATOR_H_