Home | History | Annotate | Download | only in src
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 
     30 #include "profile-generator-inl.h"
     31 
     32 #include "global-handles.h"
     33 #include "heap-profiler.h"
     34 #include "scopeinfo.h"
     35 #include "unicode.h"
     36 #include "zone-inl.h"
     37 
     38 namespace v8 {
     39 namespace internal {
     40 
     41 
// Pre-sizes the two parallel lists (handle locations and removal flags)
// to 4 entries.
TokenEnumerator::TokenEnumerator()
    : token_locations_(4),
      token_removed_(4) {
}


// Releases every global handle whose token has not already been reclaimed
// by the weak callback.  Weakness is cleared first so that Destroy cannot
// retrigger TokenRemovedCallback.
TokenEnumerator::~TokenEnumerator() {
  Isolate* isolate = Isolate::Current();
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (!token_removed_[i]) {
      isolate->global_handles()->ClearWeakness(token_locations_[i]);
      isolate->global_handles()->Destroy(token_locations_[i]);
    }
  }
}
     57 
     58 
// Maps a security token object to a small integer id.  A token seen for
// the first time gets a new id backed by a weak global handle, so the slot
// can be invalidated when the token object dies.
int TokenEnumerator::GetTokenId(Object* token) {
  Isolate* isolate = Isolate::Current();
  // NULL stands for "no token" and maps to the reserved sentinel id.
  if (token == NULL) return TokenEnumerator::kNoSecurityToken;
  // Linear scan: reuse the id of a live, previously registered token.
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (*token_locations_[i] == token && !token_removed_[i]) return i;
  }
  Handle<Object> handle = isolate->global_handles()->Create(token);
  // handle.location() points to a memory cell holding a pointer
  // to a token object in the V8's heap.
  isolate->global_handles()->MakeWeak(handle.location(), this,
                                      TokenRemovedCallback);
  token_locations_.Add(handle.location());
  token_removed_.Add(false);
  // A token's id is simply its index in the list.
  return token_locations_.length() - 1;
}
     74 
     75 
// Weak-handle callback invoked when a registered token object is collected.
// Marks the matching slot as removed and disposes of the persistent handle;
// |parameter| is the owning TokenEnumerator passed to MakeWeak.
void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                           void* parameter) {
  reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
      Utils::OpenHandle(*handle).location());
  handle.Dispose();
}
     82 
     83 
     84 void TokenEnumerator::TokenRemoved(Object** token_location) {
     85   for (int i = 0; i < token_locations_.length(); ++i) {
     86     if (token_locations_[i] == token_location && !token_removed_[i]) {
     87       token_removed_[i] = true;
     88       return;
     89     }
     90   }
     91 }
     92 
     93 
// String-interning table; entries are compared with StringsMatch.
StringsStorage::StringsStorage()
    : names_(StringsMatch) {
}


// The table owns its strings: free every stored copy on destruction.
StringsStorage::~StringsStorage() {
  for (HashMap::Entry* p = names_.Start();
       p != NULL;
       p = names_.Next(p)) {
    DeleteArray(reinterpret_cast<const char*>(p->value));
  }
}
    106 
    107 
// Interns a copy of |src| and returns the canonical stored pointer, which
// stays valid for the lifetime of this StringsStorage.
const char* StringsStorage::GetCopy(const char* src) {
  int len = static_cast<int>(strlen(src));
  Vector<char> dst = Vector<char>::New(len + 1);
  OS::StrNCpy(dst, src, len);
  dst[len] = '\0';  // Terminate explicitly; only len chars were copied.
  uint32_t hash =
      HashSequentialString(dst.start(), len, HEAP->HashSeed());
  return AddOrDisposeString(dst.start(), hash);
}
    117 
    118 
// printf-style convenience wrapper over GetVFormatted.
const char* StringsStorage::GetFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  const char* result = GetVFormatted(format, args);
  va_end(args);
  return result;
}
    126 
    127 
// Takes ownership of |str|: stores it when it is a new entry, otherwise
// frees it and returns the previously interned equivalent string.
const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
  HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
  if (cache_entry->value == NULL) {
    // New entry added.
    cache_entry->value = str;
  } else {
    DeleteArray(str);
  }
  return reinterpret_cast<const char*>(cache_entry->value);
}
    138 
    139 
// Formats into a fixed 1KB buffer and interns the result.
const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
  Vector<char> str = Vector<char>::New(1024);
  int len = OS::VSNPrintF(str, format, args);
  if (len == -1) {
    // Output did not fit: fall back to returning the format string itself.
    // NOTE(review): the returned |format| pointer is not owned by this
    // storage, unlike every other return value -- callers must ensure it
    // outlives the profile data; verify at call sites.
    DeleteArray(str.start());
    return format;
  }
  uint32_t hash = HashSequentialString(
      str.start(), len, HEAP->HashSeed());
  return AddOrDisposeString(str.start(), hash);
}
    151 
    152 
// Interns a flattened, length-capped (kMaxNameSize) copy of a heap String.
// Returns "" when |name| is not actually a string.
const char* StringsStorage::GetName(String* name) {
  if (name->IsString()) {
    int length = Min(kMaxNameSize, name->length());
    SmartArrayPointer<char> data =
        name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length);
    uint32_t hash =
        HashSequentialString(*data, length, name->GetHeap()->HashSeed());
    // Detach() transfers ownership of the buffer to the storage.
    return AddOrDisposeString(data.Detach(), hash);
  }
  return "";
}
    164 
    165 
// Interns the decimal representation of |index|.
const char* StringsStorage::GetName(int index) {
  return GetFormatted("%d", index);
}
    169 
    170 
    171 const char* const CodeEntry::kEmptyNamePrefix = "";
    172 
    173 
// Copies the identifying fields of |source| into this entry.
// shared_id_ and security_token_id_ are not copied here.
void CodeEntry::CopyData(const CodeEntry& source) {
  tag_ = source.tag_;
  name_prefix_ = source.name_prefix_;
  name_ = source.name_;
  resource_name_ = source.resource_name_;
  line_number_ = source.line_number_;
}
    181 
    182 
// Computes a hash identifying this entry in call trees.  Entries carrying a
// shared function id hash on that id alone; otherwise the hash mixes the
// name/prefix/resource pointers (pointer identity suffices because these
// strings come from the interning StringsStorage) and the line number.
uint32_t CodeEntry::GetCallUid() const {
  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
  if (shared_id_ != 0) {
    hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
                               v8::internal::kZeroHashSeed);
  } else {
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
  }
  return hash;
}
    202 
    203 
    204 bool CodeEntry::IsSameAs(CodeEntry* entry) const {
    205   return this == entry
    206       || (tag_ == entry->tag_
    207           && shared_id_ == entry->shared_id_
    208           && (shared_id_ != 0
    209               || (name_prefix_ == entry->name_prefix_
    210                   && name_ == entry->name_
    211                   && resource_name_ == entry->resource_name_
    212                   && line_number_ == entry->line_number_)));
    213 }
    214 
    215 
// Looks up the child node for |entry|; returns NULL when absent.
ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), false);
  return map_entry != NULL ?
      reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
}
    222 
    223 
// Returns the child node for |entry|, creating and registering it (in both
// the children hash map and the ordered children list) when absent.
ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), true);
  if (map_entry->value == NULL) {
    // New node added.
    ProfileNode* new_node = new ProfileNode(tree_, entry);
    map_entry->value = new_node;
    children_list_.Add(new_node);
  }
  return reinterpret_cast<ProfileNode*>(map_entry->value);
}
    235 
    236 
// Converts this node's self tick count to milliseconds using the owning
// tree's tick-rate scale.
double ProfileNode::GetSelfMillis() const {
  return tree_->TicksToMillis(self_ticks_);
}


// Same conversion for the total (self + descendants) tick count.
double ProfileNode::GetTotalMillis() const {
  return tree_->TicksToMillis(total_ticks_);
}
    245 
    246 
// Debug-prints this node (ticks, name, token id, optional source position)
// and then recursively prints all children with increased indentation.
void ProfileNode::Print(int indent) {
  OS::Print("%5u %5u %*c %s%s [%d]",
            total_ticks_, self_ticks_,
            indent, ' ',
            entry_->name_prefix(),
            entry_->name(),
            entry_->security_token_id());
  // Only entries with a resource name get a "file:line" suffix.
  if (entry_->resource_name()[0] != '\0')
    OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
  OS::Print("\n");
  for (HashMap::Entry* p = children_.Start();
       p != NULL;
       p = children_.Next(p)) {
    reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
  }
}
    263 
    264 
// TraverseDepthFirst callback that deletes every node post-order, i.e.
// only after all of a node's children have already been deleted.
class DeleteNodesCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    delete node;
  }

  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
};
    275 
    276 
// Creates a tree with a synthetic "(root)" entry; root_entry_ is owned by
// the tree itself (a member), while nodes are heap-allocated.
ProfileTree::ProfileTree()
    : root_entry_(Logger::FUNCTION_TAG,
                  "",
                  "(root)",
                  "",
                  0,
                  TokenEnumerator::kNoSecurityToken),
      root_(new ProfileNode(this, &root_entry_)) {
}


// Deletes all nodes, including root_, via a post-order traversal.
ProfileTree::~ProfileTree() {
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}
    292 
    293 
    294 void ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
    295   ProfileNode* node = root_;
    296   for (CodeEntry** entry = path.start() + path.length() - 1;
    297        entry != path.start() - 1;
    298        --entry) {
    299     if (*entry != NULL) {
    300       node = node->FindOrAddChild(*entry);
    301     }
    302   }
    303   node->IncrementSelfTicks();
    304 }
    305 
    306 
    307 void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
    308   ProfileNode* node = root_;
    309   for (CodeEntry** entry = path.start();
    310        entry != path.start() + path.length();
    311        ++entry) {
    312     if (*entry != NULL) {
    313       node = node->FindOrAddChild(*entry);
    314     }
    315   }
    316   node->IncrementSelfTicks();
    317 }
    318 
    319 
// A (source node, cloned node) pair used as a stack frame during
// filtered cloning.
struct NodesPair {
  NodesPair(ProfileNode* src, ProfileNode* dst)
      : src(src), dst(dst) { }
  ProfileNode* src;
  ProfileNode* dst;
};
    326 
    327 
// TraverseDepthFirst callback that clones a profile tree, keeping only
// nodes whose security token is acceptable for security_token_id_.  Ticks
// of filtered-out nodes are attributed to their nearest kept ancestor.
// stack_ mirrors the traversal path with (source, clone) pairs.
class FilteredCloneCallback {
 public:
  FilteredCloneCallback(ProfileNode* dst_root, int security_token_id)
      : stack_(10),
        security_token_id_(security_token_id) {
    // Bottom frame: no source node, destination is the clone's root.
    stack_.Add(NodesPair(NULL, dst_root));
  }

  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) {
    if (IsTokenAcceptable(child->entry()->security_token_id(),
                          parent->entry()->security_token_id())) {
      ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
      clone->IncreaseSelfTicks(child->self_ticks());
      stack_.Add(NodesPair(child, clone));
    } else {
      // Attribute ticks to parent node.
      stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
    }
  }

  void AfterAllChildrenTraversed(ProfileNode* parent) { }

  void AfterChildTraversed(ProfileNode*, ProfileNode* child) {
    // Pop only if this child was cloned (filtered children never pushed).
    if (stack_.last().src == child) {
      stack_.RemoveLast();
    }
  }

 private:
  // A token is acceptable if it is absent, matches the filter, or inherits
  // an acceptable token from its parent.
  bool IsTokenAcceptable(int token, int parent_token) {
    if (token == TokenEnumerator::kNoSecurityToken
        || token == security_token_id_) return true;
    if (token == TokenEnumerator::kInheritsSecurityToken) {
      ASSERT(parent_token != TokenEnumerator::kInheritsSecurityToken);
      return parent_token == TokenEnumerator::kNoSecurityToken
          || parent_token == security_token_id_;
    }
    return false;
  }

  List<NodesPair> stack_;
  int security_token_id_;
};
    371 
// Populates this tree with a security-token-filtered copy of |src| and
// recomputes the total tick counts of the result.
void ProfileTree::FilteredClone(ProfileTree* src, int security_token_id) {
  ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
  FilteredCloneCallback cb(root_, security_token_id);
  src->TraverseDepthFirst(&cb);
  CalculateTotalTicks();
}
    378 
    379 
    380 void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
    381   ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
    382 }
    383 
    384 
// Cursor over a node's children; used as a stack frame by the
// non-recursive traversal below.
class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  INLINE(ProfileNode* current_child()) {
    return node->children()->at(child_idx_);
  }
  INLINE(bool has_current_child()) {
    return child_idx_ < node->children()->length();
  }
  INLINE(void next_child()) { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;
};
    401 
    402 
// Non-recursive implementation of a depth-first post-order tree traversal.
// The callback receives BeforeTraversingChild on the way down,
// AfterAllChildrenTraversed once a node's children are exhausted, and
// AfterChildTraversed when control returns to the parent.
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  List<Position> stack(10);
  stack.Add(Position(root_));
  while (stack.length() > 0) {
    Position& current = stack.last();
    if (current.has_current_child()) {
      // Descend into the next unvisited child.
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.Add(Position(current.current_child()));
    } else {
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();
      }
      // Remove child from the stack.
      stack.RemoveLast();
    }
  }
}
    425 
    426 
// Post-order callback computing total ticks: each node's total becomes its
// own self ticks plus the already-computed totals of all its children.
class CalculateTotalTicksCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    node->IncreaseTotalTicks(node->self_ticks());
  }

  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
    parent->IncreaseTotalTicks(child->total_ticks());
  }
};
    439 
    440 
// Recomputes total tick counts for the whole tree, bottom-up.
void ProfileTree::CalculateTotalTicks() {
  CalculateTotalTicksCallback cb;
  TraverseDepthFirst(&cb);
}


// One-line debug summary of the root node's tick counts and times.
void ProfileTree::ShortPrint() {
  OS::Print("root: %u %u %.2fms %.2fms\n",
            root_->total_ticks(), root_->self_ticks(),
            root_->GetTotalMillis(), root_->GetSelfMillis());
}
    452 
    453 
// Records a sampled stack in both views: top-down consumes the path from
// its end, bottom-up from its start.
void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
  top_down_.AddPathFromEnd(path);
  bottom_up_.AddPathFromStart(path);
}


// Finalizes total tick counts in both trees.
void CpuProfile::CalculateTotalTicks() {
  top_down_.CalculateTotalTicks();
  bottom_up_.CalculateTotalTicks();
}


// Propagates the measured sampling rate to both trees.
void CpuProfile::SetActualSamplingRate(double actual_sampling_rate) {
  top_down_.SetTickRatePerMs(actual_sampling_rate);
  bottom_up_.SetTickRatePerMs(actual_sampling_rate);
}
    470 
    471 
// Returns a new heap-allocated profile containing only the data visible
// to |security_token_id|; the caller takes ownership.
CpuProfile* CpuProfile::FilteredClone(int security_token_id) {
  ASSERT(security_token_id != TokenEnumerator::kNoSecurityToken);
  CpuProfile* clone = new CpuProfile(title_, uid_);
  clone->top_down_.FilteredClone(&top_down_, security_token_id);
  clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
  return clone;
}
    479 
    480 
// One-line debug summaries of both trees.
void CpuProfile::ShortPrint() {
  OS::Print("top down ");
  top_down_.ShortPrint();
  OS::Print("bottom up ");
  bottom_up_.ShortPrint();
}


// Full debug dump of both trees.
void CpuProfile::Print() {
  OS::Print("[Top down]:\n");
  top_down_.Print();
  OS::Print("[Bottom up]:\n");
  bottom_up_.Print();
}
    495 
    496 
// Sentinel entry value: code-tree nodes holding it store a shared function
// id in their 'size' field instead of real code info (see GetSharedId).
CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
    499 
    500 
// Registers a code object at [addr, addr + size), first evicting any
// existing entries that overlap that range.
void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  DeleteAllCoveredCode(addr, addr + size);
  CodeTree::Locator locator;
  tree_.Insert(addr, &locator);
  locator.set_value(CodeEntryInfo(entry, size));
}
    507 
    508 
// Removes every tree entry whose address range overlaps [start, end).
// Walks backwards from end-1 via FindGreatestLessThan, collecting keys
// first, then removing them (removal during the walk would invalidate it).
void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
  List<Address> to_delete;
  Address addr = end - 1;
  while (addr >= start) {
    CodeTree::Locator locator;
    if (!tree_.FindGreatestLessThan(addr, &locator)) break;
    Address start2 = locator.key(), end2 = start2 + locator.value().size;
    // Standard half-open interval overlap test against [start, end).
    if (start2 < end && start < end2) to_delete.Add(start2);
    addr = start2 - 1;
  }
  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
}
    521 
    522 
// Returns the code entry whose range contains |addr|, or NULL.
CodeEntry* CodeMap::FindEntry(Address addr) {
  CodeTree::Locator locator;
  if (tree_.FindGreatestLessThan(addr, &locator)) {
    // locator.key() <= addr. Need to check that addr is within entry.
    const CodeEntryInfo& entry = locator.value();
    if (addr < (locator.key() + entry.size))
      return entry.entry;
  }
  return NULL;
}
    533 
    534 
// Returns a stable id for a shared function at |addr|, allocating the next
// id when the address is seen for the first time.
int CodeMap::GetSharedId(Address addr) {
  CodeTree::Locator locator;
  // For shared function entries, 'size' field is used to store their IDs.
  if (tree_.Find(addr, &locator)) {
    const CodeEntryInfo& entry = locator.value();
    ASSERT(entry.entry == kSharedFunctionCodeEntry);
    return entry.size;
  } else {
    tree_.Insert(addr, &locator);
    int id = next_shared_id_++;
    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
    return id;
  }
}
    549 
    550 
// Relocates a code entry from |from| to |to| (e.g. after GC compaction).
// A no-op when the addresses are equal or |from| is unknown.
void CodeMap::MoveCode(Address from, Address to) {
  if (from == to) return;
  CodeTree::Locator locator;
  if (!tree_.Find(from, &locator)) return;
  CodeEntryInfo entry = locator.value();
  tree_.Remove(from);
  // Re-inserting via AddCode also evicts anything overlapping [to, to+size).
  AddCode(to, entry.entry, entry.size);
}
    559 
    560 
// Per-entry visitor used by CodeMap::Print to dump one tree node.
void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
}


// Debug dump of every (address, size, name) triple in the map.
void CodeMap::Print() {
  CodeTreePrinter printer;
  tree_.ForEach(&printer);
}
    571 
    572 
// The semaphore (initial count 1) acts as a mutex guarding
// current_profiles_, which is touched from both the VM and sampler sides.
CpuProfilesCollection::CpuProfilesCollection()
    : profiles_uids_(UidsMatch),
      current_profiles_semaphore_(OS::CreateSemaphore(1)) {
  // Create list of unabridged profiles.
  profiles_by_token_.Add(new List<CpuProfile*>());
}
    579 
    580 
// List<>::Iterate helper: deletes one heap-allocated CodeEntry.
static void DeleteCodeEntry(CodeEntry** entry_ptr) {
  delete *entry_ptr;
}

// List<>::Iterate helper: deletes one heap-allocated CpuProfile.
static void DeleteCpuProfile(CpuProfile** profile_ptr) {
  delete *profile_ptr;
}

// List<>::Iterate helper: deletes a per-token profile list and its
// profiles; slots in profiles_by_token_ may legitimately be NULL.
static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
  if (*list_ptr != NULL) {
    (*list_ptr)->Iterate(DeleteCpuProfile);
    delete *list_ptr;
  }
}
    595 
// Frees the lock and every owned profile, filtered clone, per-token list,
// and code entry.
CpuProfilesCollection::~CpuProfilesCollection() {
  delete current_profiles_semaphore_;
  current_profiles_.Iterate(DeleteCpuProfile);
  detached_profiles_.Iterate(DeleteCpuProfile);
  profiles_by_token_.Iterate(DeleteProfilesList);
  code_entries_.Iterate(DeleteCodeEntry);
}
    603 
    604 
// Starts collecting a new profile under |title|.  Fails (returns false)
// when the simultaneous-profile limit is reached or a profile with the
// same title is already running.  The semaphore guards current_profiles_.
bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid) {
  ASSERT(uid > 0);
  current_profiles_semaphore_->Wait();
  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
    current_profiles_semaphore_->Signal();
    return false;
  }
  for (int i = 0; i < current_profiles_.length(); ++i) {
    if (strcmp(current_profiles_[i]->title(), title) == 0) {
      // Ignore attempts to start profile with the same title.
      current_profiles_semaphore_->Signal();
      return false;
    }
  }
  current_profiles_.Add(new CpuProfile(title, uid));
  current_profiles_semaphore_->Signal();
  return true;
}


// Overload interning the heap String title first.
bool CpuProfilesCollection::StartProfiling(String* title, unsigned uid) {
  return StartProfiling(GetName(title), uid);
}
    628 
    629 
// Stops and finalizes the profile matching |title| (an empty title matches
// the most recently started profile).  The finished profile is moved into
// the unabridged list, indexed by uid, and returned filtered through
// |security_token_id|.  Returns NULL when no matching profile is running.
CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id,
                                                 const char* title,
                                                 double actual_sampling_rate) {
  const int title_len = StrLength(title);
  CpuProfile* profile = NULL;
  current_profiles_semaphore_->Wait();
  // Scan newest-first so an empty title stops the latest profile.
  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
      profile = current_profiles_.Remove(i);
      break;
    }
  }
  current_profiles_semaphore_->Signal();

  if (profile != NULL) {
    profile->CalculateTotalTicks();
    profile->SetActualSamplingRate(actual_sampling_rate);
    List<CpuProfile*>* unabridged_list =
        profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
    unabridged_list->Add(profile);
    // Map uid -> index in the unabridged list.
    HashMap::Entry* entry =
        profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
                              static_cast<uint32_t>(profile->uid()),
                              true);
    ASSERT(entry->value == NULL);
    entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
    return GetProfile(security_token_id, profile->uid());
  }
  return NULL;
}
    660 
    661 
// Returns the profile with |uid| as seen by |security_token_id|: either
// the unabridged profile, or a lazily created (and cached) filtered clone.
// Returns NULL for unknown uids.
CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id,
                                              unsigned uid) {
  int index = GetProfileIndex(uid);
  if (index < 0) return NULL;
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list->at(index);
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  if (list->at(index) == NULL) {
    // Cache the filtered clone for this token at the same index.
    (*list)[index] =
        unabridged_list->at(index)->FilteredClone(security_token_id);
  }
  return list->at(index);
}
    678 
    679 
// Maps a profile uid to its index in the unabridged list; -1 if unknown.
int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
  HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                static_cast<uint32_t>(uid),
                                                false);
  return entry != NULL ?
      static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
}
    687 
    688 
    689 bool CpuProfilesCollection::IsLastProfile(const char* title) {
    690   // Called from VM thread, and only it can mutate the list,
    691   // so no locking is needed here.
    692   if (current_profiles_.length() != 1) return false;
    693   return StrLength(title) == 0
    694       || strcmp(current_profiles_[0]->title(), title) == 0;
    695 }
    696 
    697 
// Removes a completed profile from the collection.  Called from the VM
// thread.  Detached profiles are simply dropped from the detached list;
// indexed profiles are removed from the uid map and every per-token list,
// with surviving indices shifted down to stay consistent.
void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
  // Called from VM thread for a completed profile.
  unsigned uid = profile->uid();
  int index = GetProfileIndex(uid);
  if (index < 0) {
    detached_profiles_.RemoveElement(profile);
    return;
  }
  profiles_uids_.Remove(reinterpret_cast<void*>(uid),
                        static_cast<uint32_t>(uid));
  // Decrement all indexes above the deleted one.
  for (HashMap::Entry* p = profiles_uids_.Start();
       p != NULL;
       p = profiles_uids_.Next(p)) {
    intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
    if (p_index > index) {
      p->value = reinterpret_cast<void*>(p_index - 1);
    }
  }
  for (int i = 0; i < profiles_by_token_.length(); ++i) {
    List<CpuProfile*>* list = profiles_by_token_[i];
    if (list != NULL && index < list->length()) {
      // Move all filtered clones into detached_profiles_,
      // so we can know that they are still in use.
      CpuProfile* cloned_profile = list->Remove(index);
      if (cloned_profile != NULL && cloned_profile != profile) {
        detached_profiles_.Add(cloned_profile);
      }
    }
  }
}
    729 
    730 
// Converts a security token id to an index in profiles_by_token_.
int CpuProfilesCollection::TokenToIndex(int security_token_id) {
  ASSERT(TokenEnumerator::kNoSecurityToken == -1);
  return security_token_id + 1;  // kNoSecurityToken -> 0, 0 -> 1, ...
}
    735 
    736 
// Returns the per-token profile list, creating it on demand and padding it
// with NULL slots so it is index-aligned with the unabridged list (NULL
// slots are filled lazily by FilteredClone in GetProfile/Profiles).
List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
    int security_token_id) {
  const int index = TokenToIndex(security_token_id);
  // Grow profiles_by_token_ with NULL placeholders up to this index.
  const int lists_to_add = index - profiles_by_token_.length() + 1;
  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  const int current_count = unabridged_list->length();
  if (profiles_by_token_[index] == NULL) {
    profiles_by_token_[index] = new List<CpuProfile*>(current_count);
  }
  List<CpuProfile*>* list = profiles_by_token_[index];
  const int profiles_to_add = current_count - list->length();
  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
  return list;
}
    753 
    754 
// Returns all completed profiles visible to |security_token_id|, building
// any missing filtered clones first.  With no token, the unabridged list
// is returned directly.
List<CpuProfile*>* CpuProfilesCollection::Profiles(int security_token_id) {
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list;
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  const int current_count = unabridged_list->length();
  for (int i = 0; i < current_count; ++i) {
    if (list->at(i) == NULL) {
      (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
    }
  }
  return list;
}
    770 
    771 
// The NewCodeEntry overloads below create CodeEntry objects owned by this
// collection (freed in the destructor via code_entries_).  All strings are
// interned through the inherited StringsStorage accessors.

// Entry for a named function with source position info.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               String* name,
                                               String* resource_name,
                                               int line_number) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   GetName(resource_name),
                                   line_number,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


// Entry for a C-string-named function with no source position.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


// Prefixed entry (e.g. for stubs); inherits its security token from the
// calling context.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name_prefix,
                                               String* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   name_prefix,
                                   GetName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


// Entry identified only by its argument count; token is inherited.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               int args_count) {
  CodeEntry* entry = new CodeEntry(tag,
                                   "args_count: ",
                                   GetName(args_count),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);
  return entry;
}
    825 
    826 
// Appends a symbolized stack |path| to every profile currently being
// recorded. The semaphore is used as a binary mutex guarding
// current_profiles_ against concurrent start/stop of profiles.
void CpuProfilesCollection::AddPathToCurrentProfiles(
    const Vector<CodeEntry*>& path) {
  // As starting / stopping profiles is rare relatively to this
  // method, we don't bother minimizing the duration of lock holding,
  // e.g. copying contents of the list to a local vector.
  current_profiles_semaphore_->Wait();
  for (int i = 0; i < current_profiles_.length(); ++i) {
    current_profiles_[i]->AddPath(path);
  }
  current_profiles_semaphore_->Signal();
}
    838 
    839 
// Called on every profiler tick; when the countdown set by
// UpdateMeasurements() expires, re-measures the actual tick rate
// against wall-clock time.
void SampleRateCalculator::Tick() {
  if (--wall_time_query_countdown_ == 0)
    UpdateMeasurements(OS::TimeCurrentMillis());
}
    844 
    845 
// Updates the running average of ticks-per-millisecond from the time
// elapsed since the previous measurement, and re-arms the tick
// countdown. The first call only records the baseline wall time.
void SampleRateCalculator::UpdateMeasurements(double current_time) {
  if (measurements_count_++ != 0) {
    // kWallTimeQueryIntervalMs * ticks_per_ms_ is the number of ticks
    // elapsed since the last measurement (the countdown length).
    const double measured_ticks_per_ms =
        (kWallTimeQueryIntervalMs * ticks_per_ms_) /
        (current_time - last_wall_time_);
    // Update the average value.
    ticks_per_ms_ +=
        (measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
    // Update the externally accessible result.
    result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
  }
  last_wall_time_ = current_time;
  wall_time_query_countdown_ =
      static_cast<unsigned>(kWallTimeQueryIntervalMs * ticks_per_ms_);
}
    861 
    862 
// Synthetic entry names for frames that do not correspond to a named
// JS function.
const char* const ProfileGenerator::kAnonymousFunctionName =
    "(anonymous function)";
const char* const ProfileGenerator::kProgramEntryName =
    "(program)";
const char* const ProfileGenerator::kGarbageCollectorEntryName =
    "(garbage collector)";
    869 
    870 
// Pre-creates the shared "(program)" and "(garbage collector)" code
// entries so ticks outside of symbolized code can be attributed to them.
ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
    : profiles_(profiles),
      program_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
      gc_entry_(
          profiles->NewCodeEntry(Logger::BUILTIN_TAG,
                                 kGarbageCollectorEntryName)) {
}
    879 
    880 
// Symbolizes one tick sample (pc, top-of-stack, stack frames) into a
// path of CodeEntry pointers and feeds it to all active profiles.
// Unresolvable addresses stay NULL in the path.
void ProfileGenerator::RecordTickSample(const TickSample& sample) {
  // Allocate space for stack frames + pc + function + vm-state.
  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
  // As actual number of decoded code entries may vary, initialize
  // entries vector with NULL values.
  CodeEntry** entry = entries.start();
  memset(entry, 0, entries.length() * sizeof(*entry));
  if (sample.pc != NULL) {
    *entry++ = code_map_.FindEntry(sample.pc);

    if (sample.has_external_callback) {
      // Don't use PC when in external callback code, as it can point
      // inside callback's code, and we will erroneously report
      // that a callback calls itself.
      *(entries.start()) = NULL;
      *entry++ = code_map_.FindEntry(sample.external_callback);
    } else if (sample.tos != NULL) {
      // Find out, if top of stack was pointing inside a JS function
      // meaning that we have encountered a frameless invocation.
      *entry = code_map_.FindEntry(sample.tos);
      if (*entry != NULL && !(*entry)->is_js_function()) {
        *entry = NULL;
      }
      entry++;
    }

    // Symbolize each captured stack frame address.
    for (const Address* stack_pos = sample.stack,
           *stack_end = stack_pos + sample.frames_count;
         stack_pos != stack_end;
         ++stack_pos) {
      *entry++ = code_map_.FindEntry(*stack_pos);
    }
  }

  if (FLAG_prof_browser_mode) {
    bool no_symbolized_entries = true;
    for (CodeEntry** e = entries.start(); e != entry; ++e) {
      if (*e != NULL) {
        no_symbolized_entries = false;
        break;
      }
    }
    // If no frames were symbolized, put the VM state entry in.
    if (no_symbolized_entries) {
      *entry++ = EntryForVMState(sample.state);
    }
  }

  profiles_->AddPathToCurrentProfiles(entries);
}
    931 
    932 
// Initializes a named edge (context variable, property, internal or
// shortcut) occupying slot |child_index| of the owning entry.
void HeapGraphEdge::Init(
    int child_index, Type type, const char* name, HeapEntry* to) {
  ASSERT(type == kContextVariable
         || type == kProperty
         || type == kInternal
         || type == kShortcut);
  child_index_ = child_index;
  type_ = type;
  name_ = name;
  to_ = to;
}
    944 
    945 
// Initializes an indexed edge (element, hidden or weak) occupying slot
// |child_index| of the owning entry.
void HeapGraphEdge::Init(int child_index, Type type, int index, HeapEntry* to) {
  ASSERT(type == kElement || type == kHidden || type == kWeak);
  child_index_ = child_index;
  type_ = type;
  index_ = index;
  to_ = to;
}
    953 
    954 
// Convenience overload: initializes a plain element edge.
void HeapGraphEdge::Init(int child_index, int index, HeapEntry* to) {
  Init(child_index, kElement, index, to);
}
    958 
    959 
// Recovers the owning ("from") entry of this edge. Relies on the packed
// layout: an entry's edges follow it in memory, so stepping back
// child_index_ edges reaches the first edge, and the HeapEntry
// immediately precedes it.
HeapEntry* HeapGraphEdge::From() {
  return reinterpret_cast<HeapEntry*>(this - child_index_) - 1;
}
    963 
    964 
// Initializes an entry constructed in the snapshot's raw buffer.
// children_count / retainers_count fix the size of the trailing edge
// arrays; retained_size and dominator are computed later.
void HeapEntry::Init(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     int self_size,
                     int children_count,
                     int retainers_count) {
  snapshot_ = snapshot;
  type_ = type;
  painted_ = false;
  name_ = name;
  self_size_ = self_size;
  retained_size_ = 0;
  children_count_ = children_count;
  retainers_count_ = retainers_count;
  dominator_ = NULL;
  id_ = id;
}
    983 
    984 
// Records a named edge to |entry| in this entry's child slot and links
// the corresponding back-pointer into |entry|'s retainers array.
void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  int child_index,
                                  const char* name,
                                  HeapEntry* entry,
                                  int retainer_index) {
  children()[child_index].Init(child_index, type, name, entry);
  entry->retainers()[retainer_index] = children_arr() + child_index;
}
    993 
    994 
// Records an indexed edge to |entry| in this entry's child slot and
// links the corresponding back-pointer into |entry|'s retainers array.
void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int child_index,
                                    int index,
                                    HeapEntry* entry,
                                    int retainer_index) {
  children()[child_index].Init(child_index, type, index, entry);
  entry->retainers()[retainer_index] = children_arr() + child_index;
}
   1003 
   1004 
// Records an element edge without registering a retainer back-pointer
// on the target (one-directional reference).
void HeapEntry::SetUnidirElementReference(
    int child_index, int index, HeapEntry* entry) {
  children()[child_index].Init(child_index, index, entry);
}
   1009 
   1010 
// Resolves this snapshot entry back to the live heap object with the
// same snapshot id (may return an empty handle if it is unreachable).
Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
}
   1014 
   1015 
   1016 void HeapEntry::Print(
   1017     const char* prefix, const char* edge_name, int max_depth, int indent) {
   1018   OS::Print("%6d %7d @%6llu %*c %s%s: ",
   1019             self_size(), retained_size(), id(),
   1020             indent, ' ', prefix, edge_name);
   1021   if (type() != kString) {
   1022     OS::Print("%s %.40s\n", TypeAsString(), name_);
   1023   } else {
   1024     OS::Print("\"");
   1025     const char* c = name_;
   1026     while (*c && (c - name_) <= 40) {
   1027       if (*c != '\n')
   1028         OS::Print("%c", *c);
   1029       else
   1030         OS::Print("\\n");
   1031       ++c;
   1032     }
   1033     OS::Print("\"\n");
   1034   }
   1035   if (--max_depth == 0) return;
   1036   Vector<HeapGraphEdge> ch = children();
   1037   for (int i = 0; i < ch.length(); ++i) {
   1038     HeapGraphEdge& edge = ch[i];
   1039     const char* edge_prefix = "";
   1040     EmbeddedVector<char, 64> index;
   1041     const char* edge_name = index.start();
   1042     switch (edge.type()) {
   1043       case HeapGraphEdge::kContextVariable:
   1044         edge_prefix = "#";
   1045         edge_name = edge.name();
   1046         break;
   1047       case HeapGraphEdge::kElement:
   1048         OS::SNPrintF(index, "%d", edge.index());
   1049         break;
   1050       case HeapGraphEdge::kInternal:
   1051         edge_prefix = "$";
   1052         edge_name = edge.name();
   1053         break;
   1054       case HeapGraphEdge::kProperty:
   1055         edge_name = edge.name();
   1056         break;
   1057       case HeapGraphEdge::kHidden:
   1058         edge_prefix = "$";
   1059         OS::SNPrintF(index, "%d", edge.index());
   1060         break;
   1061       case HeapGraphEdge::kShortcut:
   1062         edge_prefix = "^";
   1063         edge_name = edge.name();
   1064         break;
   1065       case HeapGraphEdge::kWeak:
   1066         edge_prefix = "w";
   1067         OS::SNPrintF(index, "%d", edge.index());
   1068         break;
   1069       default:
   1070         OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
   1071     }
   1072     edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
   1073   }
   1074 }
   1075 
   1076 
// Returns a short human-readable label for this entry's type, used by
// the debug printer; unknown types map to "???".
const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    default: return "???";
  }
}
   1092 
   1093 
   1094 size_t HeapEntry::EntriesSize(int entries_count,
   1095                               int children_count,
   1096                               int retainers_count) {
   1097   return sizeof(HeapEntry) * entries_count         // NOLINT
   1098       + sizeof(HeapGraphEdge) * children_count     // NOLINT
   1099       + sizeof(HeapGraphEdge*) * retainers_count;  // NOLINT
   1100 }
   1101 
   1102 
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

// Expected sizes, checked via STATIC_CHECK in the HeapSnapshot
// constructor, so that accidental growth of these structs is caught at
// compile time for both 32- and 64-bit builds.
template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 32;
  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 48;
  static const uint64_t kMaxSerializableSnapshotRawSize =
      static_cast<uint64_t>(6000) * MB;
};

}  // namespace
   1123 
// Constructs an empty snapshot owned by |collection|; entry storage is
// allocated later via AllocateEntries(). The STATIC_CHECKs pin the
// sizes of the snapshot building blocks (see SnapshotSizeConstants).
HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                           HeapSnapshot::Type type,
                           const char* title,
                           unsigned uid)
    : collection_(collection),
      type_(type),
      title_(title),
      uid_(uid),
      root_entry_(NULL),
      gc_roots_entry_(NULL),
      natives_root_entry_(NULL),
      raw_entries_(NULL),
      entries_sorted_(false) {
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_entries_[i] = NULL;
  }
}
   1147 
   1148 
// Releases the raw buffer that holds all entries and edges; the entries
// themselves are plain data within it and need no destruction.
HeapSnapshot::~HeapSnapshot() {
  DeleteArray(raw_entries_);
}
   1152 
   1153 
// Unregisters this snapshot from its collection and destroys it.
void HeapSnapshot::Delete() {
  collection_->RemoveSnapshot(this);
  delete this;
}
   1158 
   1159 
// Allocates one contiguous raw buffer big enough for all entries, edges
// and retainer pointers; entries are carved out of it sequentially by
// GetNextEntryToInit(). May only be called once per snapshot.
void HeapSnapshot::AllocateEntries(int entries_count,
                                   int children_count,
                                   int retainers_count) {
  ASSERT(raw_entries_ == NULL);
  raw_entries_size_ =
      HeapEntry::EntriesSize(entries_count, children_count, retainers_count);
  raw_entries_ = NewArray<char>(raw_entries_size_);
}
   1168 
   1169 
// Iteration callback for HeapSnapshot::ClearPaint().
static void HeapEntryClearPaint(HeapEntry** entry_ptr) {
  (*entry_ptr)->clear_paint();
}
   1173 
   1174 
// Resets the "painted" traversal mark on every entry.
void HeapSnapshot::ClearPaint() {
  entries_.Iterate(HeapEntryClearPaint);
}
   1178 
   1179 
// Adds the synthetic root entry; it has no retainers and an empty name.
// May only be called once per snapshot.
HeapEntry* HeapSnapshot::AddRootEntry(int children_count) {
  ASSERT(root_entry_ == NULL);
  return (root_entry_ = AddEntry(HeapEntry::kObject,
                                 "",
                                 HeapObjectsMap::kInternalRootObjectId,
                                 0,
                                 children_count,
                                 0));
}
   1189 
   1190 
// Adds the synthetic "(GC roots)" entry. May only be called once per
// snapshot.
HeapEntry* HeapSnapshot::AddGcRootsEntry(int children_count,
                                         int retainers_count) {
  ASSERT(gc_roots_entry_ == NULL);
  return (gc_roots_entry_ = AddEntry(HeapEntry::kObject,
                                     "(GC roots)",
                                     HeapObjectsMap::kGcRootsObjectId,
                                     0,
                                     children_count,
                                     retainers_count));
}
   1201 
   1202 
   1203 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag,
   1204                                            int children_count,
   1205                                            int retainers_count) {
   1206   ASSERT(gc_subroot_entries_[tag] == NULL);
   1207   ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
   1208   return (gc_subroot_entries_[tag] = AddEntry(
   1209       HeapEntry::kObject,
   1210       VisitorSynchronization::kTagNames[tag],
   1211       HeapObjectsMap::GetNthGcSubrootId(tag),
   1212       0,
   1213       children_count,
   1214       retainers_count));
   1215 }
   1216 
   1217 
// Carves the next entry out of the preallocated raw buffer and
// initializes it. AllocateEntries() must have been called first.
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  int size,
                                  int children_count,
                                  int retainers_count) {
  HeapEntry* entry = GetNextEntryToInit();
  entry->Init(this, type, name, id, size, children_count, retainers_count);
  return entry;
}
   1228 
   1229 
// Makes every entry without a computed dominator its own dominator.
void HeapSnapshot::SetDominatorsToSelf() {
  for (int i = 0; i < entries_.length(); ++i) {
    HeapEntry* entry = entries_[i];
    if (entry->dominator() == NULL) entry->set_dominator(entry);
  }
}
   1236 
   1237 
// Returns the address inside raw_entries_ where the next entry should
// be constructed. Entries are variable-sized (EntrySize() depends on
// children/retainers counts), so each new entry starts immediately
// after the previous one; the first starts at the buffer base.
HeapEntry* HeapSnapshot::GetNextEntryToInit() {
  if (entries_.length() > 0) {
    HeapEntry* last_entry = entries_.last();
    entries_.Add(reinterpret_cast<HeapEntry*>(
        reinterpret_cast<char*>(last_entry) + last_entry->EntrySize()));
  } else {
    entries_.Add(reinterpret_cast<HeapEntry*>(raw_entries_));
  }
  // Guard against overrunning the buffer sized by AllocateEntries().
  ASSERT(reinterpret_cast<char*>(entries_.last()) <
         (raw_entries_ + raw_entries_size_));
  return entries_.last();
}
   1250 
   1251 
   1252 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
   1253   List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
   1254 
   1255   // Perform a binary search by id.
   1256   int low = 0;
   1257   int high = entries_by_id->length() - 1;
   1258   while (low <= high) {
   1259     int mid =
   1260         (static_cast<unsigned int>(low) + static_cast<unsigned int>(high)) >> 1;
   1261     SnapshotObjectId mid_id = entries_by_id->at(mid)->id();
   1262     if (mid_id > id)
   1263       high = mid - 1;
   1264     else if (mid_id < id)
   1265       low = mid + 1;
   1266     else
   1267       return entries_by_id->at(mid);
   1268   }
   1269   return NULL;
   1270 }
   1271 
   1272 
   1273 template<class T>
   1274 static int SortByIds(const T* entry1_ptr,
   1275                      const T* entry2_ptr) {
   1276   if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
   1277   return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
   1278 }
   1279 
   1280 
// Returns the entries list sorted by id, sorting lazily on first use;
// the sorted flag avoids re-sorting on subsequent calls.
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (!entries_sorted_) {
    entries_.Sort(SortByIds);
    entries_sorted_ = true;
  }
  return &entries_;
}
   1288 
   1289 
// Debug-prints the snapshot tree starting from the root entry.
void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}
   1293 
   1294 
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
// The reserved ids below are spaced kObjectIdStep apart, with one slot
// per GC subroot tag before the first id available to heap objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
   1305 
// Starts in initial-fill mode, where every address lookup assigns a
// fresh id (see FindObject); ids begin past the reserved root ids.
HeapObjectsMap::HeapObjectsMap()
    : initial_fill_mode_(true),
      next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      entries_(new List<EntryInfo>()) { }
   1311 
   1312 
// Frees the heap-allocated entry-info list.
HeapObjectsMap::~HeapObjectsMap() {
  delete entries_;
}
   1316 
   1317 
// Ends initial-fill mode after the first snapshot and drops entries for
// objects that were not touched during snapshot generation.
void HeapObjectsMap::SnapshotGenerationFinished() {
  initial_fill_mode_ = false;
  RemoveDeadEntries();
}
   1322 
   1323 
// Returns the id for the object at |addr|, assigning a new one when the
// address is unknown. In initial-fill mode the lookup is skipped
// because every address is necessarily new.
SnapshotObjectId HeapObjectsMap::FindObject(Address addr) {
  if (!initial_fill_mode_) {
    SnapshotObjectId existing = FindEntry(addr);
    if (existing != 0) return existing;
  }
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  AddEntry(addr, id);
  return id;
}
   1334 
   1335 
// Tracks a GC move so the object keeps its id: re-keys the map entry
// from the old address to the new one. A NULL |to| simply drops the
// entry.
void HeapObjectsMap::MoveObject(Address from, Address to) {
  if (from == to) return;
  HashMap::Entry* entry = entries_map_.Lookup(from, AddressHash(from), false);
  if (entry != NULL) {
    void* value = entry->value;
    entries_map_.Remove(from, AddressHash(from));
    if (to != NULL) {
      entry = entries_map_.Lookup(to, AddressHash(to), true);
      // We can have an entry at the new location, it is OK, as GC can overwrite
      // dead objects with alive objects being moved.
      entry->value = value;
    }
  }
}
   1350 
   1351 
// Records a new address->id mapping. The hash map stores the index into
// entries_ (as a pointer-sized integer), not the id itself.
void HeapObjectsMap::AddEntry(Address addr, SnapshotObjectId id) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
  ASSERT(entry->value == NULL);
  entry->value = reinterpret_cast<void*>(entries_->length());
  entries_->Add(EntryInfo(id));
}
   1358 
   1359 
   1360 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
   1361   HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
   1362   if (entry != NULL) {
   1363     int entry_index =
   1364         static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
   1365     EntryInfo& entry_info = entries_->at(entry_index);
   1366     entry_info.accessed = true;
   1367     return entry_info.id;
   1368   } else {
   1369     return 0;
   1370   }
   1371 }
   1372 
   1373 
// Compacts the map after snapshot generation: entries whose objects
// were accessed survive (with their accessed flag reset and indices
// re-packed into a fresh list); untouched entries are considered dead
// and removed from the hash map.
void HeapObjectsMap::RemoveDeadEntries() {
  List<EntryInfo>* new_entries = new List<EntryInfo>();
  List<void*> dead_entries;
  for (HashMap::Entry* entry = entries_map_.Start();
       entry != NULL;
       entry = entries_map_.Next(entry)) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_->at(entry_index);
    if (entry_info.accessed) {
      entry->value = reinterpret_cast<void*>(new_entries->length());
      new_entries->Add(EntryInfo(entry_info.id, false));
    } else {
      // Removal is deferred: mutating the map while iterating it is
      // not safe.
      dead_entries.Add(entry->key);
    }
  }
  for (int i = 0; i < dead_entries.length(); ++i) {
    void* raw_entry = dead_entries[i];
    entries_map_.Remove(
        raw_entry, AddressHash(reinterpret_cast<Address>(raw_entry)));
  }
  delete entries_;
  entries_ = new_entries;
}
   1398 
   1399 
// Derives a stable id for an embedder-provided retained object by
// mixing its hash, its label and (when available) its element count.
// The final left shift makes the id even, keeping embedder ids disjoint
// from the odd ids handed out for native objects.
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= HashSequentialString(label,
                             static_cast<int>(strlen(label)),
                             HEAP->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
   1412 
   1413 
// Object-move tracking stays off until the first snapshot is requested
// (see NewSnapshot).
HeapSnapshotsCollection::HeapSnapshotsCollection()
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()) {
}
   1419 
   1420 
// Iteration callback used by ~HeapSnapshotsCollection.
static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}
   1424 
   1425 
// Destroys the token enumerator and every snapshot still registered.
HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}
   1430 
   1431 
// Creates a new snapshot. It is not registered in snapshots_ until
// SnapshotGenerationFinished() is called with it.
HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true;  // Start watching for heap objects moves.
  return new HeapSnapshot(this, type, name, uid);
}
   1438 
   1439 
// Finalizes id bookkeeping and, when generation succeeded (|snapshot|
// non-NULL), registers the snapshot in both the list and the uid map.
void HeapSnapshotsCollection::SnapshotGenerationFinished(
    HeapSnapshot* snapshot) {
  ids_.SnapshotGenerationFinished();
  if (snapshot != NULL) {
    snapshots_.Add(snapshot);
    HashMap::Entry* entry =
        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                               static_cast<uint32_t>(snapshot->uid()),
                               true);
    ASSERT(entry->value == NULL);  // Uids must be unique.
    entry->value = snapshot;
  }
}
   1453 
   1454 
// Looks up a registered snapshot by uid; returns NULL if not found.
HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
  HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                 static_cast<uint32_t>(uid),
                                                 false);
  return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
}
   1461 
   1462 
// Unregisters a snapshot from both the list and the uid map; does not
// delete it (see HeapSnapshot::Delete).
void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
  snapshots_.RemoveElement(snapshot);
  unsigned uid = snapshot->uid();
  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
                         static_cast<uint32_t>(uid));
}
   1469 
   1470 
// Resolves a snapshot object id back to a live heap object, or returns
// an empty handle if no reachable object has that id.
Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
    SnapshotObjectId id) {
  // First perform a full GC in order to avoid dead objects.
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::FindHeapObjectById");
  AssertNoAllocation no_allocation;
  HeapObject* object = NULL;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  // Make sure that object with the given id is still reachable.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    if (ids_.FindObject(obj->address()) == id) {
      ASSERT(object == NULL);  // Ids are unique per object.
      object = obj;
      // Can't break -- kFilterUnreachable requires full heap traversal.
    }
  }
  return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
}
   1491 
   1492 
// Sentinel value distinguishing "seen but not yet allocated" entries
// (and untagged HeapObjectsSet members) from real HeapEntry pointers.
HeapEntry* const HeapEntriesMap::kHeapEntryPlaceholder =
    reinterpret_cast<HeapEntry*>(1);
   1495 
// Counters accumulate totals used to size the snapshot's raw buffer.
HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch),
      entries_count_(0),
      total_children_count_(0),
      total_retainers_count_(0) {
}
   1502 
   1503 
// Frees the EntryInfo records owned by the map (created in Pair()).
HeapEntriesMap::~HeapEntriesMap() {
  for (HashMap::Entry* p = entries_.Start(); p != NULL; p = entries_.Next(p)) {
    delete reinterpret_cast<EntryInfo*>(p->value);
  }
}
   1509 
   1510 
// Second pass: replaces every placeholder with a real HeapEntry sized
// by the children/retainers counts collected during the first pass,
// then resets the counts for reuse as fill cursors.
void HeapEntriesMap::AllocateEntries() {
  for (HashMap::Entry* p = entries_.Start();
       p != NULL;
       p = entries_.Next(p)) {
    EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(p->value);
    entry_info->entry = entry_info->allocator->AllocateEntry(
        p->key,
        entry_info->children_count,
        entry_info->retainers_count);
    ASSERT(entry_info->entry != NULL);
    ASSERT(entry_info->entry != kHeapEntryPlaceholder);
    entry_info->children_count = 0;
    entry_info->retainers_count = 0;
  }
}
   1526 
   1527 
   1528 HeapEntry* HeapEntriesMap::Map(HeapThing thing) {
   1529   HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
   1530   if (cache_entry != NULL) {
   1531     EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(cache_entry->value);
   1532     return entry_info->entry;
   1533   } else {
   1534     return NULL;
   1535   }
   1536 }
   1537 
   1538 
// Registers the association thing -> (entry, allocator). Each thing
// may be paired only once.
void HeapEntriesMap::Pair(
    HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = new EntryInfo(entry, allocator);
  ++entries_count_;
}
   1546 
   1547 
// Counts one reference from |from| to |to|: bumps the source's children
// count and the target's retainers count (plus the global totals).
// Optionally reports the pre-increment counts, which serve as the next
// free child/retainer slot indices during the fill pass.
void HeapEntriesMap::CountReference(HeapThing from, HeapThing to,
                                    int* prev_children_count,
                                    int* prev_retainers_count) {
  HashMap::Entry* from_cache_entry = entries_.Lookup(from, Hash(from), false);
  HashMap::Entry* to_cache_entry = entries_.Lookup(to, Hash(to), false);
  ASSERT(from_cache_entry != NULL);
  ASSERT(to_cache_entry != NULL);
  EntryInfo* from_entry_info =
      reinterpret_cast<EntryInfo*>(from_cache_entry->value);
  EntryInfo* to_entry_info =
      reinterpret_cast<EntryInfo*>(to_cache_entry->value);
  if (prev_children_count)
    *prev_children_count = from_entry_info->children_count;
  if (prev_retainers_count)
    *prev_retainers_count = to_entry_info->retainers_count;
  ++from_entry_info->children_count;
  ++to_entry_info->retainers_count;
  ++total_children_count_;
  ++total_retainers_count_;
}
   1568 
   1569 
// A set of heap objects backed by a pointer-keyed hash map.
HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}
   1573 
   1574 
// Removes all objects (and tags) from the set.
void HeapObjectsSet::Clear() {
  entries_.Clear();
}
   1578 
   1579 
   1580 bool HeapObjectsSet::Contains(Object* obj) {
   1581   if (!obj->IsHeapObject()) return false;
   1582   HeapObject* object = HeapObject::cast(obj);
   1583   HashMap::Entry* cache_entry =
   1584       entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
   1585   return cache_entry != NULL;
   1586 }
   1587 
   1588 
// Adds |obj| to the set; non-heap objects (Smis) are ignored. An
// already-tagged entry keeps its tag (only NULL values are replaced by
// the placeholder).
void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  if (cache_entry->value == NULL) {
    cache_entry->value = HeapEntriesMap::kHeapEntryPlaceholder;
  }
}
   1598 
   1599 
   1600 const char* HeapObjectsSet::GetTag(Object* obj) {
   1601   HeapObject* object = HeapObject::cast(obj);
   1602   HashMap::Entry* cache_entry =
   1603       entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
   1604   if (cache_entry != NULL
   1605       && cache_entry->value != HeapEntriesMap::kHeapEntryPlaceholder) {
   1606     return reinterpret_cast<const char*>(cache_entry->value);
   1607   } else {
   1608     return NULL;
   1609   }
   1610 }
   1611 
   1612 
// Inserts |obj| (if needed) and attaches |tag| to it, overwriting any
// previous tag or placeholder. Non-heap objects (Smis) are ignored.
void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}
   1620 
   1621 
// Fake HeapObject pointers standing in for the synthetic root entries;
// the reserved snapshot ids double as fake addresses, and GC subroots
// occupy the half-open range [kFirstGcSubrootObject,
// kLastGcSubrootObject) (see V8HeapExplorer::AddEntry).
HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
   1634 
   1635 
// Constructs an explorer that fills |snapshot| with entries for V8 heap
// objects, reporting progress through |progress|. The filler is not
// owned here; it is supplied per-operation (see IterateAndExtract-
// References) and reset to NULL when the operation finishes.
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : heap_(Isolate::Current()->heap()),
      snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL) {
}
   1645 
   1646 
// Nothing to release: all pointer members reference externally owned
// objects.
V8HeapExplorer::~V8HeapExplorer() {
}
   1649 
   1650 
   1651 HeapEntry* V8HeapExplorer::AllocateEntry(
   1652     HeapThing ptr, int children_count, int retainers_count) {
   1653   return AddEntry(
   1654       reinterpret_cast<HeapObject*>(ptr), children_count, retainers_count);
   1655 }
   1656 
   1657 
// Allocates a snapshot entry for |object|, deriving the entry's type and
// display name from the object's kind. The synthetic objects (root, GC
// roots, GC subroots) are recognized first by their fake pointer values
// (see the kInternalRootObject & co. definitions above). The order of
// the remaining tests matters: e.g. JSFunction and JSRegExp must be
// checked before the generic JSObject case.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    int children_count,
                                    int retainers_count) {
  if (object == kInternalRootObject) {
    ASSERT(retainers_count == 0);
    return snapshot_->AddRootEntry(children_count);
  } else if (object == kGcRootsObject) {
    return snapshot_->AddGcRootsEntry(children_count, retainers_count);
  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
    // Fake subroot pointers encode the subroot's order in the range.
    return snapshot_->AddGcSubrootEntry(
        GetGcSubrootOrder(object),
        children_count,
        retainers_count);
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    // Bound functions have no meaningful shared name of their own.
    const char* name = shared->bound() ? "native_bind" :
        collection_->names()->GetName(String::cast(shared->name()));
    return AddEntry(object,
                    HeapEntry::kClosure,
                    name,
                    children_count,
                    retainers_count);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()),
                    children_count,
                    retainers_count);
  } else if (object->IsJSObject()) {
    // Plain JS objects get their name later, in SetObjectName.
    return AddEntry(object,
                    HeapEntry::kObject,
                    "",
                    children_count,
                    retainers_count);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)),
                    children_count,
                    retainers_count);
  } else if (object->IsCode()) {
    return AddEntry(object,
                    HeapEntry::kCode,
                    "",
                    children_count,
                    retainers_count);
  } else if (object->IsSharedFunctionInfo()) {
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(String::cast(shared->name())),
                    children_count,
                    retainers_count);
  } else if (object->IsScript()) {
    Script* script = Script::cast(object);
    // Scripts may have non-string names (e.g. undefined for eval code).
    return AddEntry(object,
                    HeapEntry::kCode,
                    script->name()->IsString() ?
                        collection_->names()->GetName(
                            String::cast(script->name()))
                        : "",
                    children_count,
                    retainers_count);
  } else if (object->IsGlobalContext()) {
    return AddEntry(object,
                    HeapEntry::kHidden,
                    "system / GlobalContext",
                    children_count,
                    retainers_count);
  } else if (object->IsContext()) {
    return AddEntry(object,
                    HeapEntry::kHidden,
                    "system / Context",
                    children_count,
                    retainers_count);
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    // Arrays may have been tagged with a descriptive name earlier
    // (see the TagObject calls in ExtractReferences).
    const char* tag = objects_tags_.GetTag(object);
    return AddEntry(object,
                    HeapEntry::kArray,
                    tag != NULL ? tag : "",
                    children_count,
                    retainers_count);
  } else if (object->IsHeapNumber()) {
    return AddEntry(object,
                    HeapEntry::kHeapNumber,
                    "number",
                    children_count,
                    retainers_count);
  }
  // Everything else is an internal object hidden from the user by default.
  return AddEntry(object,
                  HeapEntry::kHidden,
                  GetSystemEntryName(object),
                  children_count,
                  retainers_count);
}
   1758 
   1759 
   1760 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
   1761                                     HeapEntry::Type type,
   1762                                     const char* name,
   1763                                     int children_count,
   1764                                     int retainers_count) {
   1765   return snapshot_->AddEntry(type,
   1766                              name,
   1767                              collection_->GetObjectId(object->address()),
   1768                              object->Size(),
   1769                              children_count,
   1770                              retainers_count);
   1771 }
   1772 
   1773 
   1774 class GcSubrootsEnumerator : public ObjectVisitor {
   1775  public:
   1776   GcSubrootsEnumerator(
   1777       SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
   1778       : filler_(filler),
   1779         explorer_(explorer),
   1780         previous_object_count_(0),
   1781         object_count_(0) {
   1782   }
   1783   void VisitPointers(Object** start, Object** end) {
   1784     object_count_ += end - start;
   1785   }
   1786   void Synchronize(VisitorSynchronization::SyncTag tag) {
   1787     // Skip empty subroots.
   1788     if (previous_object_count_ != object_count_) {
   1789       previous_object_count_ = object_count_;
   1790       filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
   1791     }
   1792   }
   1793  private:
   1794   SnapshotFillerInterface* filler_;
   1795   V8HeapExplorer* explorer_;
   1796   intptr_t previous_object_count_;
   1797   intptr_t object_count_;
   1798 };
   1799 
   1800 
   1801 void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
   1802   filler->AddEntry(kInternalRootObject, this);
   1803   filler->AddEntry(kGcRootsObject, this);
   1804   GcSubrootsEnumerator enumerator(filler, this);
   1805   heap_->IterateRoots(&enumerator, VISIT_ALL);
   1806 }
   1807 
   1808 
// Returns a "system / ..." display name for internal (hidden) objects,
// chosen by the instance type stored in the object's map. The macro
// expands one case per struct type from STRUCT_LIST.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE: return "system / Map";
    case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
   1822 
   1823 
   1824 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
   1825   int objects_count = 0;
   1826   for (HeapObject* obj = iterator->next();
   1827        obj != NULL;
   1828        obj = iterator->next()) {
   1829     objects_count++;
   1830   }
   1831   return objects_count;
   1832 }
   1833 
   1834 
// Visits all pointer fields of an object and reports them as hidden
// (indexed) references. Fields already reported as named references are
// skipped: the Set*Reference helpers tag such fields in place via
// MarkVisitedField, and this visitor detects and removes the tag.
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             HeapEntry* parent_entry)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent_entry),
        next_index_(1) {
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
    }
  }
  // Tags the field at |offset| so that VisitPointers will skip it.
  // The kFailureTag bit pattern is used as the mark because a real heap
  // pointer stored in a field can never look like a failure.
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;  // Negative offset means "field not present".
    Address field = obj->address() + offset;
    ASSERT(!Memory::Object_at(field)->IsFailure());
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    *field |= kFailureTag;
  }

 private:
  // Returns true if the field was marked by MarkVisitedField; restores
  // the original heap pointer as a side effect.
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  HeapEntry* parent_;
  int next_index_;
};
   1874 
   1875 
// Reports all outgoing references of |obj| to the filler. Type-specific
// branches add named/shortcut/weak references for well-known fields;
// IndexedReferencesExtractor then reports the remaining pointer fields
// as hidden references (except for strings, whose raw fields are not
// interesting). Named references mark their fields (MarkVisitedField)
// so they are not reported twice.
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* entry = GetEntry(obj);
  if (entry == NULL) return;  // No interest in this object.

  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
    // We need to reference JS global objects from snapshot's root.
    // We use JSGlobalProxy because this is what embedder (e.g. browser)
    // uses for the global object.
    JSGlobalProxy* proxy = JSGlobalProxy::cast(obj);
    SetRootShortcutReference(proxy->map()->prototype());
  } else if (obj->IsJSObject()) {
    JSObject* js_obj = JSObject::cast(obj);
    ExtractClosureReferences(js_obj, entry);
    ExtractPropertyReferences(js_obj, entry);
    ExtractElementReferences(js_obj, entry);
    ExtractInternalReferences(js_obj, entry);
    SetPropertyReference(
        obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
    if (obj->IsJSFunction()) {
      JSFunction* js_fun = JSFunction::cast(js_obj);
      Object* proto_or_map = js_fun->prototype_or_initial_map();
      if (!proto_or_map->IsTheHole()) {
        // The slot holds either the function's prototype or (once
        // instances exist) the initial map, which embeds the prototype.
        if (!proto_or_map->IsMap()) {
          SetPropertyReference(
              obj, entry,
              heap_->prototype_symbol(), proto_or_map,
              NULL,
              JSFunction::kPrototypeOrInitialMapOffset);
        } else {
          SetPropertyReference(
              obj, entry,
              heap_->prototype_symbol(), js_fun->prototype());
        }
      }
      SharedFunctionInfo* shared_info = js_fun->shared();
      // JSFunction has either bindings or literals and never both.
      bool bound = shared_info->bound();
      TagObject(js_fun->literals_or_bindings(),
                bound ? "(function bindings)" : "(function literals)");
      SetInternalReference(js_fun, entry,
                           bound ? "bindings" : "literals",
                           js_fun->literals_or_bindings(),
                           JSFunction::kLiteralsOffset);
      SetInternalReference(js_fun, entry,
                           "shared", shared_info,
                           JSFunction::kSharedFunctionInfoOffset);
      TagObject(js_fun->unchecked_context(), "(context)");
      SetInternalReference(js_fun, entry,
                           "context", js_fun->unchecked_context(),
                           JSFunction::kContextOffset);
      // Fields past kNonWeakFieldsEndOffset are treated as weak by GC,
      // so report them as weak references here as well.
      for (int i = JSFunction::kNonWeakFieldsEndOffset;
           i < JSFunction::kSize;
           i += kPointerSize) {
        SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
      }
    }
    TagObject(js_obj->properties(), "(object properties)");
    SetInternalReference(obj, entry,
                         "properties", js_obj->properties(),
                         JSObject::kPropertiesOffset);
    TagObject(js_obj->elements(), "(object elements)");
    SetInternalReference(obj, entry,
                         "elements", js_obj->elements(),
                         JSObject::kElementsOffset);
  } else if (obj->IsString()) {
    if (obj->IsConsString()) {
      ConsString* cs = ConsString::cast(obj);
      SetInternalReference(obj, entry, 1, cs->first());
      SetInternalReference(obj, entry, 2, cs->second());
    }
    if (obj->IsSlicedString()) {
      SlicedString* ss = SlicedString::cast(obj);
      SetInternalReference(obj, entry, "parent", ss->parent());
    }
    // String contents have no further pointers worth reporting.
    extract_indexed_refs = false;
  } else if (obj->IsGlobalContext()) {
    Context* context = Context::cast(obj);
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->data(), "(context data)");
    // Slots from FIRST_WEAK_SLOT on are weak in the global context.
    for (int i = Context::FIRST_WEAK_SLOT;
         i < Context::GLOBAL_CONTEXT_SLOTS;
         ++i) {
      SetWeakReference(obj, entry,
                       i, context->get(i),
                       FixedArray::OffsetOfElementAt(i));
    }
  } else if (obj->IsMap()) {
    Map* map = Map::cast(obj);
    SetInternalReference(obj, entry,
                         "prototype", map->prototype(), Map::kPrototypeOffset);
    SetInternalReference(obj, entry,
                         "constructor", map->constructor(),
                         Map::kConstructorOffset);
    if (!map->instance_descriptors()->IsEmpty()) {
      TagObject(map->instance_descriptors(), "(map descriptors)");
      SetInternalReference(obj, entry,
                           "descriptors", map->instance_descriptors(),
                           Map::kInstanceDescriptorsOrBitField3Offset);
    }
    if (map->prototype_transitions() != heap_->empty_fixed_array()) {
      TagObject(map->prototype_transitions(), "(prototype transitions)");
      SetInternalReference(obj,
                           entry,
                           "prototype_transitions",
                           map->prototype_transitions(),
                           Map::kPrototypeTransitionsOffset);
    }
    SetInternalReference(obj, entry,
                         "code_cache", map->code_cache(),
                         Map::kCodeCacheOffset);
  } else if (obj->IsSharedFunctionInfo()) {
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
    SetInternalReference(obj, entry,
                         "name", shared->name(),
                         SharedFunctionInfo::kNameOffset);
    SetInternalReference(obj, entry,
                         "code", shared->unchecked_code(),
                         SharedFunctionInfo::kCodeOffset);
    TagObject(shared->scope_info(), "(function scope info)");
    SetInternalReference(obj, entry,
                         "scope_info", shared->scope_info(),
                         SharedFunctionInfo::kScopeInfoOffset);
    SetInternalReference(obj, entry,
                         "instance_class_name", shared->instance_class_name(),
                         SharedFunctionInfo::kInstanceClassNameOffset);
    SetInternalReference(obj, entry,
                         "script", shared->script(),
                         SharedFunctionInfo::kScriptOffset);
    SetWeakReference(obj, entry,
                     1, shared->initial_map(),
                     SharedFunctionInfo::kInitialMapOffset);
  } else if (obj->IsScript()) {
    Script* script = Script::cast(obj);
    SetInternalReference(obj, entry,
                         "source", script->source(),
                         Script::kSourceOffset);
    SetInternalReference(obj, entry,
                         "name", script->name(),
                         Script::kNameOffset);
    SetInternalReference(obj, entry,
                         "data", script->data(),
                         Script::kDataOffset);
    SetInternalReference(obj, entry,
                         "context_data", script->context_data(),
                         Script::kContextOffset);
    TagObject(script->line_ends(), "(script line ends)");
    SetInternalReference(obj, entry,
                         "line_ends", script->line_ends(),
                         Script::kLineEndsOffset);
  } else if (obj->IsCodeCache()) {
    CodeCache* code_cache = CodeCache::cast(obj);
    TagObject(code_cache->default_cache(), "(default code cache)");
    SetInternalReference(obj, entry,
                         "default_cache", code_cache->default_cache(),
                         CodeCache::kDefaultCacheOffset);
    TagObject(code_cache->normal_type_cache(), "(code type cache)");
    SetInternalReference(obj, entry,
                         "type_cache", code_cache->normal_type_cache(),
                         CodeCache::kNormalTypeCacheOffset);
  } else if (obj->IsCode()) {
    Code* code = Code::cast(obj);
    TagObject(code->unchecked_relocation_info(), "(code relocation info)");
    TagObject(code->unchecked_deoptimization_data(), "(code deopt data)");
  }
  if (extract_indexed_refs) {
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}
   2050 
   2051 
// Reports references retained through a function's closure: for bound
// functions, the bound receiver/target/arguments from the bindings
// array; otherwise, the context-allocated locals and (if present) the
// function variable, resolved via the scope info of the context owner.
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj,
                                              HeapEntry* entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  Context* context = func->context();
  ScopeInfo* scope_info = context->closure()->shared()->scope_info();

  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      // Argument slots follow the fixed bound-this/bound-function slots.
      const char* reference_name = collection_->names()->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  } else {
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      // Context locals start right after the fixed context header slots.
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetClosureReference(js_obj, entry, local_name, context->get(idx));
    }

    // Add function variable.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      // The function variable slot comes after all context locals.
      int idx = Context::MIN_CONTEXT_SLOTS + context_locals;
#ifdef DEBUG
      VariableMode mode;
      ASSERT(idx == scope_info->FunctionContextSlotIndex(name, &mode));
#endif
      SetClosureReference(js_obj, entry, name, context->get(idx));
    }
  }
}
   2095 
   2096 
// Reports property references of |js_obj|. Fast-mode objects are walked
// via the map's descriptor array (in-object fields, overflow fields,
// constant functions, and accessor pairs); slow-mode objects via the
// property dictionary. For global objects, a shortcut reference through
// the property cell to the cell's value is added as well.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
                                               HeapEntry* entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);
          // Fields below inobject_properties() live inside the object;
          // the rest are stored in the properties backing array.
          if (index < js_obj->map()->inobject_properties()) {
            SetPropertyReference(
                js_obj, entry,
                descs->GetKey(i), js_obj->InObjectPropertyAt(index),
                NULL,
                js_obj->GetInObjectPropertyOffset(index));
          } else {
            SetPropertyReference(
                js_obj, entry,
                descs->GetKey(i), js_obj->FastPropertyAt(index));
          }
          break;
        }
        case CONSTANT_FUNCTION:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstantFunction(i));
          break;
        case CALLBACKS: {
          Object* callback_obj = descs->GetValue(i);
          if (callback_obj->IsAccessorPair()) {
            AccessorPair* accessors = AccessorPair::cast(callback_obj);
            // Getter/setter slots may be NULL-like; report each present
            // accessor under a decorated name.
            if (Object* getter = accessors->getter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   getter, "get-%s");
            }
            if (Object* setter = accessors->setter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   setter, "set-%s");
            }
          }
          break;
        }
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
        case MAP_TRANSITION:  // we do not care about transitions here...
        case ELEMENTS_TRANSITION:
        case CONSTANT_TRANSITION:
        case NULL_DESCRIPTOR:  // ... and not about "holes"
          break;
      }
    }
  } else {
    StringDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        SetPropertyReference(
            js_obj, entry, String::cast(k), target);
        // We assume that global objects can only have slow properties.
        if (target->IsJSGlobalPropertyCell()) {
          SetPropertyShortcutReference(js_obj,
                                       entry,
                                       String::cast(k),
                                       JSGlobalPropertyCell::cast(
                                           target)->value());
        }
      }
    }
  }
}
   2169 
   2170 
// Reports element (indexed) references of |js_obj|, handling both fast
// (contiguous FixedArray, holes skipped) and dictionary-backed elements.
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj,
                                              HeapEntry* entry) {
  if (js_obj->HasFastElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    // For JSArrays the logical length may be smaller than the backing
    // store's capacity.
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}
   2196 
   2197 
   2198 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj,
   2199                                                HeapEntry* entry) {
   2200   int length = js_obj->GetInternalFieldCount();
   2201   for (int i = 0; i < length; ++i) {
   2202     Object* o = js_obj->GetInternalField(i);
   2203     SetInternalReference(
   2204         js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
   2205   }
   2206 }
   2207 
   2208 
   2209 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
   2210   Heap* heap = object->GetHeap();
   2211   if (object->IsJSFunction()) return heap->closure_symbol();
   2212   String* constructor_name = object->constructor_name();
   2213   if (constructor_name == heap->Object_symbol()) {
   2214     // Look up an immediate "constructor" property, if it is a function,
   2215     // return its name. This is for instances of binding objects, which
   2216     // have prototype constructor type "Object".
   2217     Object* constructor_prop = NULL;
   2218     LookupResult result(heap->isolate());
   2219     object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
   2220     if (result.IsProperty()) {
   2221       constructor_prop = result.GetLazyValue();
   2222     }
   2223     if (constructor_prop->IsJSFunction()) {
   2224       Object* maybe_name = JSFunction::cast(constructor_prop)->shared()->name();
   2225       if (maybe_name->IsString()) {
   2226         String* name = String::cast(maybe_name);
   2227         if (name->length() > 0) return name;
   2228       }
   2229     }
   2230   }
   2231   return object->constructor_name();
   2232 }
   2233 
   2234 
   2235 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
   2236   if (!obj->IsHeapObject()) return NULL;
   2237   return filler_->FindOrAddEntry(obj, this);
   2238 }
   2239 
   2240 
// Collects root references in two passes over the roots list: first only
// the strong roots, then all roots (the caller flips the mode with
// SetCollectingAllReferences between Heap::IterateRoots calls). Since
// both passes visit roots in the same order, a reference present in the
// all-roots list but not matched in the strong list is weak.
// FillReferences replays the collected references into the explorer,
// tracking which GC subroot (tag) each one belongs to.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Marks that all references up to (exclusive) |index| in
  // all_references_ belong to the subroot identified by |tag|.
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  RootsReferencesExtractor()
      : collecting_all_references_(false),
        previous_reference_count_(0) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    // First register every subroot as a child of the GC roots entry.
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    // Merge-walk the strong and all-references lists: an all-list entry
    // that matches the next strong entry is strong, otherwise weak.
    int strong_index = 0, all_index = 0, tags_index = 0;
    while (all_index < all_references_.length()) {
      if (strong_index < strong_references_.length() &&
          strong_references_[strong_index] == all_references_[all_index]) {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        false,
                                        all_references_[all_index++]);
        ++strong_index;
      } else {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        true,
                                        all_references_[all_index++]);
      }
      // Advance to the next subroot tag once its slice is exhausted.
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Record the subroot boundary only during the all-references pass,
    // and only if the subroot actually contributed references.
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
};
   2303 
   2304 
// Extracts references from every reachable heap object and then from the
// GC roots (strong and weak, via RootsReferencesExtractor). Returns
// false if the progress reporter requested an interruption. Note that a
// filtering heap iteration must always run to completion, so after an
// interruption the loop keeps iterating but stops extracting.
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }
  SetRootGcRootsReference();
  RootsReferencesExtractor extractor;
  // Two passes over the roots: strong-only, then all. The extractor
  // infers weakness from the difference between the two lists.
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);
  filler_ = NULL;
  return progress_->ProgressReport(false);
}
   2334 
   2335 
   2336 bool V8HeapExplorer::IterateAndSetObjectNames(SnapshotFillerInterface* filler) {
   2337   HeapIterator iterator(HeapIterator::kFilterUnreachable);
   2338   filler_ = filler;
   2339   for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
   2340     SetObjectName(obj);
   2341   }
   2342   return true;
   2343 }
   2344 
   2345 
   2346 void V8HeapExplorer::SetObjectName(HeapObject* object) {
   2347   if (!object->IsJSObject() || object->IsJSRegExp() || object->IsJSFunction()) {
   2348     return;
   2349   }
   2350   const char* name = collection_->names()->GetName(
   2351       GetConstructorName(JSObject::cast(object)));
   2352   if (object->IsJSGlobalObject()) {
   2353     const char* tag = objects_tags_.GetTag(object);
   2354     if (tag != NULL) {
   2355       name = collection_->names()->GetFormatted("%s / %s", name, tag);
   2356     }
   2357   }
   2358   GetEntry(object)->set_name(name);
   2359 }
   2360 
   2361 
   2362 void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
   2363                                          HeapEntry* parent_entry,
   2364                                          String* reference_name,
   2365                                          Object* child_obj) {
   2366   HeapEntry* child_entry = GetEntry(child_obj);
   2367   if (child_entry != NULL) {
   2368     filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
   2369                                parent_obj,
   2370                                parent_entry,
   2371                                collection_->names()->GetName(reference_name),
   2372                                child_obj,
   2373                                child_entry);
   2374   }
   2375 }
   2376 
   2377 
   2378 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
   2379                                             HeapEntry* parent_entry,
   2380                                             const char* reference_name,
   2381                                             Object* child_obj) {
   2382   HeapEntry* child_entry = GetEntry(child_obj);
   2383   if (child_entry != NULL) {
   2384     filler_->SetNamedReference(HeapGraphEdge::kShortcut,
   2385                                parent_obj,
   2386                                parent_entry,
   2387                                reference_name,
   2388                                child_obj,
   2389                                child_entry);
   2390   }
   2391 }
   2392 
   2393 
   2394 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
   2395                                          HeapEntry* parent_entry,
   2396                                          int index,
   2397                                          Object* child_obj) {
   2398   HeapEntry* child_entry = GetEntry(child_obj);
   2399   if (child_entry != NULL) {
   2400     filler_->SetIndexedReference(HeapGraphEdge::kElement,
   2401                                  parent_obj,
   2402                                  parent_entry,
   2403                                  index,
   2404                                  child_obj,
   2405                                  child_entry);
   2406   }
   2407 }
   2408 
   2409 
   2410 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
   2411                                           HeapEntry* parent_entry,
   2412                                           const char* reference_name,
   2413                                           Object* child_obj,
   2414                                           int field_offset) {
   2415   HeapEntry* child_entry = GetEntry(child_obj);
   2416   if (child_entry != NULL) {
   2417     filler_->SetNamedReference(HeapGraphEdge::kInternal,
   2418                                parent_obj,
   2419                                parent_entry,
   2420                                reference_name,
   2421                                child_obj,
   2422                                child_entry);
   2423     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
   2424   }
   2425 }
   2426 
   2427 
   2428 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
   2429                                           HeapEntry* parent_entry,
   2430                                           int index,
   2431                                           Object* child_obj,
   2432                                           int field_offset) {
   2433   HeapEntry* child_entry = GetEntry(child_obj);
   2434   if (child_entry != NULL) {
   2435     filler_->SetNamedReference(HeapGraphEdge::kInternal,
   2436                                parent_obj,
   2437                                parent_entry,
   2438                                collection_->names()->GetName(index),
   2439                                child_obj,
   2440                                child_entry);
   2441     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
   2442   }
   2443 }
   2444 
   2445 
   2446 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
   2447                                         HeapEntry* parent_entry,
   2448                                         int index,
   2449                                         Object* child_obj) {
   2450   HeapEntry* child_entry = GetEntry(child_obj);
   2451   if (child_entry != NULL) {
   2452     filler_->SetIndexedReference(HeapGraphEdge::kHidden,
   2453                                  parent_obj,
   2454                                  parent_entry,
   2455                                  index,
   2456                                  child_obj,
   2457                                  child_entry);
   2458   }
   2459 }
   2460 
   2461 
   2462 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
   2463                                       HeapEntry* parent_entry,
   2464                                       int index,
   2465                                       Object* child_obj,
   2466                                       int field_offset) {
   2467   HeapEntry* child_entry = GetEntry(child_obj);
   2468   if (child_entry != NULL) {
   2469     filler_->SetIndexedReference(HeapGraphEdge::kWeak,
   2470                                  parent_obj,
   2471                                  parent_entry,
   2472                                  index,
   2473                                  child_obj,
   2474                                  child_entry);
   2475     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
   2476   }
   2477 }
   2478 
   2479 
   2480 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
   2481                                           HeapEntry* parent_entry,
   2482                                           String* reference_name,
   2483                                           Object* child_obj,
   2484                                           const char* name_format_string,
   2485                                           int field_offset) {
   2486   HeapEntry* child_entry = GetEntry(child_obj);
   2487   if (child_entry != NULL) {
   2488     HeapGraphEdge::Type type = reference_name->length() > 0 ?
   2489         HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
   2490     const char* name = name_format_string  != NULL ?
   2491         collection_->names()->GetFormatted(
   2492             name_format_string,
   2493             *reference_name->ToCString(DISALLOW_NULLS,
   2494                                        ROBUST_STRING_TRAVERSAL)) :
   2495         collection_->names()->GetName(reference_name);
   2496 
   2497     filler_->SetNamedReference(type,
   2498                                parent_obj,
   2499                                parent_entry,
   2500                                name,
   2501                                child_obj,
   2502                                child_entry);
   2503     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
   2504   }
   2505 }
   2506 
   2507 
   2508 void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
   2509                                                   HeapEntry* parent_entry,
   2510                                                   String* reference_name,
   2511                                                   Object* child_obj) {
   2512   HeapEntry* child_entry = GetEntry(child_obj);
   2513   if (child_entry != NULL) {
   2514     filler_->SetNamedReference(HeapGraphEdge::kShortcut,
   2515                                parent_obj,
   2516                                parent_entry,
   2517                                collection_->names()->GetName(reference_name),
   2518                                child_obj,
   2519                                child_entry);
   2520   }
   2521 }
   2522 
   2523 
   2524 void V8HeapExplorer::SetRootGcRootsReference() {
   2525   filler_->SetIndexedAutoIndexReference(
   2526       HeapGraphEdge::kElement,
   2527       kInternalRootObject, snapshot_->root(),
   2528       kGcRootsObject, snapshot_->gc_roots());
   2529 }
   2530 
   2531 
   2532 void V8HeapExplorer::SetRootShortcutReference(Object* child_obj) {
   2533   HeapEntry* child_entry = GetEntry(child_obj);
   2534   ASSERT(child_entry != NULL);
   2535   filler_->SetNamedAutoIndexReference(
   2536       HeapGraphEdge::kShortcut,
   2537       kInternalRootObject, snapshot_->root(),
   2538       child_obj, child_entry);
   2539 }
   2540 
   2541 
   2542 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
   2543   filler_->SetIndexedAutoIndexReference(
   2544       HeapGraphEdge::kElement,
   2545       kGcRootsObject, snapshot_->gc_roots(),
   2546       GetNthGcSubrootObject(tag), snapshot_->gc_subroot(tag));
   2547 }
   2548 
   2549 
   2550 void V8HeapExplorer::SetGcSubrootReference(
   2551     VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
   2552   HeapEntry* child_entry = GetEntry(child_obj);
   2553   if (child_entry != NULL) {
   2554     filler_->SetIndexedAutoIndexReference(
   2555         is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
   2556         GetNthGcSubrootObject(tag), snapshot_->gc_subroot(tag),
   2557         child_obj, child_entry);
   2558   }
   2559 }
   2560 
   2561 
   2562 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
   2563   if (obj->IsHeapObject() &&
   2564       !obj->IsOddball() &&
   2565       obj != heap_->raw_unchecked_empty_byte_array() &&
   2566       obj != heap_->raw_unchecked_empty_fixed_array() &&
   2567       obj != heap_->raw_unchecked_empty_descriptor_array()) {
   2568     objects_tags_.SetTag(obj, tag);
   2569   }
   2570 }
   2571 
   2572 
   2573 class GlobalObjectsEnumerator : public ObjectVisitor {
   2574  public:
   2575   virtual void VisitPointers(Object** start, Object** end) {
   2576     for (Object** p = start; p < end; p++) {
   2577       if ((*p)->IsGlobalContext()) {
   2578         Context* context = Context::cast(*p);
   2579         JSObject* proxy = context->global_proxy();
   2580         if (proxy->IsJSGlobalProxy()) {
   2581           Object* global = proxy->map()->prototype();
   2582           if (global->IsJSGlobalObject()) {
   2583             objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
   2584           }
   2585         }
   2586       }
   2587     }
   2588   }
   2589   int count() { return objects_.length(); }
   2590   Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
   2591 
   2592  private:
   2593   List<Handle<JSGlobalObject> > objects_;
   2594 };
   2595 
   2596 
// Modifies heap. Must not be run during heap traversal.
// Tags each global object with its document URL (read from the embedder's
// global.document.URL property, when present) so SetObjectName can append
// it to the global object's entry name.
void V8HeapExplorer::TagGlobalObjects() {
  HandleScope scope;
  Isolate* isolate = Isolate::Current();
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  Handle<String> document_string =
      isolate->factory()->NewStringFromAscii(CStrVector("document"));
  Handle<String> url_string =
      isolate->factory()->NewStringFromAscii(CStrVector("URL"));
  // Phase 1 (may allocate): resolve each global's URL into an interned
  // C string.  URLs are staged in a plain array so that phase 2 can run
  // without allocating.
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    urls[i] = NULL;
    // Inner scope bounds the handles created per iteration.
    HandleScope scope;
    Handle<JSGlobalObject> global_obj = enumerator.at(i);
    Object* obj_document;
    if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
       obj_document->IsJSObject()) {
      JSObject* document = JSObject::cast(obj_document);
      Object* obj_url;
      if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
          obj_url->IsString()) {
        urls[i] = collection_->names()->GetName(String::cast(obj_url));
      }
    }
  }

  // Phase 2: record the tags.  SetTag stores raw object pointers, so no
  // allocation (and hence no GC) may happen past this point.
  AssertNoAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
   2631 
   2632 
// Routes embedder-classified global handles (those carrying a class id) to
// the NativeObjectsExplorer.
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    // This visitor is only driven by IterateAllRootsWithClassIds, which
    // reports exclusively through VisitEmbedderReference.
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;  // not owned
};
   2647 
   2648 
// Allocates snapshot entries for embedder-provided (native/synthetic) heap
// things.  All entries produced by one allocator share a single
// HeapEntry::Type.
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count);
 private:
  HeapSnapshot* snapshot_;
  // Cached from snapshot_.  Must stay declared after snapshot_: member
  // initialization follows declaration order and the initializer reads
  // snapshot_.
  HeapSnapshotsCollection* collection_;
  HeapEntry::Type entries_type_;
};
   2665 
   2666 
   2667 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(
   2668     HeapThing ptr, int children_count, int retainers_count) {
   2669   v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
   2670   intptr_t elements = info->GetElementCount();
   2671   intptr_t size = info->GetSizeInBytes();
   2672   return snapshot_->AddEntry(
   2673       entries_type_,
   2674       elements != -1 ?
   2675           collection_->names()->GetFormatted(
   2676               "%s / %" V8_PTR_PREFIX "d entries",
   2677               info->GetLabel(),
   2678               info->GetElementCount()) :
   2679           collection_->names()->GetCopy(info->GetLabel()),
   2680       HeapObjectsMap::GenerateId(info),
   2681       size != -1 ? static_cast<int>(size) : 0,
   2682       children_count,
   2683       retainers_count);
   2684 }
   2685 
   2686 
// Sets up allocators for native and synthetic snapshot entries.  The
// embedder is not queried here; that is deferred to FillRetainedObjects()
// (embedder_queried_ starts out false).
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  // Owned; released in the destructor.
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
   2701 
   2702 
   2703 NativeObjectsExplorer::~NativeObjectsExplorer() {
   2704   for (HashMap::Entry* p = objects_by_info_.Start();
   2705        p != NULL;
   2706        p = objects_by_info_.Next(p)) {
   2707     v8::RetainedObjectInfo* info =
   2708         reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
   2709     info->Dispose();
   2710     List<HeapObject*>* objects =
   2711         reinterpret_cast<List<HeapObject*>* >(p->value);
   2712     delete objects;
   2713   }
   2714   for (HashMap::Entry* p = native_groups_.Start();
   2715        p != NULL;
   2716        p = native_groups_.Next(p)) {
   2717     v8::RetainedObjectInfo* info =
   2718         reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
   2719     info->Dispose();
   2720   }
   2721   delete synthetic_entries_allocator_;
   2722   delete native_entries_allocator_;
   2723 }
   2724 
   2725 
// Returns the number of distinct embedder-retained object infos.  Queries
// the embedder on first use; FillRetainedObjects caches its result.
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
   2730 
   2731 
// Queries the embedder for retained (wrapped) objects.  Idempotent: results
// are cached and subsequent calls return immediately.
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = Isolate::Current();
  // Record objects that are joined into ObjectGroups.
  // NOTE(review): the GC prologue/epilogue callbacks appear to bracket this
  // query so the embedder can populate its object groups — confirm.
  isolate->heap()->CallGlobalGCPrologueCallback();
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info_ == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
    for (size_t j = 0; j < group->length_; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects_[j]);
      list->Add(obj);
      // Remember group membership so VisitSubtreeWrapper can skip these.
      in_groups_.Insert(obj);
    }
    group->info_ = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGlobalGCEpilogueCallback();
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}
   2756 
   2757 void NativeObjectsExplorer::FillImplicitReferences() {
   2758   Isolate* isolate = Isolate::Current();
   2759   List<ImplicitRefGroup*>* groups =
   2760       isolate->global_handles()->implicit_ref_groups();
   2761   for (int i = 0; i < groups->length(); ++i) {
   2762     ImplicitRefGroup* group = groups->at(i);
   2763     HeapObject* parent = *group->parent_;
   2764     HeapEntry* parent_entry =
   2765         filler_->FindOrAddEntry(parent, native_entries_allocator_);
   2766     ASSERT(parent_entry != NULL);
   2767     Object*** children = group->children_;
   2768     for (size_t j = 0; j < group->length_; ++j) {
   2769       Object* child = *children[j];
   2770       HeapEntry* child_entry =
   2771           filler_->FindOrAddEntry(child, native_entries_allocator_);
   2772       filler_->SetNamedReference(
   2773           HeapGraphEdge::kInternal,
   2774           parent, parent_entry,
   2775           "native",
   2776           child, child_entry);
   2777     }
   2778   }
   2779 }
   2780 
   2781 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
   2782     v8::RetainedObjectInfo* info) {
   2783   HashMap::Entry* entry =
   2784       objects_by_info_.Lookup(info, InfoHash(info), true);
   2785   if (entry->value != NULL) {
   2786     info->Dispose();
   2787   } else {
   2788     entry->value = new List<HeapObject*>(4);
   2789   }
   2790   return reinterpret_cast<List<HeapObject*>* >(entry->value);
   2791 }
   2792 
   2793 
   2794 bool NativeObjectsExplorer::IterateAndExtractReferences(
   2795     SnapshotFillerInterface* filler) {
   2796   filler_ = filler;
   2797   FillRetainedObjects();
   2798   FillImplicitReferences();
   2799   if (EstimateObjectsCount() > 0) {
   2800     for (HashMap::Entry* p = objects_by_info_.Start();
   2801          p != NULL;
   2802          p = objects_by_info_.Next(p)) {
   2803       v8::RetainedObjectInfo* info =
   2804           reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
   2805       SetNativeRootReference(info);
   2806       List<HeapObject*>* objects =
   2807           reinterpret_cast<List<HeapObject*>* >(p->value);
   2808       for (int i = 0; i < objects->length(); ++i) {
   2809         SetWrapperNativeReferences(objects->at(i), info);
   2810       }
   2811     }
   2812     SetRootNativeRootsReference();
   2813   }
   2814   filler_ = NULL;
   2815   return true;
   2816 }
   2817 
   2818 
// Synthetic RetainedObjectInfo representing a named native group.
// Instances self-destruct in Dispose(), so they must be heap-allocated.
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),  // label pointer as hash
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);  // guard against double-dispose
    disposed_ = true;
    delete this;
  }
  // Two infos are equivalent iff hashes match and labels compare equal.
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;  // not owned; must outlive this object
};
   2844 
   2845 
   2846 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
   2847     const char* label) {
   2848   const char* label_copy = collection_->names()->GetCopy(label);
   2849   uint32_t hash = HashSequentialString(label_copy,
   2850                                        static_cast<int>(strlen(label_copy)),
   2851                                        HEAP->HashSeed());
   2852   HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
   2853                                                 hash, true);
   2854   if (entry->value == NULL)
   2855     entry->value = new NativeGroupRetainedObjectInfo(label);
   2856   return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
   2857 }
   2858 
   2859 
   2860 void NativeObjectsExplorer::SetNativeRootReference(
   2861     v8::RetainedObjectInfo* info) {
   2862   HeapEntry* child_entry =
   2863       filler_->FindOrAddEntry(info, native_entries_allocator_);
   2864   ASSERT(child_entry != NULL);
   2865   NativeGroupRetainedObjectInfo* group_info =
   2866       FindOrAddGroupInfo(info->GetGroupLabel());
   2867   HeapEntry* group_entry =
   2868       filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
   2869   filler_->SetNamedAutoIndexReference(
   2870       HeapGraphEdge::kInternal,
   2871       group_info, group_entry,
   2872       info, child_entry);
   2873 }
   2874 
   2875 
   2876 void NativeObjectsExplorer::SetWrapperNativeReferences(
   2877     HeapObject* wrapper, v8::RetainedObjectInfo* info) {
   2878   HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
   2879   ASSERT(wrapper_entry != NULL);
   2880   HeapEntry* info_entry =
   2881       filler_->FindOrAddEntry(info, native_entries_allocator_);
   2882   ASSERT(info_entry != NULL);
   2883   filler_->SetNamedReference(HeapGraphEdge::kInternal,
   2884                              wrapper, wrapper_entry,
   2885                              "native",
   2886                              info, info_entry);
   2887   filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
   2888                                         info, info_entry,
   2889                                         wrapper, wrapper_entry);
   2890 }
   2891 
   2892 
   2893 void NativeObjectsExplorer::SetRootNativeRootsReference() {
   2894   for (HashMap::Entry* entry = native_groups_.Start();
   2895        entry;
   2896        entry = native_groups_.Next(entry)) {
   2897     NativeGroupRetainedObjectInfo* group_info =
   2898         static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
   2899     HeapEntry* group_entry =
   2900         filler_->FindOrAddEntry(group_info, native_entries_allocator_);
   2901     ASSERT(group_entry != NULL);
   2902     filler_->SetIndexedAutoIndexReference(
   2903         HeapGraphEdge::kElement,
   2904         V8HeapExplorer::kInternalRootObject, snapshot_->root(),
   2905         group_info, group_entry);
   2906   }
   2907 }
   2908 
   2909 
   2910 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
   2911   if (in_groups_.Contains(*p)) return;
   2912   Isolate* isolate = Isolate::Current();
   2913   v8::RetainedObjectInfo* info =
   2914       isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
   2915   if (info == NULL) return;
   2916   GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
   2917 }
   2918 
   2919 
   2920 class SnapshotCounter : public SnapshotFillerInterface {
   2921  public:
   2922   explicit SnapshotCounter(HeapEntriesMap* entries) : entries_(entries) { }
   2923   HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
   2924     entries_->Pair(ptr, allocator, HeapEntriesMap::kHeapEntryPlaceholder);
   2925     return HeapEntriesMap::kHeapEntryPlaceholder;
   2926   }
   2927   HeapEntry* FindEntry(HeapThing ptr) {
   2928     return entries_->Map(ptr);
   2929   }
   2930   HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
   2931     HeapEntry* entry = FindEntry(ptr);
   2932     return entry != NULL ? entry : AddEntry(ptr, allocator);
   2933   }
   2934   void SetIndexedReference(HeapGraphEdge::Type,
   2935                            HeapThing parent_ptr,
   2936                            HeapEntry*,
   2937                            int,
   2938                            HeapThing child_ptr,
   2939                            HeapEntry*) {
   2940     entries_->CountReference(parent_ptr, child_ptr);
   2941   }
   2942   void SetIndexedAutoIndexReference(HeapGraphEdge::Type,
   2943                                     HeapThing parent_ptr,
   2944                                     HeapEntry*,
   2945                                     HeapThing child_ptr,
   2946                                     HeapEntry*) {
   2947     entries_->CountReference(parent_ptr, child_ptr);
   2948   }
   2949   void SetNamedReference(HeapGraphEdge::Type,
   2950                          HeapThing parent_ptr,
   2951                          HeapEntry*,
   2952                          const char*,
   2953                          HeapThing child_ptr,
   2954                          HeapEntry*) {
   2955     entries_->CountReference(parent_ptr, child_ptr);
   2956   }
   2957   void SetNamedAutoIndexReference(HeapGraphEdge::Type,
   2958                                   HeapThing parent_ptr,
   2959                                   HeapEntry*,
   2960                                   HeapThing child_ptr,
   2961                                   HeapEntry*) {
   2962     entries_->CountReference(parent_ptr, child_ptr);
   2963   }
   2964 
   2965  private:
   2966   HeapEntriesMap* entries_;
   2967 };
   2968 
   2969 
   2970 class SnapshotFiller : public SnapshotFillerInterface {
   2971  public:
   2972   explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
   2973       : snapshot_(snapshot),
   2974         collection_(snapshot->collection()),
   2975         entries_(entries) { }
   2976   HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
   2977     UNREACHABLE();
   2978     return NULL;
   2979   }
   2980   HeapEntry* FindEntry(HeapThing ptr) {
   2981     return entries_->Map(ptr);
   2982   }
   2983   HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
   2984     HeapEntry* entry = FindEntry(ptr);
   2985     return entry != NULL ? entry : AddEntry(ptr, allocator);
   2986   }
   2987   void SetIndexedReference(HeapGraphEdge::Type type,
   2988                            HeapThing parent_ptr,
   2989                            HeapEntry* parent_entry,
   2990                            int index,
   2991                            HeapThing child_ptr,
   2992                            HeapEntry* child_entry) {
   2993     int child_index, retainer_index;
   2994     entries_->CountReference(
   2995         parent_ptr, child_ptr, &child_index, &retainer_index);
   2996     parent_entry->SetIndexedReference(
   2997         type, child_index, index, child_entry, retainer_index);
   2998   }
   2999   void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
   3000                                     HeapThing parent_ptr,
   3001                                     HeapEntry* parent_entry,
   3002                                     HeapThing child_ptr,
   3003                                     HeapEntry* child_entry) {
   3004     int child_index, retainer_index;
   3005     entries_->CountReference(
   3006         parent_ptr, child_ptr, &child_index, &retainer_index);
   3007     parent_entry->SetIndexedReference(
   3008         type, child_index, child_index + 1, child_entry, retainer_index);
   3009   }
   3010   void SetNamedReference(HeapGraphEdge::Type type,
   3011                          HeapThing parent_ptr,
   3012                          HeapEntry* parent_entry,
   3013                          const char* reference_name,
   3014                          HeapThing child_ptr,
   3015                          HeapEntry* child_entry) {
   3016     int child_index, retainer_index;
   3017     entries_->CountReference(
   3018         parent_ptr, child_ptr, &child_index, &retainer_index);
   3019     parent_entry->SetNamedReference(
   3020         type, child_index, reference_name, child_entry, retainer_index);
   3021   }
   3022   void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
   3023                                   HeapThing parent_ptr,
   3024                                   HeapEntry* parent_entry,
   3025                                   HeapThing child_ptr,
   3026                                   HeapEntry* child_entry) {
   3027     int child_index, retainer_index;
   3028     entries_->CountReference(
   3029         parent_ptr, child_ptr, &child_index, &retainer_index);
   3030     parent_entry->SetNamedReference(type,
   3031                               child_index,
   3032                               collection_->names()->GetName(child_index + 1),
   3033                               child_entry,
   3034                               retainer_index);
   3035   }
   3036 
   3037  private:
   3038   HeapSnapshot* snapshot_;
   3039   HeapSnapshotsCollection* collection_;
   3040   HeapEntriesMap* entries_;
   3041 };
   3042 
   3043 
// Wires both explorers to report progress back through this generator
// (passed as the SnapshottingProgressReportingInterface).
HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
                                             v8::ActivityControl* control)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this),
      dom_explorer_(snapshot_, this) {
}
   3051 
   3052 
// Produces the snapshot in phases: two full GCs to drop weakly
// reachable objects, a counting pass, storage allocation, a filling
// pass, then dominator and retained-size computation. Returns false if
// the embedder aborted via the ActivityControl.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef DEBUG
  // The GCs above requested an iterable heap, so no space should have
  // been swept conservatively.
  Heap* debug_heap = Isolate::Current()->heap();
  ASSERT(!debug_heap->old_data_space()->was_swept_conservatively());
  ASSERT(!debug_heap->old_pointer_space()->was_swept_conservatively());
  ASSERT(!debug_heap->code_space()->was_swept_conservatively());
  ASSERT(!debug_heap->cell_space()->was_swept_conservatively());
  ASSERT(!debug_heap->map_space()->was_swept_conservatively());
#endif

  // The following code uses heap iterators, so we want the heap to be
  // stable. It should follow TagGlobalObjects as that can allocate.
  AssertNoAllocation no_alloc;

#ifdef DEBUG
  debug_heap->Verify();
#endif

  SetProgressTotal(2);  // 2 passes.

#ifdef DEBUG
  debug_heap->Verify();
#endif

  // Pass 1. Iterate heap contents to count entries and references.
  if (!CountEntriesAndReferences()) return false;

#ifdef DEBUG
  debug_heap->Verify();
#endif

  // Allocate memory for entries and references.
  snapshot_->AllocateEntries(entries_.entries_count(),
                             entries_.total_children_count(),
                             entries_.total_retainers_count());

  // Allocate heap objects to entries hash map.
  entries_.AllocateEntries();

  // Pass 2. Fill references.
  if (!FillReferences()) return false;

  if (!SetEntriesDominators()) return false;
  if (!CalculateRetainedSizes()) return false;

  // Force a final progress report so the embedder observes completion.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
   3115 
   3116 
   3117 void HeapSnapshotGenerator::ProgressStep() {
   3118   ++progress_counter_;
   3119 }
   3120 
   3121 
   3122 bool HeapSnapshotGenerator::ProgressReport(bool force) {
   3123   const int kProgressReportGranularity = 10000;
   3124   if (control_ != NULL
   3125       && (force || progress_counter_ % kProgressReportGranularity == 0)) {
   3126       return
   3127           control_->ReportProgressValue(progress_counter_, progress_total_) ==
   3128           v8::ActivityControl::kContinue;
   3129   }
   3130   return true;
   3131 }
   3132 
   3133 
   3134 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
   3135   if (control_ == NULL) return;
   3136   HeapIterator iterator(HeapIterator::kFilterUnreachable);
   3137   progress_total_ = (
   3138       v8_heap_explorer_.EstimateObjectsCount(&iterator) +
   3139       dom_explorer_.EstimateObjectsCount()) * iterations_count;
   3140   progress_counter_ = 0;
   3141 }
   3142 
   3143 
   3144 bool HeapSnapshotGenerator::CountEntriesAndReferences() {
   3145   SnapshotCounter counter(&entries_);
   3146   v8_heap_explorer_.AddRootEntries(&counter);
   3147   return v8_heap_explorer_.IterateAndExtractReferences(&counter)
   3148       && dom_explorer_.IterateAndExtractReferences(&counter);
   3149 }
   3150 
   3151 
   3152 bool HeapSnapshotGenerator::FillReferences() {
   3153   SnapshotFiller filler(snapshot_, &entries_);
   3154   // IterateAndExtractReferences cannot set object names because
   3155   // it makes call to JSObject::LocalLookupRealNamedProperty which
   3156   // in turn may relocate objects in property maps thus changing the heap
   3157   // layout and affecting retainer counts. This is not acceptable because
   3158   // number of retainers must not change between count and fill passes.
   3159   // To avoid this there's a separate postpass that set object names.
   3160   return v8_heap_explorer_.IterateAndExtractReferences(&filler)
   3161       && dom_explorer_.IterateAndExtractReferences(&filler)
   3162       && v8_heap_explorer_.IterateAndSetObjectNames(&filler);
   3163 }
   3164 
   3165 
// Iterative DFS from the root that stores entries into |entries| in
// postorder (children before parents; the root receives the highest
// index) and records each entry's position via set_ordered_index.
// Shortcut edges are skipped, matching BuildDominatorTree.
void HeapSnapshotGenerator::FillReversePostorderIndexes(
    Vector<HeapEntry*>* entries) {
  snapshot_->ClearPaint();
  int current_entry = 0;
  // Explicit DFS stack; paint marks prevent revisiting nodes.
  List<HeapEntry*> nodes_to_visit;
  nodes_to_visit.Add(snapshot_->root());
  snapshot_->root()->paint();
  while (!nodes_to_visit.is_empty()) {
    HeapEntry* entry = nodes_to_visit.last();
    Vector<HeapGraphEdge> children = entry->children();
    bool has_new_edges = false;
    for (int i = 0; i < children.length(); ++i) {
      // Shortcut edges are excluded from dominance computation.
      if (children[i].type() == HeapGraphEdge::kShortcut) continue;
      HeapEntry* child = children[i].to();
      if (!child->painted()) {
        nodes_to_visit.Add(child);
        child->paint();
        has_new_edges = true;
      }
    }
    if (!has_new_edges) {
      // All children have been visited: emit this entry.
      entry->set_ordered_index(current_entry);
      (*entries)[current_entry++] = entry;
      nodes_to_visit.RemoveLast();
    }
  }
  ASSERT_EQ(current_entry, entries->length());
}
   3194 
   3195 
   3196 static int Intersect(int i1, int i2, const Vector<int>& dominators) {
   3197   int finger1 = i1, finger2 = i2;
   3198   while (finger1 != finger2) {
   3199     while (finger1 < finger2) finger1 = dominators[finger1];
   3200     while (finger2 < finger1) finger2 = dominators[finger2];
   3201   }
   3202   return finger1;
   3203 }
   3204 
   3205 
// The algorithm is based on the article:
// K. Cooper, T. Harvey and K. Kennedy "A Simple, Fast Dominance Algorithm"
// Softw. Pract. Exper. 4 (2001), pp. 1-10.
// |entries| is expected in the order produced by
// FillReversePostorderIndexes (root at the last index). Returns false
// if the embedder aborted via ProgressReport.
bool HeapSnapshotGenerator::BuildDominatorTree(
    const Vector<HeapEntry*>& entries,
    Vector<int>* dominators) {
  if (entries.length() == 0) return true;
  const int entries_length = entries.length(), root_index = entries_length - 1;
  static const int kNoDominator = -1;
  for (int i = 0; i < root_index; ++i) (*dominators)[i] = kNoDominator;
  (*dominators)[root_index] = root_index;

  // The affected array is used to mark entries which dominators
  // have to be recalculated because of changes in their retainers.
  ScopedVector<bool> affected(entries_length);
  for (int i = 0; i < affected.length(); ++i) affected[i] = false;
  // Mark the root direct children as affected.
  Vector<HeapGraphEdge> children = entries[root_index]->children();
  for (int i = 0; i < children.length(); ++i) {
    affected[children[i].to()->ordered_index()] = true;
  }

  // Iterate to a fixed point, recomputing the immediate dominator of
  // every affected entry from its retainers.
  bool changed = true;
  while (changed) {
    changed = false;
    if (!ProgressReport(true)) return false;
    for (int i = root_index - 1; i >= 0; --i) {
      if (!affected[i]) continue;
      affected[i] = false;
      // If dominator of the entry has already been set to root,
      // then it can't propagate any further.
      if ((*dominators)[i] == root_index) continue;
      int new_idom_index = kNoDominator;
      Vector<HeapGraphEdge*> rets = entries[i]->retainers();
      for (int j = 0; j < rets.length(); ++j) {
        // Shortcut edges are excluded, consistent with the postorder walk.
        if (rets[j]->type() == HeapGraphEdge::kShortcut) continue;
        int ret_index = rets[j]->From()->ordered_index();
        if (dominators->at(ret_index) != kNoDominator) {
          new_idom_index = new_idom_index == kNoDominator
              ? ret_index
              : Intersect(ret_index, new_idom_index, *dominators);
          // If idom has already reached the root, it doesn't make sense
          // to check other retainers.
          if (new_idom_index == root_index) break;
        }
      }
      if (new_idom_index != kNoDominator
          && dominators->at(i) != new_idom_index) {
        (*dominators)[i] = new_idom_index;
        changed = true;
        // This entry's dominator changed, so its children's dominators
        // may change too: revisit them on the next sweep.
        Vector<HeapGraphEdge> children = entries[i]->children();
        for (int j = 0; j < children.length(); ++j) {
          affected[children[j].to()->ordered_index()] = true;
        }
      }
    }
  }
  return true;
}
   3265 
   3266 
   3267 bool HeapSnapshotGenerator::SetEntriesDominators() {
   3268   // This array is used for maintaining reverse postorder of nodes.
   3269   ScopedVector<HeapEntry*> ordered_entries(snapshot_->entries()->length());
   3270   FillReversePostorderIndexes(&ordered_entries);
   3271   ScopedVector<int> dominators(ordered_entries.length());
   3272   if (!BuildDominatorTree(ordered_entries, &dominators)) return false;
   3273   for (int i = 0; i < ordered_entries.length(); ++i) {
   3274     ASSERT(dominators[i] >= 0);
   3275     ordered_entries[i]->set_dominator(ordered_entries[dominators[i]]);
   3276   }
   3277   return true;
   3278 }
   3279 
   3280 
bool HeapSnapshotGenerator::CalculateRetainedSizes() {
  // As for the dominators tree we only know parent nodes, not
  // children, to sum up total sizes we "bubble" node's self size
  // adding it to all of its parents.
  List<HeapEntry*>& entries = *snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    HeapEntry* entry = entries[i];
    // Each node starts at its own size before dominated sizes are added.
    entry->set_retained_size(entry->self_size());
  }
  for (int i = 0; i < entries.length(); ++i) {
    HeapEntry* entry = entries[i];
    int entry_size = entry->self_size();
    // Walk up the dominator chain, adding this node's self size to
    // every dominator. The root dominates itself, which terminates the
    // walk. Note that |entry| is reused as the walk cursor here.
    for (HeapEntry* dominator = entry->dominator();
         dominator != entry;
         entry = dominator, dominator = entry->dominator()) {
      dominator->add_retained_size(entry_size);
    }
  }
  return true;
}
   3301 
   3302 
// Maximum number of characters needed to print an integral value of
// the given byte width in decimal (sign included for kSigned, the
// trailing '\0' not included).
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;    // "-2147483648"
  static const int kUnsigned = 10;  // "4294967295"
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;    // "-9223372036854775808"
  static const int kUnsigned = 20;  // "18446744073709551615"
};
   3312 
   3313 
   3314 class OutputStreamWriter {
   3315  public:
   3316   explicit OutputStreamWriter(v8::OutputStream* stream)
   3317       : stream_(stream),
   3318         chunk_size_(stream->GetChunkSize()),
   3319         chunk_(chunk_size_),
   3320         chunk_pos_(0),
   3321         aborted_(false) {
   3322     ASSERT(chunk_size_ > 0);
   3323   }
   3324   bool aborted() { return aborted_; }
   3325   void AddCharacter(char c) {
   3326     ASSERT(c != '\0');
   3327     ASSERT(chunk_pos_ < chunk_size_);
   3328     chunk_[chunk_pos_++] = c;
   3329     MaybeWriteChunk();
   3330   }
   3331   void AddString(const char* s) {
   3332     AddSubstring(s, StrLength(s));
   3333   }
   3334   void AddSubstring(const char* s, int n) {
   3335     if (n <= 0) return;
   3336     ASSERT(static_cast<size_t>(n) <= strlen(s));
   3337     const char* s_end = s + n;
   3338     while (s < s_end) {
   3339       int s_chunk_size = Min(
   3340           chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
   3341       ASSERT(s_chunk_size > 0);
   3342       memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
   3343       s += s_chunk_size;
   3344       chunk_pos_ += s_chunk_size;
   3345       MaybeWriteChunk();
   3346     }
   3347   }
   3348   void AddNumber(int n) { AddNumberImpl<int>(n, "%d"); }
   3349   void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
   3350   void AddNumber(uint64_t n) { AddNumberImpl<uint64_t>(n, "%llu"); }
   3351   void Finalize() {
   3352     if (aborted_) return;
   3353     ASSERT(chunk_pos_ < chunk_size_);
   3354     if (chunk_pos_ != 0) {
   3355       WriteChunk();
   3356     }
   3357     stream_->EndOfStream();
   3358   }
   3359 
   3360  private:
   3361   template<typename T>
   3362   void AddNumberImpl(T n, const char* format) {
   3363     // Buffer for the longest value plus trailing \0
   3364     static const int kMaxNumberSize =
   3365         MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
   3366     if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
   3367       int result = OS::SNPrintF(
   3368           chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
   3369       ASSERT(result != -1);
   3370       chunk_pos_ += result;
   3371       MaybeWriteChunk();
   3372     } else {
   3373       EmbeddedVector<char, kMaxNumberSize> buffer;
   3374       int result = OS::SNPrintF(buffer, format, n);
   3375       USE(result);
   3376       ASSERT(result != -1);
   3377       AddString(buffer.start());
   3378     }
   3379   }
   3380   void MaybeWriteChunk() {
   3381     ASSERT(chunk_pos_ <= chunk_size_);
   3382     if (chunk_pos_ == chunk_size_) {
   3383       WriteChunk();
   3384     }
   3385   }
   3386   void WriteChunk() {
   3387     if (aborted_) return;
   3388     if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
   3389         v8::OutputStream::kAbort) aborted_ = true;
   3390     chunk_pos_ = 0;
   3391   }
   3392 
   3393   v8::OutputStream* stream_;
   3394   int chunk_size_;
   3395   ScopedVector<char> chunk_;
   3396   int chunk_pos_;
   3397   bool aborted_;
   3398 };
   3399 
   3400 
   3401 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
   3402   ASSERT(writer_ == NULL);
   3403   writer_ = new OutputStreamWriter(stream);
   3404 
   3405   HeapSnapshot* original_snapshot = NULL;
   3406   if (snapshot_->raw_entries_size() >=
   3407       SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
   3408     // The snapshot is too big. Serialize a fake snapshot.
   3409     original_snapshot = snapshot_;
   3410     snapshot_ = CreateFakeSnapshot();
   3411   }
   3412   // Since nodes graph is cyclic, we need the first pass to enumerate
   3413   // them. Strings can be serialized in one pass.
   3414   EnumerateNodes();
   3415   SerializeImpl();
   3416 
   3417   delete writer_;
   3418   writer_ = NULL;
   3419 
   3420   if (original_snapshot != NULL) {
   3421     delete snapshot_;
   3422     snapshot_ = original_snapshot;
   3423   }
   3424 }
   3425 
   3426 
// Builds a minimal two-node snapshot (root plus one message string)
// that is serialized instead of a snapshot whose raw size exceeds the
// serializable maximum. Caller takes ownership of the result.
HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
  HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
                                          HeapSnapshot::kFull,
                                          snapshot_->title(),
                                          snapshot_->uid());
  // Two entries (root + message), one child edge, no retainers.
  result->AllocateEntries(2, 1, 0);
  HeapEntry* root = result->AddRootEntry(1);
  const char* text = snapshot_->collection()->names()->GetFormatted(
      "The snapshot is too big. "
      "Maximum snapshot size is %"  V8_PTR_PREFIX "u MB. "
      "Actual snapshot size is %"  V8_PTR_PREFIX "u MB.",
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
      (snapshot_->raw_entries_size() + MB - 1) / MB);
  HeapEntry* message = result->AddEntry(
      HeapEntry::kString, text, 0, 4, 0, 0);
  root->SetUnidirElementReference(0, 1, message);
  result->SetDominatorsToSelf();
  return result;
}
   3446 
   3447 
// Emits the top-level JSON object:
// {"snapshot":{...},"nodes":[...],"strings":[...]}
// Bails out early (without Finalize) if the stream aborts mid-way.
void HeapSnapshotJSONSerializer::SerializeImpl() {
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}
   3465 
   3466 
   3467 class HeapSnapshotJSONSerializerEnumerator {
   3468  public:
   3469   explicit HeapSnapshotJSONSerializerEnumerator(HeapSnapshotJSONSerializer* s)
   3470       : s_(s) {
   3471   }
   3472   void Apply(HeapEntry** entry) {
   3473     s_->GetNodeId(*entry);
   3474   }
   3475  private:
   3476   HeapSnapshotJSONSerializer* s_;
   3477 };
   3478 
   3479 void HeapSnapshotJSONSerializer::EnumerateNodes() {
   3480   GetNodeId(snapshot_->root());  // Make sure root gets the first id.
   3481   HeapSnapshotJSONSerializerEnumerator iter(this);
   3482   snapshot_->IterateEntries(&iter);
   3483 }
   3484 
   3485 
   3486 int HeapSnapshotJSONSerializer::GetNodeId(HeapEntry* entry) {
   3487   HashMap::Entry* cache_entry = nodes_.Lookup(entry, ObjectHash(entry), true);
   3488   if (cache_entry->value == NULL) {
   3489     cache_entry->value = reinterpret_cast<void*>(next_node_id_++);
   3490   }
   3491   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
   3492 }
   3493 
   3494 
   3495 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
   3496   HashMap::Entry* cache_entry = strings_.Lookup(
   3497       const_cast<char*>(s), ObjectHash(s), true);
   3498   if (cache_entry->value == NULL) {
   3499     cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
   3500   }
   3501   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
   3502 }
   3503 
   3504 
   3505 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge) {
   3506   // The buffer needs space for 3 ints, 3 commas and \0
   3507   static const int kBufferSize =
   3508       MaxDecimalDigitsIn<sizeof(int)>::kSigned * 3 + 3 + 1;  // NOLINT
   3509   EmbeddedVector<char, kBufferSize> buffer;
   3510   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
   3511       || edge->type() == HeapGraphEdge::kHidden
   3512       || edge->type() == HeapGraphEdge::kWeak
   3513       ? edge->index() : GetStringId(edge->name());
   3514   STATIC_CHECK(sizeof(int) == sizeof(edge->type()));  // NOLINT
   3515   STATIC_CHECK(sizeof(int) == sizeof(edge_name_or_index));  // NOLINT
   3516   STATIC_CHECK(sizeof(int) == sizeof(GetNodeId(edge->to())));  // NOLINT
   3517   int result = OS::SNPrintF(buffer, ",%d,%d,%d",
   3518       edge->type(), edge_name_or_index, GetNodeId(edge->to()));
   3519   USE(result);
   3520   ASSERT(result != -1);
   3521   writer_->AddString(buffer.start());
   3522 }
   3523 
   3524 
// Serializes one node as
// "\n,type,name,id,self_size,retained_size,dominator,children_count"
// followed by all of its edges.
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 6 ints, 1 uint32_t, 7 commas, \n and \0
  static const int kBufferSize =
      6 * MaxDecimalDigitsIn<sizeof(int)>::kSigned  // NOLINT
      + MaxDecimalDigitsIn<sizeof(uint32_t)>::kUnsigned  // NOLINT
      + 7 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  Vector<HeapGraphEdge> children = entry->children();
  // The STATIC_CHECKs guarantee that every SNPrintF argument really
  // has the width the %d/%u conversions assume.
  STATIC_CHECK(sizeof(int) == sizeof(entry->type()));  // NOLINT
  STATIC_CHECK(sizeof(int) == sizeof(GetStringId(entry->name())));  // NOLINT
  STATIC_CHECK(sizeof(unsigned) == sizeof(entry->id()));  // NOLINT
  STATIC_CHECK(sizeof(int) == sizeof(entry->self_size()));  // NOLINT
  STATIC_CHECK(sizeof(int) == sizeof(entry->retained_size()));  // NOLINT
  STATIC_CHECK(sizeof(int) == sizeof(GetNodeId(entry->dominator())));  // NOLINT
  STATIC_CHECK(sizeof(int) == sizeof(children.length()));  // NOLINT
  int result = OS::SNPrintF(buffer, "\n,%d,%d,%u,%d,%d,%d,%d",
      entry->type(),
      GetStringId(entry->name()),
      entry->id(),
      entry->self_size(),
      entry->retained_size(),
      GetNodeId(entry->dominator()),
      children.length());
  USE(result);
  ASSERT(result != -1);
  writer_->AddString(buffer.start());
  for (int i = 0; i < children.length(); ++i) {
    SerializeEdge(&children[i]);
    if (writer_->aborted()) return;
  }
}
   3556 
   3557 
// Emits the "nodes" array: first a schema object describing the node
// and edge field layout, then every node (with its edges) ordered by
// node id, where ids have been rewritten to array positions.
void HeapSnapshotJSONSerializer::SerializeNodes() {
  // The first (zero) item of nodes array is an object describing node
  // serialization layout.  We use a set of macros to improve
  // readability.
#define JSON_A(s) "["s"]"
#define JSON_O(s) "{"s"}"
#define JSON_S(s) "\""s"\""
  writer_->AddString(JSON_O(
    JSON_S("fields") ":" JSON_A(
        JSON_S("type")
        "," JSON_S("name")
        "," JSON_S("id")
        "," JSON_S("self_size")
        "," JSON_S("retained_size")
        "," JSON_S("dominator")
        "," JSON_S("children_count")
        "," JSON_S("children"))
    "," JSON_S("types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden")
            "," JSON_S("array")
            "," JSON_S("string")
            "," JSON_S("object")
            "," JSON_S("code")
            "," JSON_S("closure")
            "," JSON_S("regexp")
            "," JSON_S("number")
            "," JSON_S("native")
            "," JSON_S("synthetic"))
        "," JSON_S("string")
        "," JSON_S("number")
        "," JSON_S("number")
        "," JSON_S("number")
        "," JSON_S("number")
        "," JSON_S("number")
        "," JSON_O(
            JSON_S("fields") ":" JSON_A(
                JSON_S("type")
                "," JSON_S("name_or_index")
                "," JSON_S("to_node"))
            "," JSON_S("types") ":" JSON_A(
                JSON_A(
                    JSON_S("context")
                    "," JSON_S("element")
                    "," JSON_S("property")
                    "," JSON_S("internal")
                    "," JSON_S("hidden")
                    "," JSON_S("shortcut")
                    "," JSON_S("weak"))
                "," JSON_S("string_or_number")
                "," JSON_S("node"))))));
#undef JSON_S
#undef JSON_O
#undef JSON_A

  const int node_fields_count = 7;
  // type,name,id,self_size,retained_size,dominator,children_count.
  const int edge_fields_count = 3;  // type,name|index,to_node.
  // Nodes were assigned ids in enumeration order, so sorting by value
  // restores that order.
  List<HashMap::Entry*> sorted_nodes;
  SortHashMap(&nodes_, &sorted_nodes);
  // Rewrite node ids, so they refer to actual array positions.
  if (sorted_nodes.length() > 1) {
    // Nodes start from array index 1.
    int prev_value = 1;
    sorted_nodes[0]->value = reinterpret_cast<void*>(prev_value);
    for (int i = 1; i < sorted_nodes.length(); ++i) {
      // Each node occupies node_fields_count slots plus
      // edge_fields_count slots per edge.
      HeapEntry* prev_heap_entry =
          reinterpret_cast<HeapEntry*>(sorted_nodes[i-1]->key);
      prev_value += node_fields_count +
          prev_heap_entry->children().length() * edge_fields_count;
      sorted_nodes[i]->value = reinterpret_cast<void*>(prev_value);
    }
  }
  for (int i = 0; i < sorted_nodes.length(); ++i) {
    SerializeNode(reinterpret_cast<HeapEntry*>(sorted_nodes[i]->key));
    if (writer_->aborted()) return;
  }
}
   3636 
   3637 
   3638 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
   3639   writer_->AddString("\"title\":\"");
   3640   writer_->AddString(snapshot_->title());
   3641   writer_->AddString("\"");
   3642   writer_->AddString(",\"uid\":");
   3643   writer_->AddNumber(snapshot_->uid());
   3644 }
   3645 
   3646 
   3647 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
   3648   static const char hex_chars[] = "0123456789ABCDEF";
   3649   w->AddString("\\u");
   3650   w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
   3651   w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
   3652   w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
   3653   w->AddCharacter(hex_chars[u & 0xf]);
   3654 }
   3655 
// Writes |s| as a quoted JSON string (preceded by a newline), escaping
// JSON control characters and converting non-ASCII UTF-8 sequences to
// \uXXXX literals. Undecodable bytes are replaced with '?'.
// NOTE(review): WriteUChar emits a single \uXXXX unit; code points
// above U+FFFF would need a surrogate pair — confirm inputs stay in
// the BMP or that CalculateValue never yields such values here.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          // Printable ASCII goes out verbatim.
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          unsigned length = 1, cursor = 0;
          // Count how many bytes (at most 4) are available before the
          // terminator for the multi-byte sequence.
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            // Skip the bytes the decoder consumed (the loop's ++s
            // accounts for one of them).
            ASSERT(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
   3704 
   3705 
   3706 void HeapSnapshotJSONSerializer::SerializeStrings() {
   3707   List<HashMap::Entry*> sorted_strings;
   3708   SortHashMap(&strings_, &sorted_strings);
   3709   writer_->AddString("\"<dummy>\"");
   3710   for (int i = 0; i < sorted_strings.length(); ++i) {
   3711     writer_->AddCharacter(',');
   3712     SerializeString(
   3713         reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
   3714     if (writer_->aborted()) return;
   3715   }
   3716 }
   3717 
   3718 
   3719 template<typename T>
   3720 inline static int SortUsingEntryValue(const T* x, const T* y) {
   3721   uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
   3722   uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
   3723   if (x_uint > y_uint) {
   3724     return 1;
   3725   } else if (x_uint == y_uint) {
   3726     return 0;
   3727   } else {
   3728     return -1;
   3729   }
   3730 }
   3731 
   3732 
   3733 void HeapSnapshotJSONSerializer::SortHashMap(
   3734     HashMap* map, List<HashMap::Entry*>* sorted_entries) {
   3735   for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
   3736     sorted_entries->Add(p);
   3737   sorted_entries->Sort(SortUsingEntryValue);
   3738 }
   3739 
   3740 } }  // namespace v8::internal
   3741