// Copyright 2009-2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_PROFILER_H_
#define V8_HEAP_PROFILER_H_

#include "isolate.h"
#include "zone-inl.h"

namespace v8 {
namespace internal {

#ifdef ENABLE_LOGGING_AND_PROFILING

class HeapSnapshot;
class HeapSnapshotsCollection;

#define HEAP_PROFILE(heap, call)                                             \
  do {                                                                       \
    v8::internal::HeapProfiler* profiler = heap->isolate()->heap_profiler(); \
    if (profiler != NULL && profiler->is_profiling()) {                      \
      profiler->call;                                                        \
    }                                                                        \
  } while (false)
#else
#define HEAP_PROFILE(heap, call) ((void) 0)
#endif  // ENABLE_LOGGING_AND_PROFILING

// The HeapProfiler writes data to the log files, which can be postprocessed
// to generate .hp files for use by the GHC/Valgrind tool hp2ps.
class HeapProfiler {
 public:
  static void Setup();
  static void TearDown();

#ifdef ENABLE_LOGGING_AND_PROFILING
  static HeapSnapshot* TakeSnapshot(const char* name,
                                    int type,
                                    v8::ActivityControl* control);
  static HeapSnapshot* TakeSnapshot(String* name,
                                    int type,
                                    v8::ActivityControl* control);
  static int GetSnapshotsCount();
  static HeapSnapshot* GetSnapshot(int index);
  static HeapSnapshot* FindSnapshot(unsigned uid);
  static void DeleteAllSnapshots();

  void ObjectMoveEvent(Address from, Address to);

  void DefineWrapperClass(
      uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback);

  v8::RetainedObjectInfo* ExecuteWrapperClassCallback(uint16_t class_id,
                                                      Object** wrapper);

  INLINE(bool is_profiling()) {
    return snapshots_->is_tracking_objects();
  }
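
  // Illustrative only (not part of the original header): heap code reports
  // profiler events through the HEAP_PROFILE macro defined above, which
  // forwards the call only while is_profiling() returns true. A typical call
  // site looks roughly like this (the address expressions are placeholders):
  //
  //   HEAP_PROFILE(heap, ObjectMoveEvent(source_address, target_address));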

  // Obsolete interface.
  // Write a single heap sample to the log file.
  static void WriteSample();

 private:
  HeapProfiler();
  ~HeapProfiler();
  HeapSnapshot* TakeSnapshotImpl(const char* name,
                                 int type,
                                 v8::ActivityControl* control);
  HeapSnapshot* TakeSnapshotImpl(String* name,
                                 int type,
                                 v8::ActivityControl* control);
  void ResetSnapshots();

  HeapSnapshotsCollection* snapshots_;
  unsigned next_snapshot_uid_;
  List<v8::HeapProfiler::WrapperInfoCallback> wrapper_callbacks_;

#endif  // ENABLE_LOGGING_AND_PROFILING
};


#ifdef ENABLE_LOGGING_AND_PROFILING

// JSObjectsCluster describes a group of JS objects that are
// considered equivalent in terms of a particular profile.
class JSObjectsCluster BASE_EMBEDDED {
 public:
  // These special cases are used in the retainer profile.
  enum SpecialCase {
    ROOTS = 1,
    GLOBAL_PROPERTY = 2,
    CODE = 3,
    SELF = 100  // This case is used in ClustersCoarser only.
  };

  JSObjectsCluster() : constructor_(NULL), instance_(NULL) {}
  explicit JSObjectsCluster(String* constructor)
      : constructor_(constructor), instance_(NULL) {}
  explicit JSObjectsCluster(SpecialCase special)
      : constructor_(FromSpecialCase(special)), instance_(NULL) {}
  JSObjectsCluster(String* constructor, Object* instance)
      : constructor_(constructor), instance_(instance) {}

  static int CompareConstructors(const JSObjectsCluster& a,
                                 const JSObjectsCluster& b) {
    // Strings are unique, so it is sufficient to compare their pointers.
    return a.constructor_ == b.constructor_ ? 0
        : (a.constructor_ < b.constructor_ ? -1 : 1);
  }
  static int Compare(const JSObjectsCluster& a, const JSObjectsCluster& b) {
    // Strings are unique, so it is sufficient to compare their pointers.
    const int cons_cmp = CompareConstructors(a, b);
    return cons_cmp == 0 ?
        (a.instance_ == b.instance_ ? 0 : (a.instance_ < b.instance_ ? -1 : 1))
        : cons_cmp;
  }
  static int Compare(const JSObjectsCluster* a, const JSObjectsCluster* b) {
    return Compare(*a, *b);
  }

  bool is_null() const { return constructor_ == NULL; }
  bool can_be_coarsed() const { return instance_ != NULL; }
  String* constructor() const { return constructor_; }
  Object* instance() const { return instance_; }

  const char* GetSpecialCaseName() const;
  void Print(StringStream* accumulator) const;
  // Allows null clusters to be printed.
  void DebugPrint(StringStream* accumulator) const;

 private:
  static String* FromSpecialCase(SpecialCase special) {
    // We use symbols that are illegal JS identifiers to identify special
    // cases. Their actual value is irrelevant for us.
    switch (special) {
      case ROOTS: return HEAP->result_symbol();
      case GLOBAL_PROPERTY: return HEAP->code_symbol();
      case CODE: return HEAP->arguments_shadow_symbol();
      case SELF: return HEAP->catch_var_symbol();
      default:
        UNREACHABLE();
        return NULL;
    }
  }

  String* constructor_;
  Object* instance_;
};


struct JSObjectsClusterTreeConfig {
  typedef JSObjectsCluster Key;
  typedef NumberAndSizeInfo Value;
  static const Key kNoKey;
  static const Value kNoValue;
  static int Compare(const Key& a, const Key& b) {
    return Key::Compare(a, b);
  }
};
typedef ZoneSplayTree<JSObjectsClusterTreeConfig> JSObjectsClusterTree;
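
// Illustrative only (a sketch; the variable names below are made up):
// clusters serve as keys in JSObjectsClusterTree and in the retainer trees
// declared later in this file. They are ordered first by constructor pointer
// and then by instance pointer, so two clusters built from the same
// constructor agree on the constructor but may still differ:
//
//   JSObjectsCluster generic(point_constructor);             // no instance
//   JSObjectsCluster concrete(point_constructor, instance);  // with instance
//   // CompareConstructors(generic, concrete) == 0
//   // Compare(generic, concrete) != 0  (instance pointers differ)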

// ConstructorHeapProfile is responsible for gathering and logging
// the "constructor profile" of JS objects allocated on the heap.
// It is run during the garbage collection cycle, so it doesn't need
// to use handles.
class ConstructorHeapProfile BASE_EMBEDDED {
 public:
  ConstructorHeapProfile();
  virtual ~ConstructorHeapProfile() {}
  void CollectStats(HeapObject* obj);
  void PrintStats();

  template<class Callback>
  void ForEach(Callback* callback) { js_objects_info_tree_.ForEach(callback); }
  // Used by ZoneSplayTree::ForEach. Made virtual to allow overriding in tests.
  virtual void Call(const JSObjectsCluster& cluster,
                    const NumberAndSizeInfo& number_and_size);

 private:
  ZoneScope zscope_;
  JSObjectsClusterTree js_objects_info_tree_;
};
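
// A minimal sketch of a ForEach callback (illustrative only; the class name
// and the aggregation are assumptions, not part of V8). ZoneSplayTree::ForEach
// invokes Call() once for every (cluster, stats) pair stored in the tree:
//
//   class TotalObjectsCallback {
//    public:
//     TotalObjectsCallback() : total_(0) {}
//     void Call(const JSObjectsCluster& cluster,
//               const NumberAndSizeInfo& number_and_size) {
//       total_ += number_and_size.number();
//     }
//     int total() const { return total_; }
//    private:
//     int total_;
//   };
//
//   TotalObjectsCallback callback;
//   constructor_profile.ForEach(&callback);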

// JSObjectsRetainerTree is used to represent retainer graphs using
// adjacency list form:
//
//   Cluster -> (Cluster -> NumberAndSizeInfo)
//
// Subordinate splay trees are stored by pointer. They are zone-allocated,
// so there is no need to manage their lifetimes.
//
struct JSObjectsRetainerTreeConfig {
  typedef JSObjectsCluster Key;
  typedef JSObjectsClusterTree* Value;
  static const Key kNoKey;
  static const Value kNoValue;
  static int Compare(const Key& a, const Key& b) {
    return Key::Compare(a, b);
  }
};
typedef ZoneSplayTree<JSObjectsRetainerTreeConfig> JSObjectsRetainerTree;


class ClustersCoarser BASE_EMBEDDED {
 public:
  ClustersCoarser();

  // Processes a given retainer graph.
  void Process(JSObjectsRetainerTree* tree);

  // Returns an equivalent cluster (which can be the cluster itself).
  // If the given cluster doesn't have an equivalent, returns a null cluster.
  JSObjectsCluster GetCoarseEquivalent(const JSObjectsCluster& cluster);
  // Returns whether a cluster can be substituted with an equivalent and thus
  // skipped in some cases.
  bool HasAnEquivalent(const JSObjectsCluster& cluster);

  // Used by JSObjectsRetainerTree::ForEach.
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size);

 private:
  // Stores a list of back references for a cluster.
  struct ClusterBackRefs {
    explicit ClusterBackRefs(const JSObjectsCluster& cluster_);
    ClusterBackRefs(const ClusterBackRefs& src);
    ClusterBackRefs& operator=(const ClusterBackRefs& src);

    static int Compare(const ClusterBackRefs& a, const ClusterBackRefs& b);
    void SortRefs() { refs.Sort(JSObjectsCluster::Compare); }
    static void SortRefsIterator(ClusterBackRefs* ref) { ref->SortRefs(); }

    JSObjectsCluster cluster;
    ZoneList<JSObjectsCluster> refs;
  };
  typedef ZoneList<ClusterBackRefs> SimilarityList;

  // A tree for storing a list of equivalents for a cluster.
  struct ClusterEqualityConfig {
    typedef JSObjectsCluster Key;
    typedef JSObjectsCluster Value;
    static const Key kNoKey;
    static const Value kNoValue;
    static int Compare(const Key& a, const Key& b) {
      return Key::Compare(a, b);
    }
  };
  typedef ZoneSplayTree<ClusterEqualityConfig> EqualityTree;

  static int ClusterBackRefsCmp(const ClusterBackRefs* a,
                                const ClusterBackRefs* b) {
    return ClusterBackRefs::Compare(*a, *b);
  }
  int DoProcess(JSObjectsRetainerTree* tree);
  int FillEqualityTree();

  static const int kInitialBackrefsListCapacity = 2;
  static const int kInitialSimilarityListCapacity = 2000;
  // Number of passes for finding equivalents. Limits the length of paths
  // that can be considered equivalent.
  static const int kMaxPassesCount = 10;

  ZoneScope zscope_;
  SimilarityList sim_list_;
  EqualityTree eq_tree_;
  ClusterBackRefs* current_pair_;
  JSObjectsRetainerTree* current_set_;
  const JSObjectsCluster* self_;
};


class RetainerTreeAggregator;

// RetainerHeapProfile is responsible for gathering and logging
// the "retainer profile" of JS objects allocated on the heap.
// It is run during the garbage collection cycle, so it doesn't need
// to use handles.
class RetainerHeapProfile BASE_EMBEDDED {
 public:
  class Printer {
   public:
    virtual ~Printer() {}
    virtual void PrintRetainers(const JSObjectsCluster& cluster,
                                const StringStream& retainers) = 0;
  };

  RetainerHeapProfile();
  ~RetainerHeapProfile();

  RetainerTreeAggregator* aggregator() { return aggregator_; }
  ClustersCoarser* coarser() { return &coarser_; }
  JSObjectsRetainerTree* retainers_tree() { return &retainers_tree_; }

  void CollectStats(HeapObject* obj);
  void CoarseAndAggregate();
  void PrintStats();
  void DebugPrintStats(Printer* printer);
  void StoreReference(const JSObjectsCluster& cluster, HeapObject* ref);

 private:
  ZoneScope zscope_;
  JSObjectsRetainerTree retainers_tree_;
  ClustersCoarser coarser_;
  RetainerTreeAggregator* aggregator_;
};


class AggregatedHeapSnapshot {
 public:
  AggregatedHeapSnapshot();
  ~AggregatedHeapSnapshot();

  HistogramInfo* info() { return info_; }
  ConstructorHeapProfile* js_cons_profile() { return &js_cons_profile_; }
  RetainerHeapProfile* js_retainer_profile() { return &js_retainer_profile_; }

 private:
  HistogramInfo* info_;
  ConstructorHeapProfile js_cons_profile_;
  RetainerHeapProfile js_retainer_profile_;
};


class HeapEntriesMap;
class HeapEntriesAllocator;

class AggregatedHeapSnapshotGenerator {
 public:
  explicit AggregatedHeapSnapshotGenerator(AggregatedHeapSnapshot* snapshot);
  void GenerateSnapshot();
  void FillHeapSnapshot(HeapSnapshot* snapshot);

  static const int kAllStringsType = LAST_TYPE + 1;

 private:
  void CalculateStringsStats();
  void CollectStats(HeapObject* obj);
  template<class Iterator>
  void IterateRetainers(
      HeapEntriesAllocator* allocator, HeapEntriesMap* entries_map);

  AggregatedHeapSnapshot* agg_snapshot_;
};


class ProducerHeapProfile {
 public:
  void Setup();
  void RecordJSObjectAllocation(Object* obj) {
    if (FLAG_log_producers) DoRecordJSObjectAllocation(obj);
  }

 private:
  ProducerHeapProfile() : can_log_(false) { }

  void DoRecordJSObjectAllocation(Object* obj);
  Isolate* isolate_;
  bool can_log_;

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(ProducerHeapProfile);
};

#endif  // ENABLE_LOGGING_AND_PROFILING

} }  // namespace v8::internal

#endif  // V8_HEAP_PROFILER_H_