// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COUNTERS_H_
#define V8_COUNTERS_H_

#include "include/v8.h"
#include "src/allocation.h"
#include "src/base/atomic-utils.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins.h"
#include "src/globals.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/runtime/runtime.h"
#include "src/tracing/trace-event.h"
#include "src/tracing/traced-value.h"
#include "src/tracing/tracing-category-observer.h"

namespace v8 {
namespace internal {

// StatsCounters is an interface for plugging into external
// counters for monitoring. Counters can be looked up and
// manipulated by name.

class StatsTable {
 public:
  // Register an application-defined function where
  // counters can be looked up.
  void SetCounterFunction(CounterLookupCallback f) {
    lookup_function_ = f;
  }

  // Register an application-defined function to create
  // a histogram for passing to the AddHistogramSample function.
  void SetCreateHistogramFunction(CreateHistogramCallback f) {
    create_histogram_function_ = f;
  }

  // Register an application-defined function to add a sample
  // to a histogram created with the CreateHistogram function.
  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
    add_histogram_sample_function_ = f;
  }

  bool HasCounterFunction() const {
    return lookup_function_ != NULL;
  }

  // Lookup the location of a counter by name. If the lookup
  // is successful, returns a non-NULL pointer for writing the
  // value of the counter. Each thread calling this function
  // may receive a different location to store its counter.
  // The return value must not be cached and re-used across
  // threads, although a single thread is free to cache it.
  int* FindLocation(const char* name) {
    if (!lookup_function_) return NULL;
    return lookup_function_(name);
  }

  // Create a histogram by name. If the create is successful,
  // returns a non-NULL pointer for use with the AddHistogramSample
  // function. min and max define the expected minimum and maximum
  // sample values. buckets is the maximum number of buckets
  // that the samples will be grouped into.
  void* CreateHistogram(const char* name,
                        int min,
                        int max,
                        size_t buckets) {
    if (!create_histogram_function_) return NULL;
    return create_histogram_function_(name, min, max, buckets);
  }

  // Add a sample to a histogram created with the CreateHistogram
  // function.
  void AddHistogramSample(void* histogram, int sample) {
    if (!add_histogram_sample_function_) return;
    return add_histogram_sample_function_(histogram, sample);
  }

 private:
  StatsTable();

  CounterLookupCallback lookup_function_;
  CreateHistogramCallback create_histogram_function_;
  AddHistogramSampleCallback add_histogram_sample_function_;

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(StatsTable);
};
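// Illustrative sketch (not part of this header) of how an embedder can back
// these counters with its own storage by registering a lookup callback on the
// isolate. The map type and the function name LookupCounter are hypothetical;
// only the callback signature and the v8::Isolate::SetCounterFunction entry
// point come from the public API.
//
//   static std::map<std::string, int>* counter_map =
//       new std::map<std::string, int>();
//   int* LookupCounter(const char* name) {
//     return &(*counter_map)[name];  // One int slot per counter name.
//   }
//   ...
//   isolate->SetCounterFunction(LookupCounter);
//
// Afterwards FindLocation(name) forwards to LookupCounter, and every
// StatsCounter writes directly into the embedder-provided int slot.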
// StatsCounters are dynamically created values which can be tracked in
// the StatsTable. They are designed to be lightweight to create and
// easy to use.
//
// Internally, a counter represents a value in a row of a StatsTable.
// The row has a 32bit value for each process/thread in the table and also
// a name (stored in the table metadata). Since the storage location can be
// thread-specific, this class cannot be shared across threads.
class StatsCounter {
 public:
  StatsCounter() { }
  explicit StatsCounter(Isolate* isolate, const char* name)
      : isolate_(isolate), name_(name), ptr_(NULL), lookup_done_(false) { }

  // Sets the counter to a specific value.
  void Set(int value) {
    int* loc = GetPtr();
    if (loc) *loc = value;
  }

  // Increments the counter.
  void Increment() {
    int* loc = GetPtr();
    if (loc) (*loc)++;
  }

  void Increment(int value) {
    int* loc = GetPtr();
    if (loc) (*loc) += value;
  }

  // Decrements the counter.
  void Decrement() {
    int* loc = GetPtr();
    if (loc) (*loc)--;
  }

  void Decrement(int value) {
    int* loc = GetPtr();
    if (loc) (*loc) -= value;
  }

  // Is this counter enabled?
  // Returns false if table is full.
  bool Enabled() {
    return GetPtr() != NULL;
  }

  // Get the internal pointer to the counter. This is used
  // by the code generator to emit code that manipulates a
  // given counter without calling the runtime system.
  int* GetInternalPointer() {
    int* loc = GetPtr();
    DCHECK(loc != NULL);
    return loc;
  }

  // Reset the cached internal pointer.
  void Reset() { lookup_done_ = false; }

 protected:
  // Returns the cached address of this counter location.
  int* GetPtr() {
    if (lookup_done_) return ptr_;
    lookup_done_ = true;
    ptr_ = FindLocationInStatsTable();
    return ptr_;
  }

 private:
  int* FindLocationInStatsTable() const;

  Isolate* isolate_;
  const char* name_;
  int* ptr_;
  bool lookup_done_;
};

// A Histogram represents a dynamically created histogram in the StatsTable.
// It will be registered with the histogram system on first use.
class Histogram {
 public:
  Histogram() { }
  Histogram(const char* name,
            int min,
            int max,
            int num_buckets,
            Isolate* isolate)
      : name_(name),
        min_(min),
        max_(max),
        num_buckets_(num_buckets),
        histogram_(NULL),
        lookup_done_(false),
        isolate_(isolate) { }

  // Add a single sample to this histogram.
  void AddSample(int sample);

  // Returns true if this histogram is enabled.
  bool Enabled() {
    return GetHistogram() != NULL;
  }

  // Reset the cached internal pointer.
  void Reset() {
    lookup_done_ = false;
  }

  const char* name() { return name_; }

 protected:
  // Returns the handle to the histogram.
  void* GetHistogram() {
    if (!lookup_done_) {
      lookup_done_ = true;
      histogram_ = CreateHistogram();
    }
    return histogram_;
  }

  Isolate* isolate() const { return isolate_; }

 private:
  void* CreateHistogram() const;

  const char* name_;
  int min_;
  int max_;
  int num_buckets_;
  void* histogram_;
  bool lookup_done_;
  Isolate* isolate_;
};
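// Illustrative sketch of how these two classes are typically driven from
// V8-internal code. The accessors are the ones generated on the Counters
// class near the end of this file; the sample variable is hypothetical.
//
//   StatsCounter* objects = isolate->counters()->constructed_objects();
//   if (objects->Enabled()) objects->Increment();
//
//   Histogram* ages = isolate->counters()->detached_context_age_in_gc();
//   ages->AddSample(age_in_gc_cycles);  // Effectively a no-op unless the
//                                       // embedder registered the histogram
//                                       // callbacks on the StatsTable.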
// A HistogramTimer allows distributions of results to be created.
class HistogramTimer : public Histogram {
 public:
  enum Resolution {
    MILLISECOND,
    MICROSECOND
  };

  HistogramTimer() {}
  HistogramTimer(const char* name, int min, int max, Resolution resolution,
                 int num_buckets, Isolate* isolate)
      : Histogram(name, min, max, num_buckets, isolate),
        resolution_(resolution) {}

  // Start the timer.
  void Start();

  // Stop the timer and record the results.
  void Stop();

  // Returns true if the timer is running.
  bool Running() {
    return Enabled() && timer_.IsStarted();
  }

  // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
#ifdef DEBUG
  base::ElapsedTimer* timer() { return &timer_; }
#endif

 private:
  base::ElapsedTimer timer_;
  Resolution resolution_;
};

// Helper class for scoping a HistogramTimer.
// TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
// Parser is currently reentrant (when it throws an error, we call back
// into JavaScript and all bets are off), but ElapsedTimer is not
// reentry-safe. Fix this properly and remove |allow_nesting|.
class HistogramTimerScope BASE_EMBEDDED {
 public:
  explicit HistogramTimerScope(HistogramTimer* timer,
                               bool allow_nesting = false)
#ifdef DEBUG
      : timer_(timer),
        skipped_timer_start_(false) {
    if (timer_->timer()->IsStarted() && allow_nesting) {
      skipped_timer_start_ = true;
    } else {
      timer_->Start();
    }
  }
#else
      : timer_(timer) {
    timer_->Start();
  }
#endif
  ~HistogramTimerScope() {
#ifdef DEBUG
    if (!skipped_timer_start_) {
      timer_->Stop();
    }
#else
    timer_->Stop();
#endif
  }

 private:
  HistogramTimer* timer_;
#ifdef DEBUG
  bool skipped_timer_start_;
#endif
};


// A histogram timer that can aggregate events within a larger scope.
//
// Intended use of this timer is to have an outer (aggregating) and an inner
// (to be aggregated) scope, where the inner scope measures the time of events,
// and all those inner scope measurements will be summed up by the outer scope.
// An example use might be to aggregate the time spent in lazy compilation
// while running a script.
//
// Helpers:
// - AggregatingHistogramTimerScope, the "outer" scope within which
//     times will be summed up.
// - AggregatedHistogramTimerScope, the "inner" scope which defines the
//     events to be timed.
class AggregatableHistogramTimer : public Histogram {
 public:
  AggregatableHistogramTimer() {}
  AggregatableHistogramTimer(const char* name, int min, int max,
                             int num_buckets, Isolate* isolate)
      : Histogram(name, min, max, num_buckets, isolate) {}

  // Start/stop the "outer" scope.
  void Start() { time_ = base::TimeDelta(); }
  void Stop() { AddSample(static_cast<int>(time_.InMicroseconds())); }

  // Add a time value ("inner" scope).
  void Add(base::TimeDelta other) { time_ += other; }

 private:
  base::TimeDelta time_;
};
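// Illustrative sketch of the intended outer/inner nesting, using the two
// scope helpers declared below. The compile_lazy accessor is the one from
// AGGREGATABLE_HISTOGRAM_TIMER_LIST further down in this file; CompileLazily
// is a placeholder for whatever the inner event actually is.
//
//   AggregatableHistogramTimer* timer = isolate->counters()->compile_lazy();
//   {
//     AggregatingHistogramTimerScope outer(timer);   // Calls Start().
//     for (...) {
//       AggregatedHistogramTimerScope inner(timer);  // Add()s its elapsed
//       CompileLazily(...);                          // time on destruction.
//     }
//   }  // outer's destructor calls Stop(): one sample = sum of inner times.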
// A helper class for use with AggregatableHistogramTimer. This is the
// outer-most timer scope used with an AggregatableHistogramTimer. It will
// aggregate the information from the inner AggregatedHistogramTimerScope.
class AggregatingHistogramTimerScope {
 public:
  explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    histogram_->Start();
  }
  ~AggregatingHistogramTimerScope() { histogram_->Stop(); }

 private:
  AggregatableHistogramTimer* histogram_;
};

// A helper class for use with AggregatableHistogramTimer, the "inner" scope
// which defines the events to be timed.
class AggregatedHistogramTimerScope {
 public:
  explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    timer_.Start();
  }
  ~AggregatedHistogramTimerScope() { histogram_->Add(timer_.Elapsed()); }

 private:
  base::ElapsedTimer timer_;
  AggregatableHistogramTimer* histogram_;
};


// AggregatedMemoryHistogram collects (time, value) sample pairs and turns
// them into time-uniform samples for the backing histogram, such that the
// backing histogram receives one sample every T ms, where T is controlled
// by FLAG_histogram_interval.
//
// More formally: let F be a real-valued function that maps time to sample
// values. We define F as a linear interpolation between adjacent samples. For
// each time interval [x; x + T) the backing histogram gets one sample value
// that is the average of F(t) in the interval.
template <typename Histogram>
class AggregatedMemoryHistogram {
 public:
  AggregatedMemoryHistogram()
      : is_initialized_(false),
        start_ms_(0.0),
        last_ms_(0.0),
        aggregate_value_(0.0),
        last_value_(0.0),
        backing_histogram_(NULL) {}

  explicit AggregatedMemoryHistogram(Histogram* backing_histogram)
      : AggregatedMemoryHistogram() {
    backing_histogram_ = backing_histogram;
  }

  // Invariants that hold before and after AddSample if
  // is_initialized_ is true:
  //
  // 1) Samples that came in before start_ms_ have been processed, and the
  //    corresponding aggregated samples were sent to the backing histogram.
  // 2) (last_ms_, last_value_) is the last received sample.
  // 3) last_ms_ < start_ms_ + FLAG_histogram_interval.
  // 4) aggregate_value_ is the average of the function that is constructed by
  //    linearly interpolating samples received between start_ms_ and last_ms_.
  void AddSample(double current_ms, double current_value);

 private:
  double Aggregate(double current_ms, double current_value);
  bool is_initialized_;
  double start_ms_;
  double last_ms_;
  double aggregate_value_;
  double last_value_;
  Histogram* backing_histogram_;
};
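// A small worked example of the scheme above (the numbers are illustrative
// and assume FLAG_histogram_interval, i.e. T, is 1000 ms): after
// AddSample(0, 10) and AddSample(1500, 20), F interpolates linearly from
// (0, 10) to (1500, 20), so for the interval [0; 1000) the backing histogram
// receives one sample,
//
//   average of F over [0, 1000) = (F(0) + F(1000)) / 2
//                               = (10 + 16.67) / 2 ~= 13,
//
// while the remainder of the data, from 1000 ms to 1500 ms, stays buffered in
// aggregate_value_/start_ms_ until the next interval boundary is crossed.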
template <typename Histogram>
void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
                                                     double current_value) {
  if (!is_initialized_) {
    aggregate_value_ = current_value;
    start_ms_ = current_ms;
    last_value_ = current_value;
    last_ms_ = current_ms;
    is_initialized_ = true;
  } else {
    const double kEpsilon = 1e-6;
    const int kMaxSamples = 1000;
    if (current_ms < last_ms_ + kEpsilon) {
      // Two samples have the same time, remember the last one.
      last_value_ = current_value;
    } else {
      double sample_interval_ms = FLAG_histogram_interval;
      double end_ms = start_ms_ + sample_interval_ms;
      if (end_ms <= current_ms + kEpsilon) {
        // Linearly interpolate between the last_ms_ and the current_ms.
        double slope = (current_value - last_value_) / (current_ms - last_ms_);
        int i;
        // Send aggregated samples to the backing histogram from the start_ms
        // to the current_ms.
        for (i = 0; i < kMaxSamples && end_ms <= current_ms + kEpsilon; i++) {
          double end_value = last_value_ + (end_ms - last_ms_) * slope;
          double sample_value;
          if (i == 0) {
            // Take aggregate_value_ into account.
            sample_value = Aggregate(end_ms, end_value);
          } else {
            // There is no aggregate_value_ for i > 0.
            sample_value = (last_value_ + end_value) / 2;
          }
          backing_histogram_->AddSample(static_cast<int>(sample_value + 0.5));
          last_value_ = end_value;
          last_ms_ = end_ms;
          end_ms += sample_interval_ms;
        }
        if (i == kMaxSamples) {
          // We hit the sample limit, ignore the remaining samples.
          aggregate_value_ = current_value;
          start_ms_ = current_ms;
        } else {
          aggregate_value_ = last_value_;
          start_ms_ = last_ms_;
        }
      }
      aggregate_value_ = current_ms > start_ms_ + kEpsilon
                             ? Aggregate(current_ms, current_value)
                             : aggregate_value_;
      last_value_ = current_value;
      last_ms_ = current_ms;
    }
  }
}


template <typename Histogram>
double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
                                                       double current_value) {
  double interval_ms = current_ms - start_ms_;
  double value = (current_value + last_value_) / 2;
  // The aggregate_value_ is the average for [start_ms_; last_ms_].
  // The value is the average for [last_ms_; current_ms].
  // Return the weighted average of the aggregate_value_ and the value.
  return aggregate_value_ * ((last_ms_ - start_ms_) / interval_ms) +
         value * ((current_ms - last_ms_) / interval_ms);
}

class RuntimeCallCounter final {
 public:
  explicit RuntimeCallCounter(const char* name) : name_(name) {}
  V8_NOINLINE void Reset();
  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
  void Add(RuntimeCallCounter* other);

  const char* name() const { return name_; }
  int64_t count() const { return count_; }
  base::TimeDelta time() const { return time_; }
  void Increment() { count_++; }
  void Add(base::TimeDelta delta) { time_ += delta; }

 private:
  const char* name_;
  int64_t count_ = 0;
  base::TimeDelta time_;
};
// RuntimeCallTimer is used to keep track of the stack of currently active
// timers used for properly measuring the own time of a RuntimeCallCounter.
class RuntimeCallTimer final {
 public:
  RuntimeCallCounter* counter() { return counter_; }
  void set_counter(RuntimeCallCounter* counter) { counter_ = counter; }
  RuntimeCallTimer* parent() const { return parent_.Value(); }
  void set_parent(RuntimeCallTimer* timer) { parent_.SetValue(timer); }
  const char* name() const { return counter_->name(); }

  inline bool IsStarted();

  inline void Start(RuntimeCallCounter* counter, RuntimeCallTimer* parent);
  void Snapshot();
  inline RuntimeCallTimer* Stop();

 private:
  inline void Pause(base::TimeTicks now);
  inline void Resume(base::TimeTicks now);
  inline void CommitTimeToCounter();
  inline base::TimeTicks Now();

  RuntimeCallCounter* counter_ = nullptr;
  base::AtomicValue<RuntimeCallTimer*> parent_;
  base::TimeTicks start_ticks_;
  base::TimeDelta elapsed_;
};

#define FOR_EACH_API_COUNTER(V) \
  V(ArrayBuffer_Cast) \
  V(ArrayBuffer_Neuter) \
  V(ArrayBuffer_New) \
  V(Array_CloneElementAt) \
  V(Array_New) \
  V(BooleanObject_BooleanValue) \
  V(BooleanObject_New) \
  V(Context_New) \
  V(Context_NewRemoteContext) \
  V(DataView_New) \
  V(Date_DateTimeConfigurationChangeNotification) \
  V(Date_New) \
  V(Date_NumberValue) \
  V(Debug_Call) \
  V(Error_New) \
  V(External_New) \
  V(Float32Array_New) \
  V(Float64Array_New) \
  V(Function_Call) \
  V(Function_New) \
  V(Function_NewInstance) \
  V(FunctionTemplate_GetFunction) \
  V(FunctionTemplate_New) \
  V(FunctionTemplate_NewRemoteInstance) \
  V(FunctionTemplate_NewWithCache) \
  V(FunctionTemplate_NewWithFastHandler) \
  V(Int16Array_New) \
  V(Int32Array_New) \
  V(Int8Array_New) \
  V(JSON_Parse) \
  V(JSON_Stringify) \
  V(Map_AsArray) \
  V(Map_Clear) \
  V(Map_Delete) \
  V(Map_Get) \
  V(Map_Has) \
  V(Map_New) \
  V(Map_Set) \
  V(Message_GetEndColumn) \
  V(Message_GetLineNumber) \
  V(Message_GetSourceLine) \
  V(Message_GetStartColumn) \
  V(Module_Evaluate) \
  V(Module_Instantiate) \
  V(NumberObject_New) \
  V(NumberObject_NumberValue) \
  V(Object_CallAsConstructor) \
  V(Object_CallAsFunction) \
  V(Object_CreateDataProperty) \
  V(Object_DefineOwnProperty) \
  V(Object_DefineProperty) \
  V(Object_Delete) \
  V(Object_DeleteProperty) \
  V(Object_ForceSet) \
  V(Object_Get) \
  V(Object_GetOwnPropertyDescriptor) \
  V(Object_GetOwnPropertyNames) \
  V(Object_GetPropertyAttributes) \
  V(Object_GetPropertyNames) \
  V(Object_GetRealNamedProperty) \
  V(Object_GetRealNamedPropertyAttributes) \
  V(Object_GetRealNamedPropertyAttributesInPrototypeChain) \
  V(Object_GetRealNamedPropertyInPrototypeChain) \
  V(Object_HasOwnProperty) \
  V(Object_HasRealIndexedProperty) \
  V(Object_HasRealNamedCallbackProperty) \
  V(Object_HasRealNamedProperty) \
  V(Object_Int32Value) \
  V(Object_IntegerValue) \
  V(Object_New) \
  V(Object_NumberValue) \
  V(Object_ObjectProtoToString) \
  V(Object_Set) \
  V(Object_SetAccessor) \
  V(Object_SetIntegrityLevel) \
  V(Object_SetPrivate) \
  V(Object_SetPrototype) \
  V(ObjectTemplate_New) \
  V(ObjectTemplate_NewInstance) \
  V(Object_ToArrayIndex) \
  V(Object_ToDetailString) \
  V(Object_ToInt32) \
  V(Object_ToInteger) \
  V(Object_ToNumber) \
  V(Object_ToObject) \
  V(Object_ToString) \
  V(Object_ToUint32) \
  V(Object_Uint32Value) \
  V(Persistent_New) \
  V(Private_New) \
  V(Promise_Catch) \
  V(Promise_Chain) \
  V(Promise_HasRejectHandler) \
  V(Promise_Resolver_New) \
  V(Promise_Resolver_Resolve) \
  V(Promise_Result) \
  V(Promise_Status) \
  V(Promise_Then) \
  V(Proxy_New) \
  V(RangeError_New) \
  V(ReferenceError_New) \
  V(RegExp_New) \
  V(ScriptCompiler_Compile) \
  V(ScriptCompiler_CompileFunctionInContext) \
  V(ScriptCompiler_CompileUnbound) \
  V(Script_Run) \
  V(Set_Add) \
  V(Set_AsArray) \
  V(Set_Clear) \
  V(Set_Delete) \
  V(Set_Has) \
  V(Set_New) \
  V(SharedArrayBuffer_New) \
  V(String_Concat) \
  V(String_NewExternalOneByte) \
  V(String_NewExternalTwoByte) \
  V(String_NewFromOneByte) \
  V(String_NewFromTwoByte) \
  V(String_NewFromUtf8) \
  V(StringObject_New) \
  V(StringObject_StringValue) \
  V(String_Write) \
  V(String_WriteUtf8) \
  V(Symbol_New) \
  V(SymbolObject_New) \
  V(SymbolObject_SymbolValue) \
  V(SyntaxError_New) \
  V(TryCatch_StackTrace) \
  V(TypeError_New) \
  V(Uint16Array_New) \
  V(Uint32Array_New) \
  V(Uint8Array_New) \
  V(Uint8ClampedArray_New) \
  V(UnboundScript_GetId) \
  V(UnboundScript_GetLineNumber) \
  V(UnboundScript_GetName) \
  V(UnboundScript_GetSourceMappingURL) \
  V(UnboundScript_GetSourceURL) \
  V(Value_TypeOf) \
  V(ValueDeserializer_ReadHeader) \
  V(ValueDeserializer_ReadValue) \
  V(ValueSerializer_WriteValue)

#define FOR_EACH_MANUAL_COUNTER(V) \
  V(AccessorGetterCallback) \
  V(AccessorNameGetterCallback) \
  V(AccessorNameGetterCallback_ArrayLength) \
  V(AccessorNameGetterCallback_BoundFunctionLength) \
  V(AccessorNameGetterCallback_BoundFunctionName) \
  V(AccessorNameGetterCallback_FunctionPrototype) \
  V(AccessorNameGetterCallback_StringLength) \
  V(AccessorNameSetterCallback) \
  V(CompileCodeLazy) \
  V(CompileDeserialize) \
  V(CompileEval) \
  V(CompileFullCode) \
  V(CompileAnalyse) \
  V(CompileBackgroundIgnition) \
  V(CompileFunction) \
  V(CompileGetFromOptimizedCodeMap) \
  V(CompileGetUnoptimizedCode) \
  V(CompileIgnition) \
  V(CompileIgnitionFinalization) \
  V(CompileInnerFunction) \
  V(CompileRenumber) \
  V(CompileRewriteReturnResult) \
  V(CompileScopeAnalysis) \
  V(CompileScript) \
  V(CompileSerialize) \
  V(CompileWaitForDispatcher) \
  V(DeoptimizeCode) \
  V(FunctionCallback) \
  V(GC) \
  V(GC_AllAvailableGarbage) \
  V(GCEpilogueCallback) \
  V(GCPrologueCallback) \
  V(GenericNamedPropertyDefinerCallback) \
  V(GenericNamedPropertyDeleterCallback) \
  V(GenericNamedPropertyDescriptorCallback) \
  V(GenericNamedPropertyQueryCallback) \
  V(GenericNamedPropertySetterCallback) \
  V(GetMoreDataCallback) \
  V(IndexedPropertyDefinerCallback) \
  V(IndexedPropertyDeleterCallback) \
  V(IndexedPropertyDescriptorCallback) \
  V(IndexedPropertyGetterCallback) \
  V(IndexedPropertyQueryCallback) \
  V(IndexedPropertySetterCallback) \
  V(InvokeApiInterruptCallbacks) \
  V(InvokeFunctionCallback) \
  V(JS_Execution) \
  V(Map_SetPrototype) \
  V(Map_TransitionToAccessorProperty) \
  V(Map_TransitionToDataProperty) \
  V(Object_DeleteProperty) \
  V(OptimizeCode) \
  V(ParseArrowFunctionLiteral) \
  V(ParseBackgroundArrowFunctionLiteral) \
  V(ParseBackgroundFunctionLiteral) \
  V(ParseEval) \
  V(ParseFunction) \
  V(ParseFunctionLiteral) \
  V(ParseProgram) \
  V(PreParseArrowFunctionLiteral) \
  V(PreParseBackgroundArrowFunctionLiteral) \
  V(PreParseBackgroundNoVariableResolution) \
  V(PreParseBackgroundWithVariableResolution) \
  V(PreParseNoVariableResolution) \
  V(PreParseWithVariableResolution) \
  V(PropertyCallback) \
  V(PrototypeMap_TransitionToAccessorProperty) \
  V(PrototypeMap_TransitionToDataProperty) \
  V(PrototypeObject_DeleteProperty) \
  V(RecompileConcurrent) \
  V(RecompileSynchronous) \
  V(TestCounter1) \
  V(TestCounter2) \
  V(TestCounter3) \
  /* Dummy counter for the unexpected stub miss. */ \
  V(UnexpectedStubMiss)

#define FOR_EACH_HANDLER_COUNTER(V) \
  V(IC_HandlerCacheHit) \
  V(KeyedLoadIC_LoadIndexedStringStub) \
  V(KeyedLoadIC_LoadIndexedInterceptorStub) \
  V(KeyedLoadIC_KeyedLoadSloppyArgumentsStub) \
  V(KeyedLoadIC_LoadElementDH) \
  V(KeyedLoadIC_SlowStub) \
  V(KeyedStoreIC_ElementsTransitionAndStoreStub) \
  V(KeyedStoreIC_KeyedStoreSloppyArgumentsStub) \
  V(KeyedStoreIC_SlowStub) \
  V(KeyedStoreIC_StoreFastElementStub) \
  V(KeyedStoreIC_StoreElementStub) \
  V(LoadIC_FunctionPrototypeStub) \
  V(LoadIC_HandlerCacheHit_AccessCheck) \
  V(LoadIC_HandlerCacheHit_Exotic) \
  V(LoadIC_HandlerCacheHit_Interceptor) \
  V(LoadIC_HandlerCacheHit_JSProxy) \
  V(LoadIC_HandlerCacheHit_NonExistent) \
  V(LoadIC_HandlerCacheHit_Accessor) \
  V(LoadIC_HandlerCacheHit_Data) \
  V(LoadIC_HandlerCacheHit_Transition) \
  V(LoadIC_LoadApiGetterDH) \
  V(LoadIC_LoadApiGetterFromPrototypeDH) \
  V(LoadIC_LoadApiGetterStub) \
  V(LoadIC_LoadCallback) \
  V(LoadIC_LoadConstantDH) \
  V(LoadIC_LoadConstantFromPrototypeDH) \
  V(LoadIC_LoadConstant) \
  V(LoadIC_LoadConstantStub) \
  V(LoadIC_LoadFieldDH) \
  V(LoadIC_LoadFieldFromPrototypeDH) \
  V(LoadIC_LoadField) \
  V(LoadIC_LoadGlobal) \
  V(LoadIC_LoadInterceptor) \
  V(LoadIC_LoadNonexistentDH) \
  V(LoadIC_LoadNonexistent) \
  V(LoadIC_LoadNormal) \
  V(LoadIC_LoadScriptContextFieldStub) \
  V(LoadIC_LoadViaGetter) \
  V(LoadIC_NonReceiver) \
  V(LoadIC_Premonomorphic) \
  V(LoadIC_SlowStub) \
  V(LoadIC_StringLengthStub) \
  V(StoreIC_HandlerCacheHit_AccessCheck) \
  V(StoreIC_HandlerCacheHit_Exotic) \
  V(StoreIC_HandlerCacheHit_Interceptor) \
  V(StoreIC_HandlerCacheHit_JSProxy) \
  V(StoreIC_HandlerCacheHit_NonExistent) \
  V(StoreIC_HandlerCacheHit_Accessor) \
  V(StoreIC_HandlerCacheHit_Data) \
  V(StoreIC_HandlerCacheHit_Transition) \
  V(StoreIC_NonReceiver) \
  V(StoreIC_Premonomorphic) \
  V(StoreIC_SlowStub) \
  V(StoreIC_StoreCallback) \
  V(StoreIC_StoreField) \
  V(StoreIC_StoreFieldDH) \
  V(StoreIC_StoreFieldStub) \
  V(StoreIC_StoreGlobal) \
  V(StoreIC_StoreGlobalTransition) \
  V(StoreIC_StoreInterceptorStub) \
  V(StoreIC_StoreNormal) \
  V(StoreIC_StoreScriptContextFieldStub) \
  V(StoreIC_StoreTransition) \
  V(StoreIC_StoreTransitionDH) \
  V(StoreIC_StoreViaSetter)

class RuntimeCallStats final : public ZoneObject {
 public:
  typedef RuntimeCallCounter RuntimeCallStats::*CounterId;

#define CALL_RUNTIME_COUNTER(name) \
  RuntimeCallCounter name = RuntimeCallCounter(#name);
  FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) \
  RuntimeCallCounter Runtime_##name = RuntimeCallCounter(#name);
  FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name) \
  RuntimeCallCounter Builtin_##name = RuntimeCallCounter(#name);
  BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) \
  RuntimeCallCounter API_##name = RuntimeCallCounter("API_" #name);
  FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) \
  RuntimeCallCounter Handler_##name = RuntimeCallCounter(#name);
  FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER

  static const CounterId counters[];
  static const int counters_count;

  // Start measuring the time for a function. This will establish the
  // connection to the parent counter for properly calculating the own times.
  V8_EXPORT_PRIVATE static void Enter(RuntimeCallStats* stats,
                                      RuntimeCallTimer* timer,
                                      CounterId counter_id);

  // Leave a scope for a measured runtime function. This will properly add
  // the time delta to the current_counter and subtract the delta from its
  // parent.
  V8_EXPORT_PRIVATE static void Leave(RuntimeCallStats* stats,
                                      RuntimeCallTimer* timer);

  // Set counter id for the innermost measurement. It can be used to refine
  // event kind when a runtime entry counter is too generic.
  V8_EXPORT_PRIVATE static void CorrectCurrentCounterId(RuntimeCallStats* stats,
                                                        CounterId counter_id);

  V8_EXPORT_PRIVATE void Reset();
  // Add all entries from another stats object.
  void Add(RuntimeCallStats* other);
  V8_EXPORT_PRIVATE void Print(std::ostream& os);
  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);

  RuntimeCallStats() {
    Reset();
    in_use_ = false;
  }

  RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
  bool InUse() { return in_use_; }

 private:
  // Counter to track recursive time events.
  base::AtomicValue<RuntimeCallTimer*> current_timer_;
  // Used to track nested tracing scopes.
  bool in_use_;
};

#define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_name) \
  do {                                                                   \
    if (V8_UNLIKELY(FLAG_runtime_stats)) {                               \
      RuntimeCallStats::CorrectCurrentCounterId(                         \
          runtime_call_stats, &RuntimeCallStats::counter_name);          \
    }                                                                    \
  } while (false)

#define TRACE_HANDLER_STATS(isolate, counter_name)                          \
  CHANGE_CURRENT_RUNTIME_COUNTER(isolate->counters()->runtime_call_stats(), \
                                 Handler_##counter_name)
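// Illustrative sketches (the counter names are taken from the lists above;
// the RuntimeCallTimerScope class is declared just below):
//
//   // Attribute the time of the enclosing C++ scope to CompileScript:
//   RuntimeCallTimerScope runtime_timer(isolate,
//                                       &RuntimeCallStats::CompileScript);
//
//   // Re-label the innermost active counter with a more specific handler:
//   TRACE_HANDLER_STATS(isolate, LoadIC_SlowStub);
//
// TRACE_HANDLER_STATS only takes effect when FLAG_runtime_stats is set, as
// the macro above shows.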
// A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the
// time of a C++ scope.
class RuntimeCallTimerScope {
 public:
  inline RuntimeCallTimerScope(Isolate* isolate,
                               RuntimeCallStats::CounterId counter_id);
  // This constructor is here just to avoid calling GetIsolate() when the
  // stats are disabled and the isolate is not directly available.
  inline RuntimeCallTimerScope(HeapObject* heap_object,
                               RuntimeCallStats::CounterId counter_id);
  inline RuntimeCallTimerScope(RuntimeCallStats* stats,
                               RuntimeCallStats::CounterId counter_id);

  inline ~RuntimeCallTimerScope() {
    if (V8_UNLIKELY(stats_ != nullptr)) {
      RuntimeCallStats::Leave(stats_, &timer_);
    }
  }

 private:
  V8_INLINE void Initialize(RuntimeCallStats* stats,
                            RuntimeCallStats::CounterId counter_id) {
    stats_ = stats;
    RuntimeCallStats::Enter(stats_, &timer_, counter_id);
  }

  RuntimeCallStats* stats_ = nullptr;
  RuntimeCallTimer timer_;
};

#define HISTOGRAM_RANGE_LIST(HR) \
  /* Generic range histograms */ \
  HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \
  HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101) \
  HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \
  HR(gc_idle_time_limit_undershot, V8.GCIdleTimeLimit.Undershot, 0, 10000, \
     101) \
  HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6) \
  HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20) \
  HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7) \
  HR(incremental_marking_reason, V8.GCIncrementalMarkingReason, 0, 21, 22) \
  HR(mark_compact_reason, V8.GCMarkCompactReason, 0, 21, 22) \
  HR(scavenge_reason, V8.GCScavengeReason, 0, 21, 22) \
  HR(young_generation_handling, V8.GCYoungGenerationHandling, 0, 2, 3) \
  /* Asm/Wasm. */ \
  HR(wasm_functions_per_module, V8.WasmFunctionsPerModule, 1, 10000, 51)

#define HISTOGRAM_TIMER_LIST(HT) \
  /* Garbage collection timers. */ \
  HT(gc_compactor, V8.GCCompactor, 10000, MILLISECOND) \
  HT(gc_finalize, V8.GCFinalizeMC, 10000, MILLISECOND) \
  HT(gc_finalize_reduce_memory, V8.GCFinalizeMCReduceMemory, 10000, \
     MILLISECOND) \
  HT(gc_scavenger, V8.GCScavenger, 10000, MILLISECOND) \
  HT(gc_context, V8.GCContext, 10000, \
     MILLISECOND) /* GC context cleanup time */ \
  HT(gc_idle_notification, V8.GCIdleNotification, 10000, MILLISECOND) \
  HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND) \
  HT(gc_incremental_marking_start, V8.GCIncrementalMarkingStart, 10000, \
     MILLISECOND) \
  HT(gc_incremental_marking_finalize, V8.GCIncrementalMarkingFinalize, 10000, \
     MILLISECOND) \
  HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000, \
     MILLISECOND) \
  /* Compilation times. */ \
  HT(compile, V8.CompileMicroSeconds, 1000000, MICROSECOND) \
  HT(compile_eval, V8.CompileEvalMicroSeconds, 1000000, MICROSECOND) \
  /* Serialization as part of compilation (code caching) */ \
  HT(compile_serialize, V8.CompileSerializeMicroSeconds, 100000, MICROSECOND) \
  HT(compile_deserialize, V8.CompileDeserializeMicroSeconds, 1000000, \
     MICROSECOND) \
  /* Total compilation time incl. caching/parsing */ \
  HT(compile_script, V8.CompileScriptMicroSeconds, 1000000, MICROSECOND) \
  /* Total JavaScript execution time (including callbacks and runtime calls) */ \
  HT(execute, V8.Execute, 1000000, MICROSECOND) \
  /* Asm/Wasm */ \
  HT(wasm_instantiate_module_time, V8.WasmInstantiateModuleMicroSeconds, \
     1000000, MICROSECOND) \
  HT(wasm_decode_module_time, V8.WasmDecodeModuleMicroSeconds, 1000000, \
     MICROSECOND) \
  HT(wasm_decode_function_time, V8.WasmDecodeFunctionMicroSeconds, 1000000, \
     MICROSECOND) \
  HT(wasm_compile_module_time, V8.WasmCompileModuleMicroSeconds, 1000000, \
     MICROSECOND) \
  HT(wasm_compile_function_time, V8.WasmCompileFunctionMicroSeconds, 1000000, \
     MICROSECOND) \
  HT(asm_wasm_translation_time, V8.AsmWasmTranslationMicroSeconds, 1000000, \
     MICROSECOND)

#define AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT) \
  AHT(compile_lazy, V8.CompileLazyMicroSeconds)

#define HISTOGRAM_PERCENTAGE_LIST(HP) \
  /* Heap fragmentation. */ \
  HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal) \
  HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
  HP(external_fragmentation_code_space, \
     V8.MemoryExternalFragmentationCodeSpace) \
  HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
  HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace) \
  /* Percentages of heap committed to each space. */ \
  HP(heap_fraction_new_space, V8.MemoryHeapFractionNewSpace) \
  HP(heap_fraction_old_space, V8.MemoryHeapFractionOldSpace) \
  HP(heap_fraction_code_space, V8.MemoryHeapFractionCodeSpace) \
  HP(heap_fraction_map_space, V8.MemoryHeapFractionMapSpace) \
  HP(heap_fraction_lo_space, V8.MemoryHeapFractionLoSpace)

#define HISTOGRAM_LEGACY_MEMORY_LIST(HM) \
  HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \
  HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
  HM(heap_sample_map_space_committed, V8.MemoryHeapSampleMapSpaceCommitted) \
  HM(heap_sample_code_space_committed, V8.MemoryHeapSampleCodeSpaceCommitted) \
  HM(heap_sample_maximum_committed, V8.MemoryHeapSampleMaximumCommitted)

#define HISTOGRAM_MEMORY_LIST(HM) \
  HM(memory_heap_committed, V8.MemoryHeapCommitted) \
  HM(memory_heap_used, V8.MemoryHeapUsed) \
  /* Asm/Wasm */ \
  HM(wasm_decode_module_peak_memory_bytes, V8.WasmDecodeModulePeakMemoryBytes) \
  HM(wasm_compile_function_peak_memory_bytes, \
     V8.WasmCompileFunctionPeakMemoryBytes) \
  HM(wasm_min_mem_pages_count, V8.WasmMinMemPagesCount) \
  HM(wasm_max_mem_pages_count, V8.WasmMaxMemPagesCount) \
  HM(wasm_function_size_bytes, V8.WasmFunctionSizeBytes) \
  HM(wasm_module_size_bytes, V8.WasmModuleSizeBytes)

// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
// Intellisense to crash. It was broken into two macros (each of length 40
// lines) rather than one macro (of length about 80 lines) to work around
// this problem. Please avoid using recursive macros of this length when
// possible.
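// For readers unfamiliar with this X-macro pattern: each list above and below
// is consumed by temporarily defining the per-entry macro and then invoking
// the list, exactly as the Counters class at the end of this file does. A
// minimal sketch:
//
//   #define SC(name, caption) StatsCounter name##_;
//   STATS_COUNTER_LIST_1(SC)  // expands to: StatsCounter global_handles_;
//   #undef SC                 //             StatsCounter memory_allocated_; ...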
#define STATS_COUNTER_LIST_1(SC) \
  /* Global Handle Count */ \
  SC(global_handles, V8.GlobalHandles) \
  /* OS Memory allocated */ \
  SC(memory_allocated, V8.OsMemoryAllocated) \
  SC(maps_normalized, V8.MapsNormalized) \
  SC(maps_created, V8.MapsCreated) \
  SC(elements_transitions, V8.ObjectElementsTransitions) \
  SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
  SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
  SC(alive_after_last_gc, V8.AliveAfterLastGC) \
  SC(objs_since_last_young, V8.ObjsSinceLastYoung) \
  SC(objs_since_last_full, V8.ObjsSinceLastFull) \
  SC(string_table_capacity, V8.StringTableCapacity) \
  SC(number_of_symbols, V8.NumberOfSymbols) \
  SC(script_wrappers, V8.ScriptWrappers) \
  SC(inlined_copied_elements, V8.InlinedCopiedElements) \
  SC(arguments_adaptors, V8.ArgumentsAdaptors) \
  SC(compilation_cache_hits, V8.CompilationCacheHits) \
  SC(compilation_cache_misses, V8.CompilationCacheMisses) \
  /* Amount of evaled source code. */ \
  SC(total_eval_size, V8.TotalEvalSize) \
  /* Amount of loaded source code. */ \
  SC(total_load_size, V8.TotalLoadSize) \
  /* Amount of parsed source code. */ \
  SC(total_parse_size, V8.TotalParseSize) \
  /* Amount of source code skipped over using preparsing. */ \
  SC(total_preparse_skipped, V8.TotalPreparseSkipped) \
  /* Amount of compiled source code. */ \
  SC(total_compile_size, V8.TotalCompileSize) \
  /* Amount of source code compiled with the full codegen. */ \
  SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \
  /* Number of contexts created from scratch. */ \
  SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \
  /* Number of contexts created by partial snapshot. */ \
  SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \
  /* Number of code objects found from pc. */ \
  SC(pc_to_code, V8.PcToCode) \
  SC(pc_to_code_cached, V8.PcToCodeCached) \
  /* The store-buffer implementation of the write barrier. */ \
  SC(store_buffer_overflows, V8.StoreBufferOverflows)

#define STATS_COUNTER_LIST_2(SC) \
  /* Number of code stubs. */ \
  SC(code_stubs, V8.CodeStubs) \
  /* Amount of stub code. */ \
  SC(total_stubs_code_size, V8.TotalStubsCodeSize) \
  /* Amount of (JS) compiled code. */ \
  SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
  SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
  SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
  SC(gc_compactor_caused_by_oldspace_exhaustion, \
     V8.GCCompactorCausedByOldspaceExhaustion) \
  SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
  SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
  SC(ic_keyed_load_generic_smi, V8.ICKeyedLoadGenericSmi) \
  SC(ic_keyed_load_generic_symbol, V8.ICKeyedLoadGenericSymbol) \
  SC(ic_keyed_load_generic_slow, V8.ICKeyedLoadGenericSlow) \
  SC(ic_named_load_global_stub, V8.ICNamedLoadGlobalStub) \
  SC(ic_store_normal_miss, V8.ICStoreNormalMiss) \
  SC(ic_store_normal_hit, V8.ICStoreNormalHit) \
  SC(ic_binary_op_miss, V8.ICBinaryOpMiss) \
  SC(ic_compare_miss, V8.ICCompareMiss) \
  SC(ic_call_miss, V8.ICCallMiss) \
  SC(ic_keyed_call_miss, V8.ICKeyedCallMiss) \
  SC(ic_store_miss, V8.ICStoreMiss) \
  SC(ic_keyed_store_miss, V8.ICKeyedStoreMiss) \
  SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime) \
  SC(cow_arrays_converted, V8.COWArraysConverted) \
  SC(constructed_objects, V8.ConstructedObjects) \
  SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
  SC(negative_lookups, V8.NegativeLookups) \
  SC(negative_lookups_miss, V8.NegativeLookupsMiss) \
  SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
  SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
  SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
  SC(enum_cache_hits, V8.EnumCacheHits) \
  SC(enum_cache_misses, V8.EnumCacheMisses) \
  SC(fast_new_closure_total, V8.FastNewClosureTotal) \
  SC(string_add_runtime, V8.StringAddRuntime) \
  SC(string_add_native, V8.StringAddNative) \
  SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte) \
  SC(sub_string_runtime, V8.SubStringRuntime) \
  SC(sub_string_native, V8.SubStringNative) \
  SC(string_compare_native, V8.StringCompareNative) \
  SC(string_compare_runtime, V8.StringCompareRuntime) \
  SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
  SC(regexp_entry_native, V8.RegExpEntryNative) \
  SC(number_to_string_native, V8.NumberToStringNative) \
  SC(number_to_string_runtime, V8.NumberToStringRuntime) \
  SC(math_exp_runtime, V8.MathExpRuntime) \
  SC(math_log_runtime, V8.MathLogRuntime) \
  SC(math_pow_runtime, V8.MathPowRuntime) \
  SC(stack_interrupts, V8.StackInterrupts) \
  SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
  SC(runtime_calls, V8.RuntimeCalls) \
  SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
  SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
  SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
  SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
  SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
  /* Number of write barriers in generated code. */ \
  SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
  SC(write_barriers_static, V8.WriteBarriersStatic) \
  SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
  SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
  SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
  SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable) \
  SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted) \
  SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed) \
  SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
  SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
  SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
  SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
  SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
  SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
  SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
  SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
  SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed) \
  /* Total code size (including metadata) of baseline code or bytecode. */ \
  SC(total_baseline_code_size, V8.TotalBaselineCodeSize) \
  /* Total count of functions compiled using the baseline compiler. */ \
  SC(total_baseline_compile_count, V8.TotalBaselineCompileCount) \
  SC(wasm_generated_code_size, V8.WasmGeneratedCodeBytes) \
  SC(wasm_reloc_size, V8.WasmRelocBytes)

// This file contains all the v8 counters that are in use.
class Counters {
 public:
#define HR(name, caption, min, max, num_buckets) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption, max, res) \
  HistogramTimer* name() { return &name##_; }
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define AHT(name, caption) \
  AggregatableHistogramTimer* name() { return &name##_; }
  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
#undef AHT

#define HP(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram* name() { return &name##_; }
  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define HM(name, caption) \
  AggregatedMemoryHistogram<Histogram>* aggregated_##name() { \
    return &aggregated_##name##_; \
  }
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter* name() { return &name##_; }
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_##name() { return &count_of_##name##_; } \
  StatsCounter* size_of_##name() { return &size_of_##name##_; }
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_CODE_TYPE_##name() \
      { return &count_of_CODE_TYPE_##name##_; } \
  StatsCounter* size_of_CODE_TYPE_##name() \
      { return &size_of_CODE_TYPE_##name##_; }
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_FIXED_ARRAY_##name() \
      { return &count_of_FIXED_ARRAY_##name##_; } \
  StatsCounter* size_of_FIXED_ARRAY_##name() \
      { return &size_of_FIXED_ARRAY_##name##_; }
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter* count_of_CODE_AGE_##name() \
      { return &count_of_CODE_AGE_##name##_; } \
  StatsCounter* size_of_CODE_AGE_##name() \
      { return &size_of_CODE_AGE_##name##_; }
  CODE_AGE_LIST_COMPLETE(SC)
#undef SC

  enum Id {
#define RATE_ID(name, caption, max, res) k_##name,
    HISTOGRAM_TIMER_LIST(RATE_ID)
#undef RATE_ID
#define AGGREGATABLE_ID(name, caption) k_##name,
    AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
#undef AGGREGATABLE_ID
#define PERCENTAGE_ID(name, caption) k_##name,
    HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
#undef PERCENTAGE_ID
#define MEMORY_ID(name, caption) k_##name,
    HISTOGRAM_LEGACY_MEMORY_LIST(MEMORY_ID)
    HISTOGRAM_MEMORY_LIST(MEMORY_ID)
#undef MEMORY_ID
#define COUNTER_ID(name, caption) k_##name,
    STATS_COUNTER_LIST_1(COUNTER_ID)
    STATS_COUNTER_LIST_2(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
    INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
    kSizeOfCODE_TYPE_##name,
    CODE_KIND_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
    kSizeOfFIXED_ARRAY__##name,
    FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_AGE__##name, \
    kSizeOfCODE_AGE__##name,
    CODE_AGE_LIST_COMPLETE(COUNTER_ID)
#undef COUNTER_ID
    stats_counter_count
  };

  void ResetCounters();
  void ResetHistograms();
  RuntimeCallStats* runtime_call_stats() { return &runtime_call_stats_; }

 private:
#define HR(name, caption, min, max, num_buckets) Histogram name##_;
  HISTOGRAM_RANGE_LIST(HR)
#undef HR

#define HT(name, caption, max, res) HistogramTimer name##_;
  HISTOGRAM_TIMER_LIST(HT)
#undef HT

#define AHT(name, caption) \
  AggregatableHistogramTimer name##_;
  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
#undef AHT

#define HP(name, caption) \
  Histogram name##_;
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption) \
  Histogram name##_;
  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define HM(name, caption) \
  AggregatedMemoryHistogram<Histogram> aggregated_##name##_;
  HISTOGRAM_MEMORY_LIST(HM)
#undef HM

#define SC(name, caption) \
  StatsCounter name##_;
  STATS_COUNTER_LIST_1(SC)
  STATS_COUNTER_LIST_2(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_##name##_; \
  StatsCounter count_of_##name##_;
  INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_TYPE_##name##_; \
  StatsCounter count_of_CODE_TYPE_##name##_;
  CODE_KIND_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_FIXED_ARRAY_##name##_; \
  StatsCounter count_of_FIXED_ARRAY_##name##_;
  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC

#define SC(name) \
  StatsCounter size_of_CODE_AGE_##name##_; \
  StatsCounter count_of_CODE_AGE_##name##_;
  CODE_AGE_LIST_COMPLETE(SC)
#undef SC

  RuntimeCallStats runtime_call_stats_;

  friend class Isolate;

  explicit Counters(Isolate* isolate);

  DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_COUNTERS_H_