// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/runtime-profiler.h"

#include "src/assembler.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/execution.h"
#include "src/full-codegen.h"
#include "src/global-handles.h"
#include "src/heap/mark-compact.h"
#include "src/isolate-inl.h"
#include "src/scopeinfo.h"

namespace v8 {
namespace internal {


// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If the function optimization was disabled due to high deoptimization count,
// but the function is hot and has been seen on the stack this number of times,
// then we try to reenable optimization for this function.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);

// Maximum size in bytes of generated code for a function to allow OSR.
static const int kOSRCodeSizeAllowanceBase =
    100 * FullCodeGenerator::kCodeSizeMultiplier;

static const int kOSRCodeSizeAllowancePerTick =
    4 * FullCodeGenerator::kCodeSizeMultiplier;
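// Illustrative arithmetic for the two OSR constants above (assumption:
// kCodeSizeMultiplier is platform-specific; take it to be 100 for this
// example): the allowance would start at 100 * 100 = 10000 bytes of
// unoptimized code and grow by 4 * 100 = 400 bytes per profiler tick, so
// bigger functions become OSR-eligible only after more ticks.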

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt =
    5 * FullCodeGenerator::kCodeSizeMultiplier;


RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      any_ic_changed_(false) {
}


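// Reads the IC counters from the code object's TypeFeedbackInfo and derives
// the two percentages the heuristics below compare against thresholds.
// Worked example: 80 total ICs, of which 50 carry type info and 10 went
// generic, yields *type_info_percentage == 62 and *generic_percentage == 12
// (integer division).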
static void GetICCounts(Code* shared_code, int* ic_with_type_info_count,
                        int* ic_generic_count, int* ic_total_count,
                        int* type_info_percentage, int* generic_percentage) {
  *ic_total_count = 0;
  *ic_generic_count = 0;
  *ic_with_type_info_count = 0;
  Object* raw_info = shared_code->type_feedback_info();
  if (raw_info->IsTypeFeedbackInfo()) {
    TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
    *ic_with_type_info_count = info->ic_with_type_info_count();
    *ic_generic_count = info->ic_generic_count();
    *ic_total_count = info->ic_total_count();
  }
  if (*ic_total_count > 0) {
    *type_info_percentage = 100 * *ic_with_type_info_count / *ic_total_count;
    *generic_percentage = 100 * *ic_generic_count / *ic_total_count;
  } else {
    *type_info_percentage = 100;  // Compared against lower bound.
    *generic_percentage = 0;      // Compared against upper bound.
  }
}


void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
  DCHECK(function->IsOptimizable());

  if (FLAG_trace_opt && function->PassesFilter(FLAG_hydrogen_filter)) {
    PrintF("[marking ");
    function->ShortPrint();
    PrintF(" for recompilation, reason: %s", reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, generic, total, type_percentage, generic_percentage;
      GetICCounts(function->shared()->code(), &typeinfo, &generic, &total,
                  &type_percentage, &generic_percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
             type_percentage);
      PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
    }
    PrintF("]\n");
  }

  if (isolate_->concurrent_recompilation_enabled() &&
      !isolate_->bootstrapper()->IsActive()) {
    if (isolate_->concurrent_osr_enabled() &&
        isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) {
      // Do not attempt regular recompilation if we already queued this for OSR.
      // TODO(yangguo): This is necessary so that we don't install optimized
      // code on a function that is already optimized, since OSR and regular
      // recompilation race.  This goes away as soon as OSR becomes one-shot.
      return;
    }
    DCHECK(!function->IsInOptimizationQueue());
    function->MarkForConcurrentOptimization();
  } else {
    // The next call to the function will trigger optimization.
    function->MarkForOptimization();
  }
}


void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
                                                int loop_nesting_levels) {
  SharedFunctionInfo* shared = function->shared();
  // See AlwaysFullCompiler (in compiler.cc) comment on why we need
  // Debug::has_break_points().
  if (!FLAG_use_osr ||
      isolate_->DebuggerHasBreakPoints() ||
      function->IsBuiltin()) {
    return;
  }

  // If the code is not optimizable, don't try OSR.
  if (!shared->code()->optimizable()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object.  The optimized code would bypass it for
  // arguments accesses, which is unsound.  Don't try OSR.
  if (shared->uses_arguments()) return;

  // We're using on-stack replacement: patch the unoptimized code so that
  // any back edge in any unoptimized frame will trigger on-stack
  // replacement for that frame.
  if (FLAG_trace_osr) {
    PrintF("[OSR - patching back edges in ");
    function->PrintName();
    PrintF("]\n");
  }

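  // Each BackEdgeTable::Patch call arms back edges at the next deeper loop
  // nesting level (assumption from the surrounding code: Patch advances the
  // code object's allowed OSR nesting level by one per call), so looping
  // loop_nesting_levels times arms loops nested that many levels deep.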
  for (int i = 0; i < loop_nesting_levels; i++) {
    BackEdgeTable::Patch(isolate_, shared->code());
  }
}


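// Called once per profiler tick (typically from the stack guard's interrupt
// handling) to walk the top JavaScript frames and decide, per sampled
// function, whether to mark it for optimization or attempt OSR.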
void RuntimeProfiler::OptimizeNow() {
  HandleScope scope(isolate_);

  if (isolate_->DebuggerHasBreakPoints()) return;

  DisallowHeapAllocation no_gc;

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimization
  // (eagerly or lazily).
  int frame_count = 0;
  int frame_count_limit = FLAG_frame_count;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = frame->function();

    SharedFunctionInfo* shared = function->shared();
    Code* shared_code = shared->code();

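    // Bump the profiler tick count on every function in this frame; for an
    // optimized frame this includes functions that were inlined into it.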
    List<JSFunction*> functions(4);
    frame->GetFunctions(&functions);
    for (int i = functions.length(); --i >= 0; ) {
      SharedFunctionInfo* shared_function_info = functions[i]->shared();
      int ticks = shared_function_info->profiler_ticks();
      if (ticks < Smi::kMaxValue) {
        shared_function_info->set_profiler_ticks(ticks + 1);
      }
    }

    if (shared_code->kind() != Code::FUNCTION) continue;
    if (function->IsInOptimizationQueue()) continue;

    if (FLAG_always_osr) {
      AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker);
      // Fall through and do a normal optimized compile as well.
    } else if (!frame->is_optimized() &&
        (function->IsMarkedForOptimization() ||
         function->IsMarkedForConcurrentOptimization() ||
         function->IsOptimized())) {
      // Attempt OSR if we are still running unoptimized code even though
      // the function has long been marked or even already been optimized.
      int ticks = shared_code->profiler_ticks();
      int allowance = kOSRCodeSizeAllowanceBase +
                      ticks * kOSRCodeSizeAllowancePerTick;
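      // The allowance grows with each tick, so a function currently too big
      // for OSR can still qualify after staying hot long enough; until then
      // we only keep counting ticks.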
      if (shared_code->CodeSize() > allowance) {
        if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1);
      } else {
        AttemptOnStackReplacement(function);
      }
      continue;
    }

    // Only record top-level code on top of the execution stack and
    // avoid optimizing excessively large scripts since top-level code
    // will be executed only once.
    const int kMaxToplevelSourceSize = 10 * 1024;
    if (shared->is_toplevel() &&
        (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
      continue;
    }

    // Do not record non-optimizable functions.
    if (shared->optimization_disabled()) {
      if (shared->deopt_count() >= FLAG_max_opt_count) {
        // If optimization was disabled due to many deoptimizations,
        // then check if the function is hot and try to reenable optimization.
        int ticks = shared_code->profiler_ticks();
        if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
          shared_code->set_profiler_ticks(0);
          shared->TryReenableOptimization();
        } else {
          shared_code->set_profiler_ticks(ticks + 1);
        }
      }
      continue;
    }
    if (!function->IsOptimizable()) continue;

    int ticks = shared_code->profiler_ticks();

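    // Decision tree: a function with enough ticks is optimized once its IC
    // feedback looks stable; a very small function may be optimized early
    // when no IC changed since the last tick; otherwise keep counting.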
    if (ticks >= kProfilerTicksBeforeOptimization) {
      int typeinfo, generic, total, type_percentage, generic_percentage;
      GetICCounts(shared_code, &typeinfo, &generic, &total, &type_percentage,
                  &generic_percentage);
      if (type_percentage >= FLAG_type_info_threshold &&
          generic_percentage <= FLAG_generic_ic_threshold) {
        // If this particular function hasn't had any ICs patched for enough
        // ticks, optimize it now.
        Optimize(function, "hot and stable");
      } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
        Optimize(function, "not much type info but very hot");
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
        if (FLAG_trace_opt_verbose) {
          PrintF("[not yet optimizing ");
          function->PrintName();
          PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
                 type_percentage);
        }
      }
    } else if (!any_ic_changed_ &&
               shared_code->instruction_size() < kMaxSizeEarlyOpt) {
      // If no IC was patched since the last tick and this function is very
      // small, optimistically optimize it now.
      int typeinfo, generic, total, type_percentage, generic_percentage;
      GetICCounts(shared_code, &typeinfo, &generic, &total, &type_percentage,
                  &generic_percentage);
      if (type_percentage >= FLAG_type_info_threshold &&
          generic_percentage <= FLAG_generic_ic_threshold) {
        Optimize(function, "small function");
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
    }
  }
  any_ic_changed_ = false;
}


} }  // namespace v8::internal