Home | History | Annotate | Download | only in src
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/v8.h"
      6 
      7 #include "src/compiler.h"
      8 
      9 #include "src/bootstrapper.h"
     10 #include "src/codegen.h"
     11 #include "src/compilation-cache.h"
     12 #include "src/cpu-profiler.h"
     13 #include "src/debug.h"
     14 #include "src/deoptimizer.h"
     15 #include "src/full-codegen.h"
     16 #include "src/gdb-jit.h"
     17 #include "src/typing.h"
     18 #include "src/hydrogen.h"
     19 #include "src/isolate-inl.h"
     20 #include "src/lithium.h"
     21 #include "src/liveedit.h"
     22 #include "src/parser.h"
     23 #include "src/rewriter.h"
     24 #include "src/runtime-profiler.h"
     25 #include "src/scanner-character-streams.h"
     26 #include "src/scopeinfo.h"
     27 #include "src/scopes.h"
     28 #include "src/vm-state-inl.h"
     29 
     30 namespace v8 {
     31 namespace internal {
     32 
     33 
// Creates a CompilationInfo for compiling a whole script (BASE mode).
// Strict mode starts out SLOPPY and is refined later, after parsing.
CompilationInfo::CompilationInfo(Handle<Script> script,
                                 Zone* zone)
    : flags_(StrictModeField::encode(SLOPPY)),
      script_(script),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      this_has_uses_(true),
      optimization_id_(-1) {
  Initialize(script->GetIsolate(), BASE, zone);
}
     44 
     45 
// Creates a CompilationInfo for lazily compiling a function for which only
// the SharedFunctionInfo is available (no closure or context yet).
CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                                 Zone* zone)
    : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
      shared_info_(shared_info),
      // The owning script is reachable through the shared function info.
      script_(Handle<Script>(Script::cast(shared_info->script()))),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      this_has_uses_(true),
      optimization_id_(-1) {
  Initialize(script_->GetIsolate(), BASE, zone);
}
     57 
     58 
// Creates a CompilationInfo for lazily compiling a concrete closure.
// Shared info, script, and context are all derived from the closure.
CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
                                 Zone* zone)
    : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
      closure_(closure),
      shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
      script_(Handle<Script>(Script::cast(shared_info_->script()))),
      context_(closure->context()),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      this_has_uses_(true),
      optimization_id_(-1) {
  Initialize(script_->GetIsolate(), BASE, zone);
}
     72 
     73 
// Creates a CompilationInfo for compiling a hydrogen code stub (STUB mode).
CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
                                 Isolate* isolate,
                                 Zone* zone)
    : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      this_has_uses_(true),
      optimization_id_(-1) {
  Initialize(isolate, STUB, zone);
  // Must be assigned after Initialize(), which resets code_stub_ to NULL.
  code_stub_ = stub;
}
     85 
     86 
// Shared constructor tail: resets all lazily-populated state, records the
// compilation mode and, for non-stub compilations, derives initial flags
// (native, debug, strict mode) from the script and shared function info.
void CompilationInfo::Initialize(Isolate* isolate,
                                 Mode mode,
                                 Zone* zone) {
  isolate_ = isolate;
  function_ = NULL;
  scope_ = NULL;
  global_scope_ = NULL;
  extension_ = NULL;
  cached_data_ = NULL;
  cached_data_mode_ = NO_CACHED_DATA;
  zone_ = zone;
  deferred_handles_ = NULL;
  code_stub_ = NULL;
  prologue_offset_ = Code::kPrologueOffsetNotSet;
  // Carry over the optimization attempt count from previous compiles, if any.
  opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
  // Only pay for frame-range bookkeeping while the CPU profiler is running.
  no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
                   ? new List<OffsetRange>(2) : NULL;
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    dependencies_[i] = NULL;
  }
  // Code stubs have no script or shared info; skip the flag derivation below.
  if (mode == STUB) {
    mode_ = STUB;
    return;
  }
  mode_ = mode;
  abort_due_to_dependency_ = false;
  if (script_->type()->value() == Script::TYPE_NATIVE) MarkAsNative();
  if (isolate_->debug()->is_active()) MarkAsDebug();

  if (!shared_info_.is_null()) {
    ASSERT(strict_mode() == SLOPPY);
    SetStrictMode(shared_info_->strict_mode());
  }
  set_bailout_reason(kUnknown);

  if (!shared_info().is_null() && shared_info()->is_compiled()) {
    // We should initialize the CompilationInfo feedback vector from the
    // passed in shared info, rather than creating a new one.
    feedback_vector_ = Handle<FixedArray>(shared_info()->feedback_vector(),
                                          isolate);
  }
}
    129 
    130 
// Frees the heap-allocated side tables.  Dependency lists are zone-allocated
// and must already have been committed or rolled back (checked in DEBUG).
CompilationInfo::~CompilationInfo() {
  delete deferred_handles_;
  delete no_frame_ranges_;
#ifdef DEBUG
  // Check that no dependent maps have been added or added dependent maps have
  // been rolled back or committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ASSERT_EQ(NULL, dependencies_[i]);
  }
#endif  // DEBUG
}
    142 
    143 
    144 void CompilationInfo::CommitDependencies(Handle<Code> code) {
    145   for (int i = 0; i < DependentCode::kGroupCount; i++) {
    146     ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    147     if (group_objects == NULL) continue;
    148     ASSERT(!object_wrapper_.is_null());
    149     for (int j = 0; j < group_objects->length(); j++) {
    150       DependentCode::DependencyGroup group =
    151           static_cast<DependentCode::DependencyGroup>(i);
    152       DependentCode* dependent_code =
    153           DependentCode::ForObject(group_objects->at(j), group);
    154       dependent_code->UpdateToFinishedCode(group, this, *code);
    155     }
    156     dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
    157   }
    158 }
    159 
    160 
    161 void CompilationInfo::RollbackDependencies() {
    162   // Unregister from all dependent maps if not yet committed.
    163   for (int i = 0; i < DependentCode::kGroupCount; i++) {
    164     ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    165     if (group_objects == NULL) continue;
    166     for (int j = 0; j < group_objects->length(); j++) {
    167       DependentCode::DependencyGroup group =
    168           static_cast<DependentCode::DependencyGroup>(i);
    169       DependentCode* dependent_code =
    170           DependentCode::ForObject(group_objects->at(j), group);
    171       dependent_code->RemoveCompilationInfo(group, this);
    172     }
    173     dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
    174   }
    175 }
    176 
    177 
    178 int CompilationInfo::num_parameters() const {
    179   if (IsStub()) {
    180     ASSERT(parameter_count_ > 0);
    181     return parameter_count_;
    182   } else {
    183     return scope()->num_parameters();
    184   }
    185 }
    186 
    187 
    188 int CompilationInfo::num_heap_slots() const {
    189   if (IsStub()) {
    190     return 0;
    191   } else {
    192     return scope()->num_heap_slots();
    193   }
    194 }
    195 
    196 
    197 Code::Flags CompilationInfo::flags() const {
    198   if (IsStub()) {
    199     return Code::ComputeFlags(code_stub()->GetCodeKind(),
    200                               code_stub()->GetICState(),
    201                               code_stub()->GetExtraICState(),
    202                               code_stub()->GetStubType());
    203   } else {
    204     return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
    205   }
    206 }
    207 
    208 
// Disable optimization for the rest of the compilation pipeline.
// Stays in BASE mode (re-optimizable later) only for closures passing the
// heuristic below; everything else is demoted to NONOPT.
void CompilationInfo::DisableOptimization() {
  // NOTE(review): this treats the closure as optimizable only when closure_
  // is *null* -- confirm that the polarity of is_null() is intended here.
  bool is_optimizable_closure =
    FLAG_optimize_closures &&
    closure_.is_null() &&
    !scope_->HasTrivialOuterContext() &&
    !scope_->outer_scope_calls_sloppy_eval() &&
    !scope_->inside_with();
  SetMode(is_optimizable_closure ? BASE : NONOPT);
}
    219 
    220 
    221 // Primitive functions are unlikely to be picked up by the stack-walking
    222 // profiler, so they trigger their own optimization when they're called
    223 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
    224 bool CompilationInfo::ShouldSelfOptimize() {
    225   return FLAG_crankshaft &&
    226       !function()->flags()->Contains(kDontSelfOptimize) &&
    227       !function()->dont_optimize() &&
    228       function()->scope()->AllowsLazyCompilation() &&
    229       (shared_info().is_null() || !shared_info()->optimization_disabled());
    230 }
    231 
    232 
// Attaches the analyzed scope and ensures a type feedback vector of the
// size the AST expects, reusing the vector taken from the shared info at
// construction time when one was available.
void CompilationInfo::PrepareForCompilation(Scope* scope) {
  ASSERT(scope_ == NULL);
  scope_ = scope;

  int length = function()->slot_count();
  if (feedback_vector_.is_null()) {
    // Allocate the feedback vector too.
    feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length);
  }
  ASSERT(feedback_vector_->length() == length);
}
    244 
    245 
// Graph builder variant that keeps the current source position in sync
// while walking the AST: each expression/statement visit first records the
// node's position (when it has one) before delegating to the base builder.
// Selected when FLAG_hydrogen_track_positions is on (see CreateGraph).
class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
 public:
  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
      : HOptimizedGraphBuilder(info) {
  }

  // Expression visits: update the source position, then delegate.
#define DEF_VISIT(type)                                 \
  virtual void Visit##type(type* node) V8_OVERRIDE {    \
    if (node->position() != RelocInfo::kNoPosition) {   \
      SetSourcePosition(node->position());              \
    }                                                   \
    HOptimizedGraphBuilder::Visit##type(node);          \
  }
  EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Statement visits: same position bookkeeping as expressions.
#define DEF_VISIT(type)                                          \
  virtual void Visit##type(type* node) V8_OVERRIDE {             \
    if (node->position() != RelocInfo::kNoPosition) {            \
      SetSourcePosition(node->position());                       \
    }                                                            \
    HOptimizedGraphBuilder::Visit##type(node);                   \
  }
  STATEMENT_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Modules and declarations carry no position update; plain delegation.
#define DEF_VISIT(type)                                            \
  virtual void Visit##type(type* node) V8_OVERRIDE {               \
    HOptimizedGraphBuilder::Visit##type(node);                     \
  }
  MODULE_NODE_LIST(DEF_VISIT)
  DECLARATION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};
    280 
    281 
// Phase 1 of an optimized compile job: checks every condition that can veto
// Crankshaft (flags, debugger, opt count, Lithium operand-encoding limits,
// illegal redeclarations, --hydrogen-filter), recompiles the unoptimized
// code with deoptimization support when it lacks it, runs type analysis,
// and finally builds the Hydrogen graph.  Returns SUCCEEDED, FAILED, or
// one of the abort statuses.
OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
  ASSERT(isolate()->use_crankshaft());
  ASSERT(info()->IsOptimizing());
  ASSERT(!info()->IsCompilingForDebugging());

  // We should never arrive here if there is no code object on the
  // shared function object.
  ASSERT(info()->shared_info()->code()->kind() == Code::FUNCTION);

  // We should never arrive here if optimization has been disabled on the
  // shared function info.
  ASSERT(!info()->shared_info()->optimization_disabled());

  // Fall back to using the full code generator if it's not possible
  // to use the Hydrogen-based optimizing compiler. We already have
  // generated code for this from the shared function object.
  if (FLAG_always_full_compiler) return AbortOptimization();

  // Do not use crankshaft if we need to be able to set break points.
  if (isolate()->DebuggerHasBreakPoints()) {
    return AbortOptimization(kDebuggerHasBreakPoints);
  }

  // Limit the number of times we re-compile a functions with
  // the optimizing compiler.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info()->opt_count() > kMaxOptCount) {
    return AbortAndDisableOptimization(kOptimizedTooManyTimes);
  }

  // Due to an encoding limit on LUnallocated operands in the Lithium
  // language, we cannot optimize functions with too many formal parameters
  // or perform on-stack replacement for function with too many
  // stack-allocated local variables.
  //
  // The encoding is as a signed value, with parameters and receiver using
  // the negative indices and locals the non-negative ones.
  const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
  Scope* scope = info()->scope();
  if ((scope->num_parameters() + 1) > parameter_limit) {
    return AbortAndDisableOptimization(kTooManyParameters);
  }

  const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
  if (info()->is_osr() &&
      scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
    return AbortAndDisableOptimization(kTooManyParametersLocals);
  }

  if (scope->HasIllegalRedeclaration()) {
    return AbortAndDisableOptimization(kFunctionWithIllegalRedeclaration);
  }

  // Take --hydrogen-filter into account.
  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
    return AbortOptimization(kHydrogenFilter);
  }

  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support. Alternatively, we may decide to
  // run the full code generator to get a baseline for the compile-time
  // performance of the hydrogen-based compiler.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    CompilationInfoWithZone unoptimized(info()->shared_info());
    // Note that we use the same AST that we will use for generating the
    // optimized code.
    unoptimized.SetFunction(info()->function());
    unoptimized.PrepareForCompilation(info()->scope());
    unoptimized.SetContext(info()->context());
    if (should_recompile) unoptimized.EnableDeoptimizationSupport();
    bool succeeded = FullCodeGenerator::MakeCode(&unoptimized);
    if (should_recompile) {
      if (!succeeded) return SetLastStatus(FAILED);
      Handle<SharedFunctionInfo> shared = info()->shared_info();
      shared->EnableDeoptimizationSupport(*unoptimized.code());
      // The existing unoptimized code was replaced with the new one.
      Compiler::RecordFunctionCompilation(
          Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }

  // Check that the unoptimized, shared code is ready for
  // optimizations.  When using the always_opt flag we disregard the
  // optimizable marker in the code object and optimize anyway. This
  // is safe as long as the unoptimized code has deoptimization
  // support.
  ASSERT(FLAG_always_opt || info()->shared_info()->code()->optimizable());
  ASSERT(info()->shared_info()->has_deoptimization_support());

  if (FLAG_trace_hydrogen) {
    Handle<String> name = info()->function()->debug_name();
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling method %s using hydrogen\n", name->ToCString().get());
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Type-check the function.
  AstTyper::Run(info());

  // With --hydrogen-track-positions a position-tracking builder subclass
  // is used; both are zone-allocated, so no explicit delete is needed.
  graph_builder_ = FLAG_hydrogen_track_positions
      ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info())
      : new(info()->zone()) HOptimizedGraphBuilder(info());

  Timer t(this, &time_taken_to_create_graph_);
  info()->set_this_has_uses(false);
  graph_ = graph_builder_->CreateGraph();

  if (isolate()->has_pending_exception()) {
    return SetLastStatus(FAILED);
  }

  // The function being compiled may have bailed out due to an inline
  // candidate bailing out.  In such a case, we don't disable
  // optimization on the shared_info.
  ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL);
  if (graph_ == NULL) {
    if (graph_builder_->inline_bailout()) {
      return AbortOptimization();
    } else {
      return AbortAndDisableOptimization();
    }
  }

  if (info()->HasAbortedDueToDependencyChange()) {
    return AbortOptimization(kBailedOutDueToDependencyChange);
  }

  return SetLastStatus(SUCCEEDED);
}
    420 
    421 
// Phase 2 of an optimized compile job: runs the Hydrogen optimization
// pipeline on the graph built by CreateGraph() and lowers it to a Lithium
// chunk.  The Disallow* scopes forbid heap allocation, handle creation,
// handle dereferencing, and dependency changes for the whole phase.
// NOTE(review): the scopes suggest this is safe to run off the main
// thread -- confirm against the concurrent-recompilation callers.
OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  ASSERT(last_status() == SUCCEEDED);
  Timer t(this, &time_taken_to_optimize_);
  ASSERT(graph_ != NULL);
  BailoutReason bailout_reason = kNoReason;

  if (graph_->Optimize(&bailout_reason)) {
    chunk_ = LChunk::NewChunk(graph_);
    if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
  } else if (bailout_reason != kNoReason) {
    // Report the concrete bailout reason through the graph builder.
    graph_builder_->Bailout(bailout_reason);
  }

  return AbortOptimization();
}
    442 
    443 
// Phase 3 of an optimized compile job: generates machine code from the
// Lithium chunk.  Map-deprecation/instability failures only abort this
// attempt (AbortOptimization); any other codegen failure disables
// optimization for the function (AbortAndDisableOptimization).
OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
  ASSERT(last_status() == SUCCEEDED);
  ASSERT(!info()->HasAbortedDueToDependencyChange());
  DisallowCodeDependencyChange no_dependency_change;
  {  // Scope for timer.
    Timer timer(this, &time_taken_to_codegen_);
    ASSERT(chunk_ != NULL);
    ASSERT(graph_ != NULL);
    // Deferred handles reference objects that were accessible during
    // graph creation.  To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      // Record a generic reason only if no more specific one was set.
      if (info()->bailout_reason() == kNoReason) {
        info_->set_bailout_reason(kCodeGenerationFailed);
      } else if (info()->bailout_reason() == kMapBecameDeprecated) {
        if (FLAG_trace_opt) {
          PrintF("[aborted optimizing ");
          info()->closure()->ShortPrint();
          PrintF(" because a map became deprecated]\n");
        }
        return AbortOptimization();
      } else if (info()->bailout_reason() == kMapBecameUnstable) {
        if (FLAG_trace_opt) {
          PrintF("[aborted optimizing ");
          info()->closure()->ShortPrint();
          PrintF(" because a map became unstable]\n");
        }
        return AbortOptimization();
      }
      return AbortAndDisableOptimization();
    }
    info()->SetCode(optimized_code);
  }
  RecordOptimizationStats();
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SetLastStatus(SUCCEEDED);
}
    485 
    486 
// Bumps the per-function optimization counter and, depending on tracing
// flags, prints timing/size statistics for the finished optimization.
void OptimizedCompileJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race.  Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    // NOTE(review): these function-local statics are unsynchronized;
    // acceptable if stats tracing only runs on one thread -- confirm.
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}
    522 
    523 
    524 // Sets the expected number of properties based on estimate from compiler.
    525 void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
    526                                           int estimate) {
    527   // If no properties are added in the constructor, they are more likely
    528   // to be added later.
    529   if (estimate == 0) estimate = 2;
    530 
    531   // TODO(yangguo): check whether those heuristics are still up-to-date.
    532   // We do not shrink objects that go into a snapshot (yet), so we adjust
    533   // the estimate conservatively.
    534   if (shared->GetIsolate()->serializer_enabled()) {
    535     estimate += 2;
    536   } else if (FLAG_clever_optimizations) {
    537     // Inobject slack tracking will reclaim redundant inobject space later,
    538     // so we can afford to adjust the estimate generously.
    539     estimate += 8;
    540   } else {
    541     estimate += 3;
    542   }
    543 
    544   shared->set_expected_nof_properties(estimate);
    545 }
    546 
    547 
// Copies the results of an unoptimized compile (scope info, code, feedback
// vector, and literal-derived attributes) onto the shared function info.
static void UpdateSharedFunctionInfo(CompilationInfo* info) {
  // Update the shared function info with the compiled code and the
  // scope info.  Please note, that the order of the shared function
  // info initialization is important since set_scope_info might
  // trigger a GC, causing the ASSERT below to be invalid if the code
  // was flushed. By setting the code object last we avoid this.
  Handle<SharedFunctionInfo> shared = info->shared_info();
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->scope(), info->zone());
  shared->set_scope_info(*scope_info);

  Handle<Code> code = info->code();
  CHECK(code->kind() == Code::FUNCTION);
  shared->ReplaceCode(*code);
  if (shared->optimization_disabled()) code->set_optimizable(false);

  shared->set_feedback_vector(*info->feedback_vector());

  // Set the expected number of properties for instances.
  FunctionLiteral* lit = info->function();
  int expected = lit->expected_property_count();
  SetExpectedNofPropertiesFromEstimate(shared, expected);

  // Check the function has compiled code.
  ASSERT(shared->is_compiled());
  shared->set_dont_optimize_reason(lit->dont_optimize_reason());
  shared->set_dont_inline(lit->flags()->Contains(kDontInline));
  shared->set_ast_node_count(lit->ast_node_count());
  shared->set_strict_mode(lit->strict_mode());
}
    578 
    579 
// Sets the function info on a function.
// The start_position points to the first '(' character after the function name
// in the full script source. When counting characters in the script source the
// the first character is number 0 (not 1).
// Only literal-derived attributes are copied here; no code object is
// attached by this function.
static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
                            FunctionLiteral* lit,
                            bool is_toplevel,
                            Handle<Script> script) {
  function_info->set_length(lit->parameter_count());
  function_info->set_formal_parameter_count(lit->parameter_count());
  function_info->set_script(*script);
  function_info->set_function_token_position(lit->function_token_position());
  function_info->set_start_position(lit->start_position());
  function_info->set_end_position(lit->end_position());
  function_info->set_is_expression(lit->is_expression());
  function_info->set_is_anonymous(lit->is_anonymous());
  function_info->set_is_toplevel(is_toplevel);
  function_info->set_inferred_name(*lit->inferred_name());
  function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
  function_info->set_allows_lazy_compilation_without_context(
      lit->AllowsLazyCompilationWithoutContext());
  function_info->set_strict_mode(lit->strict_mode());
  function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
  function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
  function_info->set_ast_node_count(lit->ast_node_count());
  function_info->set_is_function(lit->is_function());
  function_info->set_dont_optimize_reason(lit->dont_optimize_reason());
  function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
  function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
  function_info->set_is_generator(lit->is_generator());
}
    611 
    612 
    613 static bool CompileUnoptimizedCode(CompilationInfo* info) {
    614   ASSERT(info->function() != NULL);
    615   if (!Rewriter::Rewrite(info)) return false;
    616   if (!Scope::Analyze(info)) return false;
    617   ASSERT(info->scope() != NULL);
    618 
    619   if (!FullCodeGenerator::MakeCode(info)) {
    620     Isolate* isolate = info->isolate();
    621     if (!isolate->has_pending_exception()) isolate->StackOverflow();
    622     return false;
    623   }
    624   return true;
    625 }
    626 
    627 
// Parses and compiles unoptimized (full-codegen) code for |info|, updates
// the shared function info with the result, and returns the code object.
// Returns an empty handle on parse or compile failure.
MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
    CompilationInfo* info) {
  VMState<COMPILER> state(info->isolate());
  PostponeInterruptsScope postpone(info->isolate());
  if (!Parser::Parse(info)) return MaybeHandle<Code>();
  // The function's strict mode is only known after parsing.
  info->SetStrictMode(info->function()->strict_mode());

  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
  Compiler::RecordFunctionCompilation(
      Logger::LAZY_COMPILE_TAG, info, info->shared_info());
  UpdateSharedFunctionInfo(info);
  ASSERT_EQ(Code::FUNCTION, info->code()->kind());
  return info->code();
}
    642 
    643 
// Returns unoptimized code for |function|, reusing code already attached
// to the shared function info when available.  With --always-opt, also
// tries to optimize the freshly compiled code right away, falling back to
// the unoptimized result if optimization fails.
MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
  ASSERT(!function->GetIsolate()->has_pending_exception());
  ASSERT(!function->is_compiled());
  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  CompilationInfoWithZone info(function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
                             GetUnoptimizedCodeCommon(&info),
                             Code);

  if (FLAG_always_opt &&
      info.isolate()->use_crankshaft() &&
      !info.shared_info()->optimization_disabled() &&
      !info.isolate()->DebuggerHasBreakPoints()) {
    Handle<Code> opt_code;
    // Optimization failure is not propagated; keep the unoptimized code.
    if (Compiler::GetOptimizedCode(
            function, result,
            Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
      result = opt_code;
    }
  }

  return result;
}
    671 
    672 
    673 MaybeHandle<Code> Compiler::GetUnoptimizedCode(
    674     Handle<SharedFunctionInfo> shared) {
    675   ASSERT(!shared->GetIsolate()->has_pending_exception());
    676   ASSERT(!shared->is_compiled());
    677 
    678   CompilationInfoWithZone info(shared);
    679   return GetUnoptimizedCodeCommon(&info);
    680 }
    681 
    682 
    683 bool Compiler::EnsureCompiled(Handle<JSFunction> function,
    684                               ClearExceptionFlag flag) {
    685   if (function->is_compiled()) return true;
    686   MaybeHandle<Code> maybe_code = Compiler::GetUnoptimizedCode(function);
    687   Handle<Code> code;
    688   if (!maybe_code.ToHandle(&code)) {
    689     if (flag == CLEAR_EXCEPTION) {
    690       function->GetIsolate()->clear_pending_exception();
    691     }
    692     return false;
    693   }
    694   function->ReplaceCode(*code);
    695   ASSERT(function->is_compiled());
    696   return true;
    697 }
    698 
    699 
// Compile full code for debugging. This code will have debug break slots
// and deoptimization information. Deoptimization information is required
// in case that an optimized version of this function is still activated on
// the stack. It will also make sure that the full code is compiled with
// the same flags as the previous version, that is flags which can change
// the code generated. The current method of mapping from already compiled
// full code without debug break slots to full code with debug break slots
// depends on the generated code is otherwise exactly the same.
// If compilation fails, just keep the existing code.
MaybeHandle<Code> Compiler::GetCodeForDebugging(Handle<JSFunction> function) {
  CompilationInfoWithZone info(function);
  Isolate* isolate = info.isolate();
  VMState<COMPILER> state(isolate);

  info.MarkAsDebug();

  ASSERT(!isolate->has_pending_exception());
  Handle<Code> old_code(function->shared()->code());
  ASSERT(old_code->kind() == Code::FUNCTION);
  ASSERT(!old_code->has_debug_break_slots());

  info.MarkCompilingForDebugging();
  // Mirror the optimizability of the old code so the new code is generated
  // with the same flags (see the function comment above).
  if (old_code->is_compiled_optimizable()) {
    info.EnableDeoptimizationSupport();
  } else {
    info.MarkNonOptimizable();
  }
  MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
  Handle<Code> new_code;
  if (!maybe_new_code.ToHandle(&new_code)) {
    // Keep the existing code on failure; swallow the exception.
    isolate->clear_pending_exception();
  } else {
    ASSERT_EQ(old_code->is_compiled_optimizable(),
              new_code->is_compiled_optimizable());
  }
  return maybe_new_code;
}
    737 
    738 
// Compiles an entire script for the LiveEdit machinery and records function
// info via LiveEditFunctionTracker.  Parse or compile failures simply return
// without recording anything.
void Compiler::CompileForLiveEdit(Handle<Script> script) {
  // TODO(635): support extensions.
  CompilationInfoWithZone info(script);
  PostponeInterruptsScope postpone(info.isolate());
  VMState<COMPILER> state(info.isolate());

  info.MarkAsGlobal();
  if (!Parser::Parse(&info)) return;
  // Propagate the strict mode detected during parsing.
  info.SetStrictMode(info.function()->strict_mode());

  // The tracker must be alive across compilation so it can observe the
  // root function's compilation below.
  LiveEditFunctionTracker tracker(info.isolate(), info.function());
  if (!CompileUnoptimizedCode(&info)) return;
  if (!info.shared_info().is_null()) {
    Handle<ScopeInfo> scope_info = ScopeInfo::Create(info.scope(),
                                                     info.zone());
    info.shared_info()->set_scope_info(*scope_info);
  }
  tracker.RecordRootFunctionInfo(info.code());
}
    758 
    759 
    760 static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
    761                                           bool allow_lazy_without_ctx = false) {
    762   return LiveEditFunctionTracker::IsActive(info->isolate()) ||
    763          (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
    764 }
    765 
    766 
// Compiles a top-level compilation unit (global script or eval code) and
// returns its SharedFunctionInfo, or a null handle on failure (the parser or
// compiler leaves a pending exception in that case).  Also handles cached
// parse-data decisions, profiler/logger events, and LiveEdit bookkeeping.
static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  PostponeInterruptsScope postpone(isolate);
  ASSERT(!isolate->native_context().is_null());
  Handle<Script> script = info->script();

  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(0));

  isolate->debug()->OnBeforeCompile(script);

  ASSERT(info->is_eval() || info->is_global());

  // Lazy parsing is used only when cached data is being consumed or the
  // source is large enough to amortize preparsing, and never when the
  // debugger wants eagerly compiled functions.
  bool parse_allow_lazy =
      (info->cached_data_mode() == CONSUME_CACHED_DATA ||
       String::cast(script->source())->length() > FLAG_min_preparse_length) &&
      !DebuggerWantsEagerCompilation(info);

  if (!parse_allow_lazy && info->cached_data_mode() != NO_CACHED_DATA) {
    // We are going to parse eagerly, but we either 1) have cached data produced
    // by lazy parsing or 2) are asked to generate cached data. We cannot use
    // the existing data, since it won't contain all the symbols we need for
    // eager parsing. In addition, it doesn't make sense to produce the data
    // when parsing eagerly. That data would contain all symbols, but no
    // functions, so it cannot be used to aid lazy parsing later.
    info->SetCachedData(NULL, NO_CACHED_DATA);
  }

  Handle<SharedFunctionInfo> result;

  { VMState<COMPILER> state(info->isolate());
    if (!Parser::Parse(info, parse_allow_lazy)) {
      return Handle<SharedFunctionInfo>::null();
    }

    FunctionLiteral* lit = info->function();
    LiveEditFunctionTracker live_edit_tracker(isolate, lit);

    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    HistogramTimer* rate = info->is_eval()
          ? info->isolate()->counters()->compile_eval()
          : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);

    // Compile the code.
    if (!CompileUnoptimizedCode(info)) {
      return Handle<SharedFunctionInfo>::null();
    }

    // Allocate function.
    ASSERT(!info->code().is_null());
    result = isolate->factory()->NewSharedFunctionInfo(
        lit->name(),
        lit->materialized_literal_count(),
        lit->is_generator(),
        info->code(),
        ScopeInfo::Create(info->scope(), info->zone()),
        info->feedback_vector());

    ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
    SetFunctionInfo(result, lit, true, script);

    Handle<String> script_name = script->name()->IsString()
        ? Handle<String>(String::cast(script->name()))
        : isolate->factory()->empty_string();
    Logger::LogEventsAndTags log_tag = info->is_eval()
        ? Logger::EVAL_TAG
        : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);

    PROFILE(isolate, CodeCreateEvent(
                log_tag, *info->code(), *result, info, *script_name));
    GDBJIT(AddCode(script_name, script, info->code(), info));

    // Hint to the runtime system used when allocating space for initial
    // property space by setting the expected number of properties for
    // the instances of the function.
    SetExpectedNofPropertiesFromEstimate(result,
                                         lit->expected_property_count());

    script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);

    live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
  }

  isolate->debug()->OnAfterCompile(script, Debug::NO_AFTER_COMPILE_FLAGS);

  return result;
}
    858 
    859 
// Returns a JSFunction for eval code, reusing a SharedFunctionInfo from the
// compilation cache when possible.  On a cache miss the source is compiled
// as a top-level unit; failure yields an empty MaybeHandle (the compiler
// leaves a pending exception).
MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
    Handle<String> source,
    Handle<Context> context,
    StrictMode strict_mode,
    ParseRestriction restriction,
    int scope_position) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  CompilationCache* compilation_cache = isolate->compilation_cache();
  // scope_position is part of the cache key: the same source text can
  // resolve differently depending on where the eval call site sits.
  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
      compilation_cache->LookupEval(source, context, strict_mode,
                                    scope_position);
  Handle<SharedFunctionInfo> shared_info;

  if (!maybe_shared_info.ToHandle(&shared_info)) {
    Handle<Script> script = isolate->factory()->NewScript(source);
    CompilationInfoWithZone info(script);
    info.MarkAsEval();
    // Eval in the native context is compiled as global code.
    if (context->IsNativeContext()) info.MarkAsGlobal();
    info.SetStrictMode(strict_mode);
    info.SetParseRestriction(restriction);
    info.SetContext(context);

    Debug::RecordEvalCaller(script);

    shared_info = CompileToplevel(&info);

    if (shared_info.is_null()) {
      return MaybeHandle<JSFunction>();
    } else {
      // Explicitly disable optimization for eval code. We're not yet prepared
      // to handle eval-code in the optimizing compiler.
      shared_info->DisableOptimization(kEval);

      // If caller is strict mode, the result must be in strict mode as well.
      ASSERT(strict_mode == SLOPPY || shared_info->strict_mode() == STRICT);
      if (!shared_info->dont_cache()) {
        compilation_cache->PutEval(
            source, context, shared_info, scope_position);
      }
    }
  } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
    // A cached entry from an older IC generation is reset before reuse.
    shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
  }

  return isolate->factory()->NewFunctionFromSharedFunctionInfo(
      shared_info, context, NOT_TENURED);
}
    911 
    912 
// Compiles |source| as a top-level script, consulting the compilation cache
// first (never for extension scripts).  |cached_data|/|cached_data_mode|
// control consumption or production of preparse data.  Returns a null
// handle on failure after reporting pending messages.
Handle<SharedFunctionInfo> Compiler::CompileScript(
    Handle<String> source,
    Handle<Object> script_name,
    int line_offset,
    int column_offset,
    bool is_shared_cross_origin,
    Handle<Context> context,
    v8::Extension* extension,
    ScriptData** cached_data,
    CachedDataMode cached_data_mode,
    NativesFlag natives) {
  // Normalize the cached-data arguments and check caller expectations.
  if (cached_data_mode == NO_CACHED_DATA) {
    cached_data = NULL;
  } else if (cached_data_mode == PRODUCE_CACHED_DATA) {
    ASSERT(cached_data && !*cached_data);
  } else {
    ASSERT(cached_data_mode == CONSUME_CACHED_DATA);
    ASSERT(cached_data && *cached_data);
  }
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  MaybeHandle<SharedFunctionInfo> maybe_result;
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    maybe_result = compilation_cache->LookupScript(
        source, script_name, line_offset, column_offset,
        is_shared_cross_origin, context);
  }

  if (!maybe_result.ToHandle(&result)) {
    // No cache entry found. Compile the script.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(Smi::FromInt(line_offset));
      script->set_column_offset(Smi::FromInt(column_offset));
    }
    script->set_is_shared_cross_origin(is_shared_cross_origin);

    // Compile the function and add it to the cache.
    CompilationInfoWithZone info(script);
    info.MarkAsGlobal();
    info.SetExtension(extension);
    info.SetCachedData(cached_data, cached_data_mode);
    info.SetContext(context);
    if (FLAG_use_strict) info.SetStrictMode(STRICT);
    result = CompileToplevel(&info);
    if (extension == NULL && !result.is_null() && !result->dont_cache()) {
      compilation_cache->PutScript(source, context, result);
    }
    if (result.is_null()) isolate->ReportPendingMessages();
  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
      // A cached entry from an older IC generation is reset before reuse.
      result->ResetForNewContext(isolate->heap()->global_ic_age());
  }
  return result;
}
    980 
    981 
// Creates a SharedFunctionInfo for a function literal found inside an
// already parsed and scope-analyzed script.  The function is either left
// for lazy compilation (code set to the CompileUnoptimized builtin) or
// compiled eagerly with the full code generator.  Returns a null handle if
// eager code generation fails.
Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
                                                       Handle<Script> script) {
  // Precondition: code has been parsed and scopes have been analyzed.
  CompilationInfoWithZone info(script);
  info.SetFunction(literal);
  info.PrepareForCompilation(literal->scope());
  info.SetStrictMode(literal->scope()->strict_mode());

  Isolate* isolate = info.isolate();
  Factory* factory = isolate->factory();
  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
  bool allow_lazy = literal->AllowsLazyCompilation() &&
      !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);

  // Generate code
  Handle<ScopeInfo> scope_info;
  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
    // Defer compilation: install the lazy-compile builtin and an empty
    // scope info.  (Parenthesized literals are excluded from lazy
    // compilation — presumably an immediately-invoked-function heuristic;
    // confirm against the parser.)
    Handle<Code> code = isolate->builtins()->CompileUnoptimized();
    info.SetCode(code);
    scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
  } else if (FullCodeGenerator::MakeCode(&info)) {
    ASSERT(!info.code().is_null());
    scope_info = ScopeInfo::Create(info.scope(), info.zone());
  } else {
    return Handle<SharedFunctionInfo>::null();
  }

  // Create a shared function info object.
  Handle<SharedFunctionInfo> result =
      factory->NewSharedFunctionInfo(literal->name(),
                                     literal->materialized_literal_count(),
                                     literal->is_generator(),
                                     info.code(),
                                     scope_info,
                                     info.feedback_vector());
  SetFunctionInfo(result, literal, false, script);
  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
  result->set_allows_lazy_compilation(allow_lazy);
  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);

  // Set the expected number of properties for instances and return
  // the resulting function.
  SetExpectedNofPropertiesFromEstimate(result,
                                       literal->expected_property_count());
  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
  return result;
}
   1039 
   1040 
   1041 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
   1042     Handle<JSFunction> function,
   1043     BailoutId osr_ast_id) {
   1044   if (FLAG_cache_optimized_code) {
   1045     Handle<SharedFunctionInfo> shared(function->shared());
   1046     DisallowHeapAllocation no_gc;
   1047     int index = shared->SearchOptimizedCodeMap(
   1048         function->context()->native_context(), osr_ast_id);
   1049     if (index > 0) {
   1050       if (FLAG_trace_opt) {
   1051         PrintF("[found optimized code for ");
   1052         function->ShortPrint();
   1053         if (!osr_ast_id.IsNone()) {
   1054           PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
   1055         }
   1056         PrintF("]\n");
   1057       }
   1058       FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
   1059       if (literals != NULL) function->set_literals(literals);
   1060       return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
   1061     }
   1062   }
   1063   return MaybeHandle<Code>();
   1064 }
   1065 
   1066 
   1067 static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
   1068   Handle<Code> code = info->code();
   1069   if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.
   1070 
   1071   // Cache optimized code.
   1072   if (FLAG_cache_optimized_code) {
   1073     Handle<JSFunction> function = info->closure();
   1074     Handle<SharedFunctionInfo> shared(function->shared());
   1075     Handle<FixedArray> literals(function->literals());
   1076     Handle<Context> native_context(function->context()->native_context());
   1077     SharedFunctionInfo::AddToOptimizedCodeMap(
   1078         shared, native_context, code, literals, info->osr_ast_id());
   1079   }
   1080 }
   1081 
   1082 
// Runs the frontend phases (parse, AST rewrite, scope analysis) required
// before optimized compilation can start.  Returns false if any phase
// fails.
static bool CompileOptimizedPrologue(CompilationInfo* info) {
  if (!Parser::Parse(info)) return false;
  // Propagate the strict mode detected during parsing.
  info->SetStrictMode(info->function()->strict_mode());

  if (!Rewriter::Rewrite(info)) return false;
  if (!Scope::Analyze(info)) return false;
  ASSERT(info->scope() != NULL);
  return true;
}
   1092 
   1093 
// Synchronously compiles optimized code for |info| on the current thread.
// Returns true on success, in which case info->code() holds the result,
// which has also been inserted into the optimized code map.
static bool GetOptimizedCodeNow(CompilationInfo* info) {
  if (!CompileOptimizedPrologue(info)) return false;

  Logger::TimerEventScope timer(
      info->isolate(), Logger::TimerEventScope::v8_recompile_synchronous);

  // Run all three backend phases back to back; any failure aborts.
  OptimizedCompileJob job(info);
  if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED) return false;
  if (job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED) return false;
  if (job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) return false;

  // Success!
  ASSERT(!info->isolate()->has_pending_exception());
  InsertCodeIntoOptimizedCodeMap(info);
  Compiler::RecordFunctionCompilation(
      Logger::LAZY_COMPILE_TAG, info, info->shared_info());
  return true;
}
   1112 
   1113 
   1114 static bool GetOptimizedCodeLater(CompilationInfo* info) {
   1115   Isolate* isolate = info->isolate();
   1116   if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
   1117     if (FLAG_trace_concurrent_recompilation) {
   1118       PrintF("  ** Compilation queue full, will retry optimizing ");
   1119       info->closure()->PrintName();
   1120       PrintF(" later.\n");
   1121     }
   1122     return false;
   1123   }
   1124 
   1125   CompilationHandleScope handle_scope(info);
   1126   if (!CompileOptimizedPrologue(info)) return false;
   1127   info->SaveHandles();  // Copy handles to the compilation handle scope.
   1128 
   1129   Logger::TimerEventScope timer(
   1130       isolate, Logger::TimerEventScope::v8_recompile_synchronous);
   1131 
   1132   OptimizedCompileJob* job = new(info->zone()) OptimizedCompileJob(info);
   1133   OptimizedCompileJob::Status status = job->CreateGraph();
   1134   if (status != OptimizedCompileJob::SUCCEEDED) return false;
   1135   isolate->optimizing_compiler_thread()->QueueForOptimization(job);
   1136 
   1137   if (FLAG_trace_concurrent_recompilation) {
   1138     PrintF("  ** Queued ");
   1139      info->closure()->PrintName();
   1140     if (info->is_osr()) {
   1141       PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
   1142     } else {
   1143       PrintF(" for concurrent optimization.\n");
   1144     }
   1145   }
   1146   return true;
   1147 }
   1148 
   1149 
// Returns optimized code for |function|: first from the optimized code map,
// otherwise by compiling now (synchronous mode) or queueing a background job
// (CONCURRENT mode, which returns the InOptimizationQueue builtin as a
// placeholder).  An empty result means optimization failed or was not
// queued; any pending exception is cleared before returning.
MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
                                             Handle<Code> current_code,
                                             ConcurrencyMode mode,
                                             BailoutId osr_ast_id) {
  Handle<Code> cached_code;
  if (GetCodeFromOptimizedCodeMap(
          function, osr_ast_id).ToHandle(&cached_code)) {
    return cached_code;
  }

  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function));
  Isolate* isolate = info->isolate();
  VMState<COMPILER> state(isolate);
  ASSERT(!isolate->has_pending_exception());
  PostponeInterruptsScope postpone(isolate);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  ASSERT_NE(ScopeInfo::Empty(isolate), shared->scope_info());
  int compiled_size = shared->end_position() - shared->start_position();
  isolate->counters()->total_compile_size()->Increment(compiled_size);
  // Reset the tick count on the existing code while (re)compilation runs.
  current_code->set_profiler_ticks(0);

  info->SetOptimizing(osr_ast_id, current_code);

  if (mode == CONCURRENT) {
    if (GetOptimizedCodeLater(info.get())) {
      info.Detach();  // The background recompile job owns this now.
      return isolate->builtins()->InOptimizationQueue();
    }
  } else {
    if (GetOptimizedCodeNow(info.get())) return info->code();
  }

  // Failed.
  if (FLAG_trace_opt) {
    PrintF("[failed to optimize ");
    function->PrintName();
    PrintF(": %s]\n", GetBailoutReason(info->bailout_reason()));
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return MaybeHandle<Code>();
}
   1193 
   1194 
// Finalizes a background optimization job on the main thread: generates the
// code if the earlier phases succeeded and circumstances still permit
// installing optimized code.  Returns a null handle when the result must be
// discarded.  Takes ownership of (and eventually deletes) the job's
// CompilationInfo.
Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
  // Take ownership of compilation info.  Deleting compilation info
  // also tears down the zone and the recompile job.
  SmartPointer<CompilationInfo> info(job->info());
  Isolate* isolate = info->isolate();

  VMState<COMPILER> state(isolate);
  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  shared->code()->set_profiler_ticks(0);

  // 1) Optimization may have failed.
  // 2) The function may have already been optimized by OSR.  Simply continue.
  //    Except when OSR already disabled optimization for some reason.
  // 3) The code may have already been invalidated due to dependency change.
  // 4) Debugger may have been activated.

  if (job->last_status() != OptimizedCompileJob::SUCCEEDED ||
      shared->optimization_disabled() ||
      info->HasAbortedDueToDependencyChange() ||
      isolate->DebuggerHasBreakPoints()) {
    return Handle<Code>::null();
  }

  if (job->GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
    return Handle<Code>::null();
  }

  Compiler::RecordFunctionCompilation(
      Logger::LAZY_COMPILE_TAG, info.get(), shared);
  // Only cache the code if no entry for this context/OSR id appeared in the
  // meantime (e.g. via a concurrent OSR compile).
  if (info->shared_info()->SearchOptimizedCodeMap(
          info->context()->native_context(), info->osr_ast_id()) == -1) {
    InsertCodeIntoOptimizedCodeMap(info.get());
  }

  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Optimized code for ");
    info->closure()->PrintName();
    PrintF(" generated.\n");
  }

  return Handle<Code>(*info->code());
}
   1240 
   1241 
// Emits a code-creation event (with script name and line/column when
// profiling or code-event logging is active) and registers the code with
// GDB JIT support.  The profiling event is skipped for the
// CompileUnoptimized builtin itself.
void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                         CompilationInfo* info,
                                         Handle<SharedFunctionInfo> shared) {
  // SharedFunctionInfo is passed separately, because if CompilationInfo
  // was created using Script object, it will not have it.

  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->cpu_profiler()->is_profiling()) {
    Handle<Script> script = info->script();
    Handle<Code> code = info->code();
    if (code.is_identical_to(
            info->isolate()->builtins()->CompileUnoptimized())) {
      return;
    }
    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
    int column_num =
        Script::GetColumnNumber(script, shared->start_position()) + 1;
    String* script_name = script->name()->IsString()
        ? String::cast(script->name())
        : info->isolate()->heap()->empty_string();
    Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
    PROFILE(info->isolate(), CodeCreateEvent(
        log_tag, *code, *shared, info, script_name, line_num, column_num));
  }

  GDBJIT(AddCode(Handle<String>(shared->DebugName()),
                 Handle<Script>(info->script()),
                 Handle<Code>(info->code()),
                 info));
}
   1275 
   1276 
// Starts a named compilation phase.  With --hydrogen-stats, records the
// compilation zone's current allocation size and starts the phase timer so
// the destructor can report deltas.
CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
    : name_(name), info_(info), zone_(info->isolate()) {
  if (FLAG_hydrogen_stats) {
    info_zone_start_allocation_size_ = info->zone()->allocation_size();
    timer_.Start();
  }
}
   1284 
   1285 
// Ends the phase.  With --hydrogen-stats, reports the elapsed time together
// with the memory allocated in the phase-local zone plus what the shared
// compilation zone grew by since the constructor ran.
CompilationPhase::~CompilationPhase() {
  if (FLAG_hydrogen_stats) {
    unsigned size = zone()->allocation_size();
    size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
  }
}
   1293 
   1294 
   1295 bool CompilationPhase::ShouldProduceTraceOutput() const {
   1296   // Trace if the appropriate trace flag is set and the phase name's first
   1297   // character is in the FLAG_trace_phase command line parameter.
   1298   AllowHandleDereference allow_deref;
   1299   bool tracing_on = info()->IsStub()
   1300       ? FLAG_trace_hydrogen_stubs
   1301       : (FLAG_trace_hydrogen &&
   1302          info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
   1303   return (tracing_on &&
   1304       OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
   1305 }
   1306 
   1307 } }  // namespace v8::internal
   1308