1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #include "src/v8.h" 6 7 #include "src/compiler.h" 8 9 #include "src/bootstrapper.h" 10 #include "src/codegen.h" 11 #include "src/compilation-cache.h" 12 #include "src/compiler/pipeline.h" 13 #include "src/cpu-profiler.h" 14 #include "src/debug.h" 15 #include "src/deoptimizer.h" 16 #include "src/full-codegen.h" 17 #include "src/gdb-jit.h" 18 #include "src/hydrogen.h" 19 #include "src/isolate-inl.h" 20 #include "src/lithium.h" 21 #include "src/liveedit.h" 22 #include "src/parser.h" 23 #include "src/rewriter.h" 24 #include "src/runtime-profiler.h" 25 #include "src/scanner-character-streams.h" 26 #include "src/scopeinfo.h" 27 #include "src/scopes.h" 28 #include "src/typing.h" 29 #include "src/vm-state-inl.h" 30 31 namespace v8 { 32 namespace internal { 33 34 35 ScriptData::ScriptData(const byte* data, int length) 36 : owns_data_(false), data_(data), length_(length) { 37 if (!IsAligned(reinterpret_cast<intptr_t>(data), kPointerAlignment)) { 38 byte* copy = NewArray<byte>(length); 39 DCHECK(IsAligned(reinterpret_cast<intptr_t>(copy), kPointerAlignment)); 40 CopyBytes(copy, data, length); 41 data_ = copy; 42 AcquireDataOwnership(); 43 } 44 } 45 46 47 CompilationInfo::CompilationInfo(Handle<Script> script, Zone* zone) 48 : flags_(kThisHasUses), 49 script_(script), 50 source_stream_(NULL), 51 osr_ast_id_(BailoutId::None()), 52 parameter_count_(0), 53 optimization_id_(-1), 54 ast_value_factory_(NULL), 55 ast_value_factory_owned_(false), 56 aborted_due_to_dependency_change_(false) { 57 Initialize(script->GetIsolate(), BASE, zone); 58 } 59 60 61 CompilationInfo::CompilationInfo(Isolate* isolate, Zone* zone) 62 : flags_(kThisHasUses), 63 script_(Handle<Script>::null()), 64 source_stream_(NULL), 65 osr_ast_id_(BailoutId::None()), 66 parameter_count_(0), 67 optimization_id_(-1), 68 ast_value_factory_(NULL), 69 ast_value_factory_owned_(false), 70 aborted_due_to_dependency_change_(false) { 71 Initialize(isolate, STUB, zone); 72 } 73 74 75 CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info, 76 Zone* zone) 77 : flags_(kLazy | kThisHasUses), 78 shared_info_(shared_info), 79 script_(Handle<Script>(Script::cast(shared_info->script()))), 80 source_stream_(NULL), 81 osr_ast_id_(BailoutId::None()), 82 parameter_count_(0), 83 optimization_id_(-1), 84 ast_value_factory_(NULL), 85 ast_value_factory_owned_(false), 86 aborted_due_to_dependency_change_(false) { 87 Initialize(script_->GetIsolate(), BASE, zone); 88 } 89 90 91 CompilationInfo::CompilationInfo(Handle<JSFunction> closure, Zone* zone) 92 : flags_(kLazy | kThisHasUses), 93 closure_(closure), 94 shared_info_(Handle<SharedFunctionInfo>(closure->shared())), 95 script_(Handle<Script>(Script::cast(shared_info_->script()))), 96 source_stream_(NULL), 97 context_(closure->context()), 98 osr_ast_id_(BailoutId::None()), 99 parameter_count_(0), 100 optimization_id_(-1), 101 ast_value_factory_(NULL), 102 ast_value_factory_owned_(false), 103 aborted_due_to_dependency_change_(false) { 104 Initialize(script_->GetIsolate(), BASE, zone); 105 } 106 107 108 CompilationInfo::CompilationInfo(HydrogenCodeStub* stub, Isolate* isolate, 109 Zone* zone) 110 : flags_(kLazy | kThisHasUses), 111 source_stream_(NULL), 112 osr_ast_id_(BailoutId::None()), 113 parameter_count_(0), 114 optimization_id_(-1), 115 ast_value_factory_(NULL), 116 ast_value_factory_owned_(false), 117 aborted_due_to_dependency_change_(false) { 118 Initialize(isolate, STUB, zone); 119 code_stub_ = stub; 120 } 121 122 123 CompilationInfo::CompilationInfo( 124 ScriptCompiler::ExternalSourceStream* stream, 125 ScriptCompiler::StreamedSource::Encoding encoding, Isolate* isolate, 126 Zone* zone) 127 : flags_(kThisHasUses), 128 source_stream_(stream), 129 source_stream_encoding_(encoding), 130 osr_ast_id_(BailoutId::None()), 131 parameter_count_(0), 132 optimization_id_(-1), 133 ast_value_factory_(NULL), 134 ast_value_factory_owned_(false), 135 aborted_due_to_dependency_change_(false) { 136 Initialize(isolate, BASE, zone); 137 } 138 139 140 void CompilationInfo::Initialize(Isolate* isolate, 141 Mode mode, 142 Zone* zone) { 143 isolate_ = isolate; 144 function_ = NULL; 145 scope_ = NULL; 146 global_scope_ = NULL; 147 extension_ = NULL; 148 cached_data_ = NULL; 149 compile_options_ = ScriptCompiler::kNoCompileOptions; 150 zone_ = zone; 151 deferred_handles_ = NULL; 152 code_stub_ = NULL; 153 prologue_offset_ = Code::kPrologueOffsetNotSet; 154 opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count(); 155 no_frame_ranges_ = isolate->cpu_profiler()->is_profiling() 156 ? new List<OffsetRange>(2) : NULL; 157 for (int i = 0; i < DependentCode::kGroupCount; i++) { 158 dependencies_[i] = NULL; 159 } 160 if (mode == STUB) { 161 mode_ = STUB; 162 return; 163 } 164 mode_ = mode; 165 if (!script_.is_null() && script_->type()->value() == Script::TYPE_NATIVE) { 166 MarkAsNative(); 167 } 168 if (isolate_->debug()->is_active()) MarkAsDebug(); 169 if (FLAG_context_specialization) MarkAsContextSpecializing(); 170 if (FLAG_turbo_inlining) MarkAsInliningEnabled(); 171 if (FLAG_turbo_types) MarkAsTypingEnabled(); 172 173 if (!shared_info_.is_null()) { 174 DCHECK(strict_mode() == SLOPPY); 175 SetStrictMode(shared_info_->strict_mode()); 176 } 177 bailout_reason_ = kUnknown; 178 179 if (!shared_info().is_null() && shared_info()->is_compiled()) { 180 // We should initialize the CompilationInfo feedback vector from the 181 // passed in shared info, rather than creating a new one. 182 feedback_vector_ = 183 Handle<TypeFeedbackVector>(shared_info()->feedback_vector(), isolate); 184 } 185 } 186 187 188 CompilationInfo::~CompilationInfo() { 189 if (GetFlag(kDisableFutureOptimization)) { 190 shared_info()->DisableOptimization(bailout_reason()); 191 } 192 delete deferred_handles_; 193 delete no_frame_ranges_; 194 if (ast_value_factory_owned_) delete ast_value_factory_; 195 #ifdef DEBUG 196 // Check that no dependent maps have been added or added dependent maps have 197 // been rolled back or committed. 198 for (int i = 0; i < DependentCode::kGroupCount; i++) { 199 DCHECK_EQ(NULL, dependencies_[i]); 200 } 201 #endif // DEBUG 202 } 203 204 205 void CompilationInfo::CommitDependencies(Handle<Code> code) { 206 for (int i = 0; i < DependentCode::kGroupCount; i++) { 207 ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i]; 208 if (group_objects == NULL) continue; 209 DCHECK(!object_wrapper_.is_null()); 210 for (int j = 0; j < group_objects->length(); j++) { 211 DependentCode::DependencyGroup group = 212 static_cast<DependentCode::DependencyGroup>(i); 213 DependentCode* dependent_code = 214 DependentCode::ForObject(group_objects->at(j), group); 215 dependent_code->UpdateToFinishedCode(group, this, *code); 216 } 217 dependencies_[i] = NULL; // Zone-allocated, no need to delete. 218 } 219 } 220 221 222 void CompilationInfo::RollbackDependencies() { 223 // Unregister from all dependent maps if not yet committed. 224 for (int i = 0; i < DependentCode::kGroupCount; i++) { 225 ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i]; 226 if (group_objects == NULL) continue; 227 for (int j = 0; j < group_objects->length(); j++) { 228 DependentCode::DependencyGroup group = 229 static_cast<DependentCode::DependencyGroup>(i); 230 DependentCode* dependent_code = 231 DependentCode::ForObject(group_objects->at(j), group); 232 dependent_code->RemoveCompilationInfo(group, this); 233 } 234 dependencies_[i] = NULL; // Zone-allocated, no need to delete. 235 } 236 } 237 238 239 int CompilationInfo::num_parameters() const { 240 if (IsStub()) { 241 DCHECK(parameter_count_ > 0); 242 return parameter_count_; 243 } else { 244 return scope()->num_parameters(); 245 } 246 } 247 248 249 int CompilationInfo::num_heap_slots() const { 250 if (IsStub()) { 251 return 0; 252 } else { 253 return scope()->num_heap_slots(); 254 } 255 } 256 257 258 Code::Flags CompilationInfo::flags() const { 259 if (IsStub()) { 260 return Code::ComputeFlags(code_stub()->GetCodeKind(), 261 code_stub()->GetICState(), 262 code_stub()->GetExtraICState(), 263 code_stub()->GetStubType()); 264 } else { 265 return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION); 266 } 267 } 268 269 270 // Primitive functions are unlikely to be picked up by the stack-walking 271 // profiler, so they trigger their own optimization when they're called 272 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time. 273 bool CompilationInfo::ShouldSelfOptimize() { 274 return FLAG_crankshaft && 275 !function()->flags()->Contains(kDontSelfOptimize) && 276 !function()->dont_optimize() && 277 function()->scope()->AllowsLazyCompilation() && 278 (shared_info().is_null() || !shared_info()->optimization_disabled()); 279 } 280 281 282 void CompilationInfo::PrepareForCompilation(Scope* scope) { 283 DCHECK(scope_ == NULL); 284 scope_ = scope; 285 286 int length = function()->slot_count(); 287 if (feedback_vector_.is_null()) { 288 // Allocate the feedback vector too. 289 feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length); 290 } 291 DCHECK(feedback_vector_->length() == length); 292 } 293 294 295 class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder { 296 public: 297 explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info) 298 : HOptimizedGraphBuilder(info) { 299 } 300 301 #define DEF_VISIT(type) \ 302 virtual void Visit##type(type* node) OVERRIDE { \ 303 if (node->position() != RelocInfo::kNoPosition) { \ 304 SetSourcePosition(node->position()); \ 305 } \ 306 HOptimizedGraphBuilder::Visit##type(node); \ 307 } 308 EXPRESSION_NODE_LIST(DEF_VISIT) 309 #undef DEF_VISIT 310 311 #define DEF_VISIT(type) \ 312 virtual void Visit##type(type* node) OVERRIDE { \ 313 if (node->position() != RelocInfo::kNoPosition) { \ 314 SetSourcePosition(node->position()); \ 315 } \ 316 HOptimizedGraphBuilder::Visit##type(node); \ 317 } 318 STATEMENT_NODE_LIST(DEF_VISIT) 319 #undef DEF_VISIT 320 321 #define DEF_VISIT(type) \ 322 virtual void Visit##type(type* node) OVERRIDE { \ 323 HOptimizedGraphBuilder::Visit##type(node); \ 324 } 325 MODULE_NODE_LIST(DEF_VISIT) 326 DECLARATION_NODE_LIST(DEF_VISIT) 327 #undef DEF_VISIT 328 }; 329 330 331 OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() { 332 DCHECK(isolate()->use_crankshaft()); 333 DCHECK(info()->IsOptimizing()); 334 DCHECK(!info()->IsCompilingForDebugging()); 335 336 // We should never arrive here if optimization has been disabled on the 337 // shared function info. 338 DCHECK(!info()->shared_info()->optimization_disabled()); 339 340 // Do not use crankshaft if we need to be able to set break points. 341 if (isolate()->DebuggerHasBreakPoints()) { 342 return RetryOptimization(kDebuggerHasBreakPoints); 343 } 344 345 // Limit the number of times we re-compile a functions with 346 // the optimizing compiler. 347 const int kMaxOptCount = 348 FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000; 349 if (info()->opt_count() > kMaxOptCount) { 350 return AbortOptimization(kOptimizedTooManyTimes); 351 } 352 353 // Due to an encoding limit on LUnallocated operands in the Lithium 354 // language, we cannot optimize functions with too many formal parameters 355 // or perform on-stack replacement for function with too many 356 // stack-allocated local variables. 357 // 358 // The encoding is as a signed value, with parameters and receiver using 359 // the negative indices and locals the non-negative ones. 360 const int parameter_limit = -LUnallocated::kMinFixedSlotIndex; 361 Scope* scope = info()->scope(); 362 if ((scope->num_parameters() + 1) > parameter_limit) { 363 return AbortOptimization(kTooManyParameters); 364 } 365 366 const int locals_limit = LUnallocated::kMaxFixedSlotIndex; 367 if (info()->is_osr() && 368 scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) { 369 return AbortOptimization(kTooManyParametersLocals); 370 } 371 372 if (scope->HasIllegalRedeclaration()) { 373 return AbortOptimization(kFunctionWithIllegalRedeclaration); 374 } 375 376 // Check the whitelist for Crankshaft. 377 if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) { 378 return AbortOptimization(kHydrogenFilter); 379 } 380 381 // Crankshaft requires a version of fullcode with deoptimization support. 382 // Recompile the unoptimized version of the code if the current version 383 // doesn't have deoptimization support already. 384 // Otherwise, if we are gathering compilation time and space statistics 385 // for hydrogen, gather baseline statistics for a fullcode compilation. 386 bool should_recompile = !info()->shared_info()->has_deoptimization_support(); 387 if (should_recompile || FLAG_hydrogen_stats) { 388 base::ElapsedTimer timer; 389 if (FLAG_hydrogen_stats) { 390 timer.Start(); 391 } 392 if (!Compiler::EnsureDeoptimizationSupport(info())) { 393 return SetLastStatus(FAILED); 394 } 395 if (FLAG_hydrogen_stats) { 396 isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed()); 397 } 398 } 399 400 DCHECK(info()->shared_info()->has_deoptimization_support()); 401 402 // Check the whitelist for TurboFan. 403 if ((FLAG_turbo_asm && info()->shared_info()->asm_function()) || 404 info()->closure()->PassesFilter(FLAG_turbo_filter)) { 405 compiler::Pipeline pipeline(info()); 406 pipeline.GenerateCode(); 407 if (!info()->code().is_null()) { 408 if (FLAG_turbo_deoptimization) { 409 info()->context()->native_context()->AddOptimizedCode(*info()->code()); 410 } 411 return SetLastStatus(SUCCEEDED); 412 } 413 } 414 415 if (FLAG_trace_hydrogen) { 416 Handle<String> name = info()->function()->debug_name(); 417 PrintF("-----------------------------------------------------------\n"); 418 PrintF("Compiling method %s using hydrogen\n", name->ToCString().get()); 419 isolate()->GetHTracer()->TraceCompilation(info()); 420 } 421 422 // Type-check the function. 423 AstTyper::Run(info()); 424 425 graph_builder_ = (FLAG_hydrogen_track_positions || FLAG_trace_ic) 426 ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info()) 427 : new(info()->zone()) HOptimizedGraphBuilder(info()); 428 429 Timer t(this, &time_taken_to_create_graph_); 430 info()->set_this_has_uses(false); 431 graph_ = graph_builder_->CreateGraph(); 432 433 if (isolate()->has_pending_exception()) { 434 return SetLastStatus(FAILED); 435 } 436 437 if (graph_ == NULL) return SetLastStatus(BAILED_OUT); 438 439 if (info()->HasAbortedDueToDependencyChange()) { 440 // Dependency has changed during graph creation. Let's try again later. 441 return RetryOptimization(kBailedOutDueToDependencyChange); 442 } 443 444 return SetLastStatus(SUCCEEDED); 445 } 446 447 448 OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() { 449 DisallowHeapAllocation no_allocation; 450 DisallowHandleAllocation no_handles; 451 DisallowHandleDereference no_deref; 452 DisallowCodeDependencyChange no_dependency_change; 453 454 DCHECK(last_status() == SUCCEEDED); 455 // TODO(turbofan): Currently everything is done in the first phase. 456 if (!info()->code().is_null()) { 457 return last_status(); 458 } 459 460 Timer t(this, &time_taken_to_optimize_); 461 DCHECK(graph_ != NULL); 462 BailoutReason bailout_reason = kNoReason; 463 464 if (graph_->Optimize(&bailout_reason)) { 465 chunk_ = LChunk::NewChunk(graph_); 466 if (chunk_ != NULL) return SetLastStatus(SUCCEEDED); 467 } else if (bailout_reason != kNoReason) { 468 graph_builder_->Bailout(bailout_reason); 469 } 470 471 return SetLastStatus(BAILED_OUT); 472 } 473 474 475 OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() { 476 DCHECK(last_status() == SUCCEEDED); 477 // TODO(turbofan): Currently everything is done in the first phase. 478 if (!info()->code().is_null()) { 479 RecordOptimizationStats(); 480 return last_status(); 481 } 482 483 DCHECK(!info()->HasAbortedDueToDependencyChange()); 484 DisallowCodeDependencyChange no_dependency_change; 485 DisallowJavascriptExecution no_js(isolate()); 486 { // Scope for timer. 487 Timer timer(this, &time_taken_to_codegen_); 488 DCHECK(chunk_ != NULL); 489 DCHECK(graph_ != NULL); 490 // Deferred handles reference objects that were accessible during 491 // graph creation. To make sure that we don't encounter inconsistencies 492 // between graph creation and code generation, we disallow accessing 493 // objects through deferred handles during the latter, with exceptions. 494 DisallowDeferredHandleDereference no_deferred_handle_deref; 495 Handle<Code> optimized_code = chunk_->Codegen(); 496 if (optimized_code.is_null()) { 497 if (info()->bailout_reason() == kNoReason) { 498 return AbortOptimization(kCodeGenerationFailed); 499 } 500 return SetLastStatus(BAILED_OUT); 501 } 502 info()->SetCode(optimized_code); 503 } 504 RecordOptimizationStats(); 505 // Add to the weak list of optimized code objects. 506 info()->context()->native_context()->AddOptimizedCode(*info()->code()); 507 return SetLastStatus(SUCCEEDED); 508 } 509 510 511 void OptimizedCompileJob::RecordOptimizationStats() { 512 Handle<JSFunction> function = info()->closure(); 513 if (!function->IsOptimized()) { 514 // Concurrent recompilation and OSR may race. Increment only once. 515 int opt_count = function->shared()->opt_count(); 516 function->shared()->set_opt_count(opt_count + 1); 517 } 518 double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF(); 519 double ms_optimize = time_taken_to_optimize_.InMillisecondsF(); 520 double ms_codegen = time_taken_to_codegen_.InMillisecondsF(); 521 if (FLAG_trace_opt) { 522 PrintF("[optimizing "); 523 function->ShortPrint(); 524 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize, 525 ms_codegen); 526 } 527 if (FLAG_trace_opt_stats) { 528 static double compilation_time = 0.0; 529 static int compiled_functions = 0; 530 static int code_size = 0; 531 532 compilation_time += (ms_creategraph + ms_optimize + ms_codegen); 533 compiled_functions++; 534 code_size += function->shared()->SourceSize(); 535 PrintF("Compiled: %d functions with %d byte source size in %fms.\n", 536 compiled_functions, 537 code_size, 538 compilation_time); 539 } 540 if (FLAG_hydrogen_stats) { 541 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_, 542 time_taken_to_optimize_, 543 time_taken_to_codegen_); 544 } 545 } 546 547 548 // Sets the expected number of properties based on estimate from compiler. 549 void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared, 550 int estimate) { 551 // If no properties are added in the constructor, they are more likely 552 // to be added later. 553 if (estimate == 0) estimate = 2; 554 555 // TODO(yangguo): check whether those heuristics are still up-to-date. 556 // We do not shrink objects that go into a snapshot (yet), so we adjust 557 // the estimate conservatively. 558 if (shared->GetIsolate()->serializer_enabled()) { 559 estimate += 2; 560 } else if (FLAG_clever_optimizations) { 561 // Inobject slack tracking will reclaim redundant inobject space later, 562 // so we can afford to adjust the estimate generously. 563 estimate += 8; 564 } else { 565 estimate += 3; 566 } 567 568 shared->set_expected_nof_properties(estimate); 569 } 570 571 572 // Sets the function info on a function. 573 // The start_position points to the first '(' character after the function name 574 // in the full script source. When counting characters in the script source the 575 // the first character is number 0 (not 1). 576 static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info, 577 FunctionLiteral* lit, 578 bool is_toplevel, 579 Handle<Script> script) { 580 function_info->set_length(lit->parameter_count()); 581 function_info->set_formal_parameter_count(lit->parameter_count()); 582 function_info->set_script(*script); 583 function_info->set_function_token_position(lit->function_token_position()); 584 function_info->set_start_position(lit->start_position()); 585 function_info->set_end_position(lit->end_position()); 586 function_info->set_is_expression(lit->is_expression()); 587 function_info->set_is_anonymous(lit->is_anonymous()); 588 function_info->set_is_toplevel(is_toplevel); 589 function_info->set_inferred_name(*lit->inferred_name()); 590 function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation()); 591 function_info->set_allows_lazy_compilation_without_context( 592 lit->AllowsLazyCompilationWithoutContext()); 593 function_info->set_strict_mode(lit->strict_mode()); 594 function_info->set_uses_arguments(lit->scope()->arguments() != NULL); 595 function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters()); 596 function_info->set_ast_node_count(lit->ast_node_count()); 597 function_info->set_is_function(lit->is_function()); 598 function_info->set_bailout_reason(lit->dont_optimize_reason()); 599 function_info->set_dont_cache(lit->flags()->Contains(kDontCache)); 600 function_info->set_kind(lit->kind()); 601 function_info->set_asm_function(lit->scope()->asm_function()); 602 } 603 604 605 static void RecordFunctionCompilation(Logger::LogEventsAndTags tag, 606 CompilationInfo* info, 607 Handle<SharedFunctionInfo> shared) { 608 // SharedFunctionInfo is passed separately, because if CompilationInfo 609 // was created using Script object, it will not have it. 610 611 // Log the code generation. If source information is available include 612 // script name and line number. Check explicitly whether logging is 613 // enabled as finding the line number is not free. 614 if (info->isolate()->logger()->is_logging_code_events() || 615 info->isolate()->cpu_profiler()->is_profiling()) { 616 Handle<Script> script = info->script(); 617 Handle<Code> code = info->code(); 618 if (code.is_identical_to(info->isolate()->builtins()->CompileLazy())) { 619 return; 620 } 621 int line_num = Script::GetLineNumber(script, shared->start_position()) + 1; 622 int column_num = 623 Script::GetColumnNumber(script, shared->start_position()) + 1; 624 String* script_name = script->name()->IsString() 625 ? String::cast(script->name()) 626 : info->isolate()->heap()->empty_string(); 627 Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script); 628 PROFILE(info->isolate(), 629 CodeCreateEvent(log_tag, *code, *shared, info, script_name, 630 line_num, column_num)); 631 } 632 633 GDBJIT(AddCode(Handle<String>(shared->DebugName()), 634 Handle<Script>(info->script()), Handle<Code>(info->code()), 635 info)); 636 } 637 638 639 static bool CompileUnoptimizedCode(CompilationInfo* info) { 640 DCHECK(AllowCompilation::IsAllowed(info->isolate())); 641 DCHECK(info->function() != NULL); 642 if (!Rewriter::Rewrite(info)) return false; 643 if (!Scope::Analyze(info)) return false; 644 DCHECK(info->scope() != NULL); 645 646 if (!FullCodeGenerator::MakeCode(info)) { 647 Isolate* isolate = info->isolate(); 648 if (!isolate->has_pending_exception()) isolate->StackOverflow(); 649 return false; 650 } 651 return true; 652 } 653 654 655 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon( 656 CompilationInfo* info) { 657 VMState<COMPILER> state(info->isolate()); 658 PostponeInterruptsScope postpone(info->isolate()); 659 660 // Parse and update CompilationInfo with the results. 661 if (!Parser::Parse(info)) return MaybeHandle<Code>(); 662 Handle<SharedFunctionInfo> shared = info->shared_info(); 663 FunctionLiteral* lit = info->function(); 664 shared->set_strict_mode(lit->strict_mode()); 665 SetExpectedNofPropertiesFromEstimate(shared, lit->expected_property_count()); 666 shared->set_bailout_reason(lit->dont_optimize_reason()); 667 shared->set_ast_node_count(lit->ast_node_count()); 668 669 // Compile unoptimized code. 670 if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>(); 671 672 CHECK_EQ(Code::FUNCTION, info->code()->kind()); 673 RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared); 674 675 // Update the shared function info with the scope info. Allocating the 676 // ScopeInfo object may cause a GC. 677 Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(), info->zone()); 678 shared->set_scope_info(*scope_info); 679 680 // Update the code and feedback vector for the shared function info. 681 shared->ReplaceCode(*info->code()); 682 if (shared->optimization_disabled()) info->code()->set_optimizable(false); 683 shared->set_feedback_vector(*info->feedback_vector()); 684 685 return info->code(); 686 } 687 688 689 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap( 690 Handle<JSFunction> function, BailoutId osr_ast_id) { 691 if (FLAG_cache_optimized_code) { 692 Handle<SharedFunctionInfo> shared(function->shared()); 693 // Bound functions are not cached. 694 if (shared->bound()) return MaybeHandle<Code>(); 695 DisallowHeapAllocation no_gc; 696 int index = shared->SearchOptimizedCodeMap( 697 function->context()->native_context(), osr_ast_id); 698 if (index > 0) { 699 if (FLAG_trace_opt) { 700 PrintF("[found optimized code for "); 701 function->ShortPrint(); 702 if (!osr_ast_id.IsNone()) { 703 PrintF(" at OSR AST id %d", osr_ast_id.ToInt()); 704 } 705 PrintF("]\n"); 706 } 707 FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index); 708 if (literals != NULL) function->set_literals(literals); 709 return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index)); 710 } 711 } 712 return MaybeHandle<Code>(); 713 } 714 715 716 static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) { 717 Handle<Code> code = info->code(); 718 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do. 719 720 // Context specialization folds-in the context, so no sharing can occur. 721 if (code->is_turbofanned() && info->is_context_specializing()) return; 722 723 // Cache optimized code. 724 if (FLAG_cache_optimized_code) { 725 Handle<JSFunction> function = info->closure(); 726 Handle<SharedFunctionInfo> shared(function->shared()); 727 // Do not cache bound functions. 728 if (shared->bound()) return; 729 Handle<FixedArray> literals(function->literals()); 730 Handle<Context> native_context(function->context()->native_context()); 731 SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code, 732 literals, info->osr_ast_id()); 733 } 734 } 735 736 737 static bool CompileOptimizedPrologue(CompilationInfo* info) { 738 if (!Parser::Parse(info)) return false; 739 if (!Rewriter::Rewrite(info)) return false; 740 if (!Scope::Analyze(info)) return false; 741 DCHECK(info->scope() != NULL); 742 return true; 743 } 744 745 746 static bool GetOptimizedCodeNow(CompilationInfo* info) { 747 if (!CompileOptimizedPrologue(info)) return false; 748 749 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate()); 750 751 OptimizedCompileJob job(info); 752 if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED || 753 job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED || 754 job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) { 755 if (FLAG_trace_opt) { 756 PrintF("[aborted optimizing "); 757 info->closure()->ShortPrint(); 758 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); 759 } 760 return false; 761 } 762 763 // Success! 764 DCHECK(!info->isolate()->has_pending_exception()); 765 InsertCodeIntoOptimizedCodeMap(info); 766 RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, 767 info->shared_info()); 768 if (FLAG_trace_opt) { 769 PrintF("[completed optimizing "); 770 info->closure()->ShortPrint(); 771 PrintF("]\n"); 772 } 773 return true; 774 } 775 776 777 static bool GetOptimizedCodeLater(CompilationInfo* info) { 778 Isolate* isolate = info->isolate(); 779 if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) { 780 if (FLAG_trace_concurrent_recompilation) { 781 PrintF(" ** Compilation queue full, will retry optimizing "); 782 info->closure()->ShortPrint(); 783 PrintF(" later.\n"); 784 } 785 return false; 786 } 787 788 CompilationHandleScope handle_scope(info); 789 if (!CompileOptimizedPrologue(info)) return false; 790 info->SaveHandles(); // Copy handles to the compilation handle scope. 791 792 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate()); 793 794 OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info); 795 OptimizedCompileJob::Status status = job->CreateGraph(); 796 if (status != OptimizedCompileJob::SUCCEEDED) return false; 797 isolate->optimizing_compiler_thread()->QueueForOptimization(job); 798 799 if (FLAG_trace_concurrent_recompilation) { 800 PrintF(" ** Queued "); 801 info->closure()->ShortPrint(); 802 if (info->is_osr()) { 803 PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt()); 804 } else { 805 PrintF(" for concurrent optimization.\n"); 806 } 807 } 808 return true; 809 } 810 811 812 MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) { 813 DCHECK(!function->GetIsolate()->has_pending_exception()); 814 DCHECK(!function->is_compiled()); 815 if (function->shared()->is_compiled()) { 816 return Handle<Code>(function->shared()->code()); 817 } 818 819 CompilationInfoWithZone info(function); 820 Handle<Code> result; 821 ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result, 822 GetUnoptimizedCodeCommon(&info), 823 Code); 824 return result; 825 } 826 827 828 MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) { 829 DCHECK(!function->GetIsolate()->has_pending_exception()); 830 DCHECK(!function->is_compiled()); 831 832 if (FLAG_turbo_asm && function->shared()->asm_function()) { 833 CompilationInfoWithZone info(function); 834 835 VMState<COMPILER> state(info.isolate()); 836 PostponeInterruptsScope postpone(info.isolate()); 837 838 info.SetOptimizing(BailoutId::None(), 839 Handle<Code>(function->shared()->code())); 840 841 info.MarkAsContextSpecializing(); 842 info.MarkAsTypingEnabled(); 843 info.MarkAsInliningDisabled(); 844 845 if (GetOptimizedCodeNow(&info)) return info.code(); 846 } 847 848 if (function->shared()->is_compiled()) { 849 return Handle<Code>(function->shared()->code()); 850 } 851 852 CompilationInfoWithZone info(function); 853 Handle<Code> result; 854 ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result, 855 GetUnoptimizedCodeCommon(&info), Code); 856 857 if (FLAG_always_opt && 858 info.isolate()->use_crankshaft() && 859 !info.shared_info()->optimization_disabled() && 860 !info.isolate()->DebuggerHasBreakPoints()) { 861 Handle<Code> opt_code; 862 if (Compiler::GetOptimizedCode( 863 function, result, 864 Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) { 865 result = opt_code; 866 } 867 } 868 869 return result; 870 } 871 872 873 MaybeHandle<Code> Compiler::GetUnoptimizedCode( 874 Handle<SharedFunctionInfo> shared) { 875 DCHECK(!shared->GetIsolate()->has_pending_exception()); 876 DCHECK(!shared->is_compiled()); 877 878 CompilationInfoWithZone info(shared); 879 return GetUnoptimizedCodeCommon(&info); 880 } 881 882 883 bool Compiler::EnsureCompiled(Handle<JSFunction> function, 884 ClearExceptionFlag flag) { 885 if (function->is_compiled()) return true; 886 MaybeHandle<Code> maybe_code = Compiler::GetLazyCode(function); 887 Handle<Code> code; 888 if (!maybe_code.ToHandle(&code)) { 889 if (flag == CLEAR_EXCEPTION) { 890 function->GetIsolate()->clear_pending_exception(); 891 } 892 return false; 893 } 894 function->ReplaceCode(*code); 895 DCHECK(function->is_compiled()); 896 return true; 897 } 898 899 900 // TODO(turbofan): In the future, unoptimized code with deopt support could 901 // be generated lazily once deopt is triggered. 902 bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) { 903 if (!info->shared_info()->has_deoptimization_support()) { 904 CompilationInfoWithZone unoptimized(info->shared_info()); 905 // Note that we use the same AST that we will use for generating the 906 // optimized code. 907 unoptimized.SetFunction(info->function()); 908 unoptimized.PrepareForCompilation(info->scope()); 909 unoptimized.SetContext(info->context()); 910 unoptimized.EnableDeoptimizationSupport(); 911 if (!FullCodeGenerator::MakeCode(&unoptimized)) return false; 912 913 Handle<SharedFunctionInfo> shared = info->shared_info(); 914 shared->EnableDeoptimizationSupport(*unoptimized.code()); 915 shared->set_feedback_vector(*unoptimized.feedback_vector()); 916 917 // The scope info might not have been set if a lazily compiled 918 // function is inlined before being called for the first time. 919 if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) { 920 Handle<ScopeInfo> target_scope_info = 921 ScopeInfo::Create(info->scope(), info->zone()); 922 shared->set_scope_info(*target_scope_info); 923 } 924 925 // The existing unoptimized code was replaced with the new one. 926 RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, &unoptimized, shared); 927 } 928 return true; 929 } 930 931 932 // Compile full code for debugging. This code will have debug break slots 933 // and deoptimization information. Deoptimization information is required 934 // in case that an optimized version of this function is still activated on 935 // the stack. It will also make sure that the full code is compiled with 936 // the same flags as the previous version, that is flags which can change 937 // the code generated. The current method of mapping from already compiled 938 // full code without debug break slots to full code with debug break slots 939 // depends on the generated code is otherwise exactly the same. 940 // If compilation fails, just keep the existing code. 941 MaybeHandle<Code> Compiler::GetDebugCode(Handle<JSFunction> function) { 942 CompilationInfoWithZone info(function); 943 Isolate* isolate = info.isolate(); 944 VMState<COMPILER> state(isolate); 945 946 info.MarkAsDebug(); 947 948 DCHECK(!isolate->has_pending_exception()); 949 Handle<Code> old_code(function->shared()->code()); 950 DCHECK(old_code->kind() == Code::FUNCTION); 951 DCHECK(!old_code->has_debug_break_slots()); 952 953 info.MarkCompilingForDebugging(); 954 if (old_code->is_compiled_optimizable()) { 955 info.EnableDeoptimizationSupport(); 956 } else { 957 info.MarkNonOptimizable(); 958 } 959 MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info); 960 Handle<Code> new_code; 961 if (!maybe_new_code.ToHandle(&new_code)) { 962 isolate->clear_pending_exception(); 963 } else { 964 DCHECK_EQ(old_code->is_compiled_optimizable(), 965 new_code->is_compiled_optimizable()); 966 } 967 return maybe_new_code; 968 } 969 970 971 void Compiler::CompileForLiveEdit(Handle<Script> script) { 972 // TODO(635): support extensions. 973 CompilationInfoWithZone info(script); 974 PostponeInterruptsScope postpone(info.isolate()); 975 VMState<COMPILER> state(info.isolate()); 976 977 info.MarkAsGlobal(); 978 if (!Parser::Parse(&info)) return; 979 980 LiveEditFunctionTracker tracker(info.isolate(), info.function()); 981 if (!CompileUnoptimizedCode(&info)) return; 982 if (!info.shared_info().is_null()) { 983 Handle<ScopeInfo> scope_info = ScopeInfo::Create(info.scope(), 984 info.zone()); 985 info.shared_info()->set_scope_info(*scope_info); 986 } 987 tracker.RecordRootFunctionInfo(info.code()); 988 } 989 990 991 static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) { 992 Isolate* isolate = info->isolate(); 993 PostponeInterruptsScope postpone(isolate); 994 DCHECK(!isolate->native_context().is_null()); 995 Handle<Script> script = info->script(); 996 997 // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile? 998 FixedArray* array = isolate->native_context()->embedder_data(); 999 script->set_context_data(array->get(0)); 1000 1001 isolate->debug()->OnBeforeCompile(script); 1002 1003 DCHECK(info->is_eval() || info->is_global()); 1004 1005 Handle<SharedFunctionInfo> result; 1006 1007 { VMState<COMPILER> state(info->isolate()); 1008 if (info->function() == NULL) { 1009 // Parse the script if needed (if it's already parsed, function() is 1010 // non-NULL). 1011 bool parse_allow_lazy = 1012 (info->compile_options() == ScriptCompiler::kConsumeParserCache || 1013 String::cast(script->source())->length() > 1014 FLAG_min_preparse_length) && 1015 !Compiler::DebuggerWantsEagerCompilation(info); 1016 1017 if (!parse_allow_lazy && 1018 (info->compile_options() == ScriptCompiler::kProduceParserCache || 1019 info->compile_options() == ScriptCompiler::kConsumeParserCache)) { 1020 // We are going to parse eagerly, but we either 1) have cached data 1021 // produced by lazy parsing or 2) are asked to generate cached data. 1022 // Eager parsing cannot benefit from cached data, and producing cached 1023 // data while parsing eagerly is not implemented. 1024 info->SetCachedData(NULL, ScriptCompiler::kNoCompileOptions); 1025 } 1026 if (!Parser::Parse(info, parse_allow_lazy)) { 1027 return Handle<SharedFunctionInfo>::null(); 1028 } 1029 } 1030 1031 FunctionLiteral* lit = info->function(); 1032 LiveEditFunctionTracker live_edit_tracker(isolate, lit); 1033 1034 // Measure how long it takes to do the compilation; only take the 1035 // rest of the function into account to avoid overlap with the 1036 // parsing statistics. 1037 HistogramTimer* rate = info->is_eval() 1038 ? info->isolate()->counters()->compile_eval() 1039 : info->isolate()->counters()->compile(); 1040 HistogramTimerScope timer(rate); 1041 1042 // Compile the code. 1043 if (!CompileUnoptimizedCode(info)) { 1044 return Handle<SharedFunctionInfo>::null(); 1045 } 1046 1047 // Allocate function. 1048 DCHECK(!info->code().is_null()); 1049 result = isolate->factory()->NewSharedFunctionInfo( 1050 lit->name(), lit->materialized_literal_count(), lit->kind(), 1051 info->code(), ScopeInfo::Create(info->scope(), info->zone()), 1052 info->feedback_vector()); 1053 1054 DCHECK_EQ(RelocInfo::kNoPosition, lit->function_token_position()); 1055 SetFunctionInfo(result, lit, true, script); 1056 1057 Handle<String> script_name = script->name()->IsString() 1058 ? Handle<String>(String::cast(script->name())) 1059 : isolate->factory()->empty_string(); 1060 Logger::LogEventsAndTags log_tag = info->is_eval() 1061 ? Logger::EVAL_TAG 1062 : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script); 1063 1064 PROFILE(isolate, CodeCreateEvent( 1065 log_tag, *info->code(), *result, info, *script_name)); 1066 GDBJIT(AddCode(script_name, script, info->code(), info)); 1067 1068 // Hint to the runtime system used when allocating space for initial 1069 // property space by setting the expected number of properties for 1070 // the instances of the function. 1071 SetExpectedNofPropertiesFromEstimate(result, 1072 lit->expected_property_count()); 1073 1074 if (!script.is_null()) 1075 script->set_compilation_state(Script::COMPILATION_STATE_COMPILED); 1076 1077 live_edit_tracker.RecordFunctionInfo(result, lit, info->zone()); 1078 } 1079 1080 isolate->debug()->OnAfterCompile(script); 1081 1082 return result; 1083 } 1084 1085 1086 MaybeHandle<JSFunction> Compiler::GetFunctionFromEval( 1087 Handle<String> source, Handle<SharedFunctionInfo> outer_info, 1088 Handle<Context> context, StrictMode strict_mode, 1089 ParseRestriction restriction, int scope_position) { 1090 Isolate* isolate = source->GetIsolate(); 1091 int source_length = source->length(); 1092 isolate->counters()->total_eval_size()->Increment(source_length); 1093 isolate->counters()->total_compile_size()->Increment(source_length); 1094 1095 CompilationCache* compilation_cache = isolate->compilation_cache(); 1096 MaybeHandle<SharedFunctionInfo> maybe_shared_info = 1097 compilation_cache->LookupEval(source, outer_info, context, strict_mode, 1098 scope_position); 1099 Handle<SharedFunctionInfo> shared_info; 1100 1101 if (!maybe_shared_info.ToHandle(&shared_info)) { 1102 Handle<Script> script = isolate->factory()->NewScript(source); 1103 CompilationInfoWithZone info(script); 1104 info.MarkAsEval(); 1105 if (context->IsNativeContext()) info.MarkAsGlobal(); 1106 info.SetStrictMode(strict_mode); 1107 info.SetParseRestriction(restriction); 1108 info.SetContext(context); 1109 1110 Debug::RecordEvalCaller(script); 1111 1112 shared_info = CompileToplevel(&info); 1113 1114 if (shared_info.is_null()) { 1115 return MaybeHandle<JSFunction>(); 1116 } else { 1117 // Explicitly disable optimization for eval code. We're not yet prepared 1118 // to handle eval-code in the optimizing compiler. 1119 shared_info->DisableOptimization(kEval); 1120 1121 // If caller is strict mode, the result must be in strict mode as well. 1122 DCHECK(strict_mode == SLOPPY || shared_info->strict_mode() == STRICT); 1123 if (!shared_info->dont_cache()) { 1124 compilation_cache->PutEval(source, outer_info, context, shared_info, 1125 scope_position); 1126 } 1127 } 1128 } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) { 1129 shared_info->ResetForNewContext(isolate->heap()->global_ic_age()); 1130 } 1131 1132 return isolate->factory()->NewFunctionFromSharedFunctionInfo( 1133 shared_info, context, NOT_TENURED); 1134 } 1135 1136 1137 Handle<SharedFunctionInfo> Compiler::CompileScript( 1138 Handle<String> source, Handle<Object> script_name, int line_offset, 1139 int column_offset, bool is_shared_cross_origin, Handle<Context> context, 1140 v8::Extension* extension, ScriptData** cached_data, 1141 ScriptCompiler::CompileOptions compile_options, NativesFlag natives) { 1142 if (compile_options == ScriptCompiler::kNoCompileOptions) { 1143 cached_data = NULL; 1144 } else if (compile_options == ScriptCompiler::kProduceParserCache || 1145 compile_options == ScriptCompiler::kProduceCodeCache) { 1146 DCHECK(cached_data && !*cached_data); 1147 DCHECK(extension == NULL); 1148 } else { 1149 DCHECK(compile_options == ScriptCompiler::kConsumeParserCache || 1150 compile_options == ScriptCompiler::kConsumeCodeCache); 1151 DCHECK(cached_data && *cached_data); 1152 DCHECK(extension == NULL); 1153 } 1154 Isolate* isolate = source->GetIsolate(); 1155 int source_length = source->length(); 1156 isolate->counters()->total_load_size()->Increment(source_length); 1157 isolate->counters()->total_compile_size()->Increment(source_length); 1158 1159 CompilationCache* compilation_cache = isolate->compilation_cache(); 1160 1161 // Do a lookup in the compilation cache but not for extensions. 1162 MaybeHandle<SharedFunctionInfo> maybe_result; 1163 Handle<SharedFunctionInfo> result; 1164 if (extension == NULL) { 1165 if (FLAG_serialize_toplevel && 1166 compile_options == ScriptCompiler::kConsumeCodeCache && 1167 !isolate->debug()->is_loaded()) { 1168 HistogramTimerScope timer(isolate->counters()->compile_deserialize()); 1169 return CodeSerializer::Deserialize(isolate, *cached_data, source); 1170 } else { 1171 maybe_result = compilation_cache->LookupScript( 1172 source, script_name, line_offset, column_offset, 1173 is_shared_cross_origin, context); 1174 } 1175 } 1176 1177 base::ElapsedTimer timer; 1178 if (FLAG_profile_deserialization && FLAG_serialize_toplevel && 1179 compile_options == ScriptCompiler::kProduceCodeCache) { 1180 timer.Start(); 1181 } 1182 1183 if (!maybe_result.ToHandle(&result)) { 1184 // No cache entry found. Compile the script. 1185 1186 // Create a script object describing the script to be compiled. 1187 Handle<Script> script = isolate->factory()->NewScript(source); 1188 if (natives == NATIVES_CODE) { 1189 script->set_type(Smi::FromInt(Script::TYPE_NATIVE)); 1190 } 1191 if (!script_name.is_null()) { 1192 script->set_name(*script_name); 1193 script->set_line_offset(Smi::FromInt(line_offset)); 1194 script->set_column_offset(Smi::FromInt(column_offset)); 1195 } 1196 script->set_is_shared_cross_origin(is_shared_cross_origin); 1197 1198 // Compile the function and add it to the cache. 1199 CompilationInfoWithZone info(script); 1200 info.MarkAsGlobal(); 1201 info.SetCachedData(cached_data, compile_options); 1202 info.SetExtension(extension); 1203 info.SetContext(context); 1204 if (FLAG_serialize_toplevel && 1205 compile_options == ScriptCompiler::kProduceCodeCache) { 1206 info.PrepareForSerializing(); 1207 } 1208 if (FLAG_use_strict) info.SetStrictMode(STRICT); 1209 1210 result = CompileToplevel(&info); 1211 if (extension == NULL && !result.is_null() && !result->dont_cache()) { 1212 compilation_cache->PutScript(source, context, result); 1213 if (FLAG_serialize_toplevel && 1214 compile_options == ScriptCompiler::kProduceCodeCache) { 1215 HistogramTimerScope histogram_timer( 1216 isolate->counters()->compile_serialize()); 1217 *cached_data = CodeSerializer::Serialize(isolate, result, source); 1218 if (FLAG_profile_deserialization) { 1219 PrintF("[Compiling and serializing %d bytes took %0.3f ms]\n", 1220 (*cached_data)->length(), timer.Elapsed().InMillisecondsF()); 1221 } 1222 } 1223 } 1224 1225 if (result.is_null()) isolate->ReportPendingMessages(); 1226 } else if (result->ic_age() != isolate->heap()->global_ic_age()) { 1227 result->ResetForNewContext(isolate->heap()->global_ic_age()); 1228 } 1229 return result; 1230 } 1231 1232 1233 Handle<SharedFunctionInfo> Compiler::CompileStreamedScript( 1234 CompilationInfo* info, int source_length) { 1235 Isolate* isolate = info->isolate(); 1236 isolate->counters()->total_load_size()->Increment(source_length); 1237 isolate->counters()->total_compile_size()->Increment(source_length); 1238 1239 if (FLAG_use_strict) info->SetStrictMode(STRICT); 1240 // TODO(marja): FLAG_serialize_toplevel is not honoured and won't be; when the 1241 // real code caching lands, streaming needs to be adapted to use it. 1242 return CompileToplevel(info); 1243 } 1244 1245 1246 Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo( 1247 FunctionLiteral* literal, Handle<Script> script, 1248 CompilationInfo* outer_info) { 1249 // Precondition: code has been parsed and scopes have been analyzed. 1250 CompilationInfoWithZone info(script); 1251 info.SetFunction(literal); 1252 info.PrepareForCompilation(literal->scope()); 1253 info.SetStrictMode(literal->scope()->strict_mode()); 1254 if (outer_info->will_serialize()) info.PrepareForSerializing(); 1255 1256 Isolate* isolate = info.isolate(); 1257 Factory* factory = isolate->factory(); 1258 LiveEditFunctionTracker live_edit_tracker(isolate, literal); 1259 // Determine if the function can be lazily compiled. This is necessary to 1260 // allow some of our builtin JS files to be lazily compiled. These 1261 // builtins cannot be handled lazily by the parser, since we have to know 1262 // if a function uses the special natives syntax, which is something the 1263 // parser records. 1264 // If the debugger requests compilation for break points, we cannot be 1265 // aggressive about lazy compilation, because it might trigger compilation 1266 // of functions without an outer context when setting a breakpoint through 1267 // Debug::FindSharedFunctionInfoInScript. 1268 bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext(); 1269 bool allow_lazy = literal->AllowsLazyCompilation() && 1270 !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx); 1271 1272 // Generate code 1273 Handle<ScopeInfo> scope_info; 1274 if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) { 1275 Handle<Code> code = isolate->builtins()->CompileLazy(); 1276 info.SetCode(code); 1277 scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate)); 1278 } else if (FullCodeGenerator::MakeCode(&info)) { 1279 DCHECK(!info.code().is_null()); 1280 scope_info = ScopeInfo::Create(info.scope(), info.zone()); 1281 } else { 1282 return Handle<SharedFunctionInfo>::null(); 1283 } 1284 1285 // Create a shared function info object. 1286 Handle<SharedFunctionInfo> result = factory->NewSharedFunctionInfo( 1287 literal->name(), literal->materialized_literal_count(), literal->kind(), 1288 info.code(), scope_info, info.feedback_vector()); 1289 SetFunctionInfo(result, literal, false, script); 1290 RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result); 1291 result->set_allows_lazy_compilation(allow_lazy); 1292 result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx); 1293 1294 // Set the expected number of properties for instances and return 1295 // the resulting function. 1296 SetExpectedNofPropertiesFromEstimate(result, 1297 literal->expected_property_count()); 1298 live_edit_tracker.RecordFunctionInfo(result, literal, info.zone()); 1299 return result; 1300 } 1301 1302 1303 MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function, 1304 Handle<Code> current_code, 1305 ConcurrencyMode mode, 1306 BailoutId osr_ast_id) { 1307 Handle<Code> cached_code; 1308 if (GetCodeFromOptimizedCodeMap( 1309 function, osr_ast_id).ToHandle(&cached_code)) { 1310 return cached_code; 1311 } 1312 1313 SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function)); 1314 Isolate* isolate = info->isolate(); 1315 DCHECK(AllowCompilation::IsAllowed(isolate)); 1316 VMState<COMPILER> state(isolate); 1317 DCHECK(!isolate->has_pending_exception()); 1318 PostponeInterruptsScope postpone(isolate); 1319 1320 Handle<SharedFunctionInfo> shared = info->shared_info(); 1321 if (shared->code()->kind() != Code::FUNCTION || 1322 ScopeInfo::Empty(isolate) == shared->scope_info()) { 1323 // The function was never compiled. Compile it unoptimized first. 1324 // TODO(titzer): reuse the AST and scope info from this compile. 1325 CompilationInfoWithZone nested(function); 1326 nested.EnableDeoptimizationSupport(); 1327 if (!GetUnoptimizedCodeCommon(&nested).ToHandle(¤t_code)) { 1328 return MaybeHandle<Code>(); 1329 } 1330 shared->ReplaceCode(*current_code); 1331 } 1332 current_code->set_profiler_ticks(0); 1333 1334 info->SetOptimizing(osr_ast_id, current_code); 1335 1336 if (mode == CONCURRENT) { 1337 if (GetOptimizedCodeLater(info.get())) { 1338 info.Detach(); // The background recompile job owns this now. 1339 return isolate->builtins()->InOptimizationQueue(); 1340 } 1341 } else { 1342 if (GetOptimizedCodeNow(info.get())) return info->code(); 1343 } 1344 1345 if (isolate->has_pending_exception()) isolate->clear_pending_exception(); 1346 return MaybeHandle<Code>(); 1347 } 1348 1349 1350 Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) { 1351 // Take ownership of compilation info. Deleting compilation info 1352 // also tears down the zone and the recompile job. 1353 SmartPointer<CompilationInfo> info(job->info()); 1354 Isolate* isolate = info->isolate(); 1355 1356 VMState<COMPILER> state(isolate); 1357 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate()); 1358 1359 Handle<SharedFunctionInfo> shared = info->shared_info(); 1360 shared->code()->set_profiler_ticks(0); 1361 1362 // 1) Optimization on the concurrent thread may have failed. 1363 // 2) The function may have already been optimized by OSR. Simply continue. 1364 // Except when OSR already disabled optimization for some reason. 1365 // 3) The code may have already been invalidated due to dependency change. 1366 // 4) Debugger may have been activated. 1367 // 5) Code generation may have failed. 1368 if (job->last_status() == OptimizedCompileJob::SUCCEEDED) { 1369 if (shared->optimization_disabled()) { 1370 job->RetryOptimization(kOptimizationDisabled); 1371 } else if (info->HasAbortedDueToDependencyChange()) { 1372 job->RetryOptimization(kBailedOutDueToDependencyChange); 1373 } else if (isolate->DebuggerHasBreakPoints()) { 1374 job->RetryOptimization(kDebuggerHasBreakPoints); 1375 } else if (job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) { 1376 RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info.get(), shared); 1377 if (info->shared_info()->SearchOptimizedCodeMap( 1378 info->context()->native_context(), info->osr_ast_id()) == -1) { 1379 InsertCodeIntoOptimizedCodeMap(info.get()); 1380 } 1381 if (FLAG_trace_opt) { 1382 PrintF("[completed optimizing "); 1383 info->closure()->ShortPrint(); 1384 PrintF("]\n"); 1385 } 1386 return Handle<Code>(*info->code()); 1387 } 1388 } 1389 1390 DCHECK(job->last_status() != OptimizedCompileJob::SUCCEEDED); 1391 if (FLAG_trace_opt) { 1392 PrintF("[aborted optimizing "); 1393 info->closure()->ShortPrint(); 1394 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); 1395 } 1396 return Handle<Code>::null(); 1397 } 1398 1399 1400 bool Compiler::DebuggerWantsEagerCompilation(CompilationInfo* info, 1401 bool allow_lazy_without_ctx) { 1402 return LiveEditFunctionTracker::IsActive(info->isolate()) || 1403 (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx); 1404 } 1405 1406 1407 CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info) 1408 : name_(name), info_(info), zone_(info->isolate()) { 1409 if (FLAG_hydrogen_stats) { 1410 info_zone_start_allocation_size_ = info->zone()->allocation_size(); 1411 timer_.Start(); 1412 } 1413 } 1414 1415 1416 CompilationPhase::~CompilationPhase() { 1417 if (FLAG_hydrogen_stats) { 1418 unsigned size = zone()->allocation_size(); 1419 size += info_->zone()->allocation_size() - info_zone_start_allocation_size_; 1420 isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size); 1421 } 1422 } 1423 1424 1425 bool CompilationPhase::ShouldProduceTraceOutput() const { 1426 // Trace if the appropriate trace flag is set and the phase name's first 1427 // character is in the FLAG_trace_phase command line parameter. 1428 AllowHandleDereference allow_deref; 1429 bool tracing_on = info()->IsStub() 1430 ? FLAG_trace_hydrogen_stubs 1431 : (FLAG_trace_hydrogen && 1432 info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter)); 1433 return (tracing_on && 1434 base::OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL); 1435 } 1436 1437 } } // namespace v8::internal 1438