// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler.h"

#include <algorithm>

#include "src/ast/ast-numbering.h"
#include "src/ast/prettyprinter.h"
#include "src/ast/scopeinfo.h"
#include "src/ast/scopes.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/compilation-cache.h"
#include "src/compiler/pipeline.h"
#include "src/crankshaft/hydrogen.h"
#include "src/debug/debug.h"
#include "src/debug/liveedit.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/interpreter/interpreter.h"
#include "src/isolate-inl.h"
#include "src/log-inl.h"
#include "src/messages.h"
#include "src/parsing/parser.h"
#include "src/parsing/rewriter.h"
#include "src/parsing/scanner-character-streams.h"
#include "src/runtime-profiler.h"
#include "src/snapshot/code-serializer.h"
#include "src/typing-asm.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {


// Defines a CompilationInfo getter that forwards to the ParseInfo accessor of
// the same name. Requires that a ParseInfo is attached (CHECKed).
#define PARSE_INFO_GETTER(type, name)  \
  type CompilationInfo::name() const { \
    CHECK(parse_info());               \
    return parse_info()->name();       \
  }


// Like PARSE_INFO_GETTER, but yields {def} when no ParseInfo is attached.
#define PARSE_INFO_GETTER_WITH_DEFAULT(type, name, def) \
  type CompilationInfo::name() const {                  \
    return parse_info() ? parse_info()->name() : def;   \
  }


PARSE_INFO_GETTER(Handle<Script>, script)
PARSE_INFO_GETTER(FunctionLiteral*, literal)
PARSE_INFO_GETTER_WITH_DEFAULT(Scope*, scope, nullptr)
PARSE_INFO_GETTER_WITH_DEFAULT(Handle<Context>, context,
                               Handle<Context>::null())
PARSE_INFO_GETTER(Handle<SharedFunctionInfo>, shared_info)

#undef PARSE_INFO_GETTER
#undef PARSE_INFO_GETTER_WITH_DEFAULT

// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() { info_->set_deferred_handles(deferred_.Detach()); }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};

// Helper that times a scoped region and records the elapsed time.
struct ScopedTimer {
  explicit ScopedTimer(base::TimeDelta* location) : location_(location) {
    DCHECK(location_ != NULL);
    timer_.Start();
  }

  // Accumulates (+=) into *location_, so one TimeDelta can sum many regions.
  ~ScopedTimer() { *location_ += timer_.Elapsed(); }

  base::ElapsedTimer timer_;
  base::TimeDelta* location_;
};

// ----------------------------------------------------------------------------
// Implementation of CompilationInfo

bool CompilationInfo::has_shared_info() const {
  return parse_info_ && !parse_info_->shared_info().is_null();
}

// Constructor used when compiling a JSFunction (mode BASE); delegates to the
// designated constructor below and then applies flag-controlled settings.
CompilationInfo::CompilationInfo(ParseInfo* parse_info,
                                 Handle<JSFunction> closure)
    : CompilationInfo(parse_info, {}, Code::ComputeFlags(Code::FUNCTION), BASE,
                      parse_info->isolate(), parse_info->zone()) {
  closure_ = closure;

  // Compiling for the snapshot typically results in different code than
  // compiling later on. This means that code recompiled with deoptimization
  // support won't be "equivalent" (as defined by SharedFunctionInfo::
  // EnableDeoptimizationSupport), so it will replace the old code and all
  // its type feedback. To avoid this, always compile functions in the snapshot
  // with deoptimization support.
  if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();

  if (FLAG_function_context_specialization) MarkAsFunctionContextSpecializing();
  if (FLAG_turbo_inlining) MarkAsInliningEnabled();
  if (FLAG_turbo_source_positions) MarkAsSourcePositionsEnabled();
  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
}

// Constructor used for stub compilation (mode STUB); no ParseInfo attached.
CompilationInfo::CompilationInfo(Vector<const char> debug_name,
                                 Isolate* isolate, Zone* zone,
                                 Code::Flags code_flags)
    : CompilationInfo(nullptr, debug_name, code_flags, STUB, isolate, zone) {}

// Designated constructor; all other constructors delegate here.
CompilationInfo::CompilationInfo(ParseInfo* parse_info,
                                 Vector<const char> debug_name,
                                 Code::Flags code_flags, Mode mode,
                                 Isolate* isolate, Zone* zone)
    : parse_info_(parse_info),
      isolate_(isolate),
      flags_(0),
      code_flags_(code_flags),
      mode_(mode),
      osr_ast_id_(BailoutId::None()),
      zone_(zone),
      deferred_handles_(nullptr),
      dependencies_(isolate, zone),
      bailout_reason_(kNoReason),
      prologue_offset_(Code::kPrologueOffsetNotSet),
      track_positions_(FLAG_hydrogen_track_positions ||
                       isolate->is_profiling()),
      parameter_count_(0),
      optimization_id_(-1),
      osr_expr_stack_height_(0),
      debug_name_(debug_name) {}

CompilationInfo::~CompilationInfo() {
  DisableFutureOptimization();
  // Unregister any code dependencies that were never committed.
  dependencies()->Rollback();
  delete deferred_handles_;
}


// For stubs the parameter count is set explicitly; for functions it comes
// from the scope.
int CompilationInfo::num_parameters() const {
  return !IsStub() ? scope()->num_parameters() : parameter_count_;
}


int CompilationInfo::num_parameters_including_this() const {
  return num_parameters() + (is_this_defined() ? 1 : 0);
}


bool CompilationInfo::is_this_defined() const { return !IsStub(); }


// Primitive functions are unlikely to be picked up by the stack-walking
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
bool CompilationInfo::ShouldSelfOptimize() {
  return FLAG_crankshaft &&
         !(literal()->flags() & AstProperties::kDontSelfOptimize) &&
         !literal()->dont_optimize() &&
         literal()->scope()->AllowsLazyCompilation() &&
         !shared_info()->optimization_disabled();
}


bool CompilationInfo::has_simple_parameters() {
  return scope()->has_simple_parameters();
}


// Returns a human-readable name for this compilation, preferring the literal's
// debug name, then the SharedFunctionInfo's, then the explicit debug_name_.
base::SmartArrayPointer<char> CompilationInfo::GetDebugName() const {
  if (parse_info() && parse_info()->literal()) {
    AllowHandleDereference allow_deref;
    return parse_info()->literal()->debug_name()->ToCString();
  }
  if (parse_info() && !parse_info()->shared_info().is_null()) {
    return parse_info()->shared_info()->DebugName()->ToCString();
  }
  Vector<const char> name_vec = debug_name_;
  if (name_vec.is_empty()) name_vec = ArrayVector("unknown");
  // debug_name_ is not NUL-terminated; copy into a terminated buffer.
  base::SmartArrayPointer<char> name(new char[name_vec.length() + 1]);
  memcpy(name.get(), name_vec.start(), name_vec.length());
  name[name_vec.length()] = '\0';
  return name;
}

// Maps the kind of code being generated to the stack frame type its
// activations will use.
StackFrame::Type CompilationInfo::GetOutputStackFrameType() const {
  switch (output_code_kind()) {
    case Code::STUB:
    case Code::BYTECODE_HANDLER:
    case Code::HANDLER:
    case Code::BUILTIN:
#define CASE_KIND(kind) case Code::kind:
      IC_KIND_LIST(CASE_KIND)
#undef CASE_KIND
      return StackFrame::STUB;
    case Code::WASM_FUNCTION:
      return StackFrame::WASM;
    case Code::JS_TO_WASM_FUNCTION:
      return StackFrame::JS_TO_WASM;
    case Code::WASM_TO_JS_FUNCTION:
      return StackFrame::WASM_TO_JS;
    default:
      UNIMPLEMENTED();
      return StackFrame::NONE;
  }
}

// Encodes eval/native/language-mode bits for the DeclareGlobals runtime call.
int CompilationInfo::GetDeclareGlobalsFlags() const {
  DCHECK(DeclareGlobalsLanguageMode::is_valid(parse_info()->language_mode()));
  return DeclareGlobalsEvalFlag::encode(parse_info()->is_eval()) |
         DeclareGlobalsNativeFlag::encode(parse_info()->is_native()) |
         DeclareGlobalsLanguageMode::encode(parse_info()->language_mode());
}

bool CompilationInfo::ExpectsJSReceiverAsReceiver() {
  return is_sloppy(parse_info()->language_mode()) && !parse_info()->is_native();
}

#if DEBUG
void CompilationInfo::PrintAstForTesting() {
  PrintF("--- Source from AST ---\n%s\n",
         PrettyPrinter(isolate()).PrintProgram(literal()));
}
#endif

// ----------------------------------------------------------------------------
// Implementation of CompilationJob

// Phase 1 of an optimizing compile: build the graph. May allocate handles;
// JS execution is disallowed for the duration.
CompilationJob::Status CompilationJob::CreateGraph() {
  DisallowJavascriptExecution no_js(isolate());
  DCHECK(info()->IsOptimizing());

  if (FLAG_trace_opt) {
    OFStream os(stdout);
    os << "[compiling method " << Brief(*info()->closure()) << " using "
       << compiler_name_;
    if (info()->is_osr()) os << " OSR";
    os << "]" << std::endl;
  }

  // Delegate to the underlying implementation.
  DCHECK_EQ(SUCCEEDED, last_status());
  ScopedTimer t(&time_taken_to_create_graph_);
  return SetLastStatus(CreateGraphImpl());
}

// Phase 2: optimize the graph. Runs under heap/handle/dependency lockdown so
// it is safe to execute on a background thread.
CompilationJob::Status CompilationJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  // Delegate to the underlying implementation.
  DCHECK_EQ(SUCCEEDED, last_status());
  ScopedTimer t(&time_taken_to_optimize_);
  return SetLastStatus(OptimizeGraphImpl());
}

// Phase 3: emit the code object. Back on the main thread; allocation is
// allowed again, but code dependencies must not change anymore.
CompilationJob::Status CompilationJob::GenerateCode() {
  DisallowCodeDependencyChange no_dependency_change;
  DisallowJavascriptExecution no_js(isolate());
  DCHECK(!info()->dependencies()->HasAborted());

  // Delegate to the underlying implementation.
  DCHECK_EQ(SUCCEEDED, last_status());
  ScopedTimer t(&time_taken_to_codegen_);
  return SetLastStatus(GenerateCodeImpl());
}


namespace {

// Registers {code} in the heap-side weak-object-to-code table so that {code}
// is deoptimized when {object} dies.
void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
                                   Handle<Code> code) {
  Handle<WeakCell> cell = Code::WeakCellFor(code);
  Heap* heap = isolate->heap();
  Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
  dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
  heap->AddWeakObjectToCodeDependency(object, dep);
}

}  // namespace

// Scans the relocation info of optimized {code} for embedded objects and cells
// that should be treated as weak, and registers the corresponding
// dependencies so the code is deoptimized if they are collected.
void CompilationJob::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
  // TODO(turbofan): Move this to pipeline.cc once Crankshaft dies.
  Isolate* const isolate = code->GetIsolate();
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> maps;
  std::vector<Handle<HeapObject>> objects;
  {
    // Collect handles first without allocating, then register below (which
    // may allocate).
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                          RelocInfo::ModeMask(RelocInfo::CELL);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::CELL &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
        objects.push_back(handle(it.rinfo()->target_cell(), isolate));
      } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
                 code->IsWeakObjectInOptimizedCode(
                     it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate);
        if (object->IsMap()) {
          maps.push_back(Handle<Map>::cast(object));
        } else {
          objects.push_back(object);
        }
      }
    }
  }
  for (Handle<Map> map : maps) {
    if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
      isolate->heap()->AddRetainedMap(map);
    }
    Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
  }
  for (Handle<HeapObject> object : objects) {
    AddWeakObjectToCodeDependency(isolate, object, code);
  }
  code->set_can_have_weak_objects(true);
}

// Bumps the opt counter and, depending on tracing flags, reports per-phase
// timings and aggregate compilation statistics.
void CompilationJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race. Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    // Process-wide totals; not thread-safe, assumed to run on one thread.
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}

// ----------------------------------------------------------------------------
// Local helper methods that make up the compilation pipeline.

namespace {

// Whether {shared} is the top-level function of an eval'd script.
bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) {
  return shared->is_toplevel() && shared->script()->IsScript() &&
         Script::cast(shared->script())->compilation_type() ==
             Script::COMPILATION_TYPE_EVAL;
}

void RecordFunctionCompilation(CodeEventListener::LogEventsAndTags tag,
                               CompilationInfo* info) {
  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->is_profiling()) {
    Handle<SharedFunctionInfo> shared = info->shared_info();
    Handle<Script> script = info->parse_info()->script();
    Handle<AbstractCode> abstract_code =
        info->has_bytecode_array()
            ? Handle<AbstractCode>::cast(info->bytecode_array())
            : Handle<AbstractCode>::cast(info->code());
    // Do not log the lazy-compile trampoline itself.
    if (abstract_code.is_identical_to(
            info->isolate()->builtins()->CompileLazy())) {
      return;
    }
    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
    int column_num =
        Script::GetColumnNumber(script, shared->start_position()) + 1;
    String* script_name = script->name()->IsString()
                              ? String::cast(script->name())
                              : info->isolate()->heap()->empty_string();
    CodeEventListener::LogEventsAndTags log_tag =
        Logger::ToNativeByScript(tag, *script);
    PROFILE(info->isolate(),
            CodeCreateEvent(log_tag, *abstract_code, *shared, script_name,
                            line_num, column_num));
  }
}

void EnsureFeedbackMetadata(CompilationInfo* info) {
  DCHECK(info->has_shared_info());

  // If no type feedback metadata exists, we create it now. At this point the
  // AstNumbering pass has already run. Note the snapshot can contain outdated
  // vectors for a different configuration, hence we also recreate a new vector
  // when the function is not compiled (i.e. no code was serialized).

  // TODO(mvstanton): reintroduce is_empty() predicate to feedback_metadata().
  if (info->shared_info()->feedback_metadata()->length() == 0 ||
      !info->shared_info()->is_compiled()) {
    Handle<TypeFeedbackMetadata> feedback_metadata = TypeFeedbackMetadata::New(
        info->isolate(), info->literal()->feedback_vector_spec());
    info->shared_info()->set_feedback_metadata(*feedback_metadata);
  }

  // It's very important that recompiles do not alter the structure of the type
  // feedback vector. Verify that the structure fits the function literal.
  CHECK(!info->shared_info()->feedback_metadata()->SpecDiffersFrom(
      info->literal()->feedback_vector_spec()));
}

// Decides whether the function described by {info} should be compiled with
// the Ignition interpreter rather than the full code generator.
bool UseIgnition(CompilationInfo* info) {
  DCHECK(info->has_shared_info());

  // When requesting debug code as a replacement for existing code, we provide
  // the same kind as the existing code (to prevent implicit tier-change).
  if (info->is_debug() && info->shared_info()->is_compiled()) {
    return info->shared_info()->HasBytecodeArray();
  }

  // For generator or async functions we might avoid Ignition wholesale.
  if (info->shared_info()->is_resumable() && !FLAG_ignition_generators) {
    return false;
  }

  // Since we can't OSR from Ignition, skip Ignition for asm.js functions.
  if (info->shared_info()->asm_function()) {
    return false;
  }

  // Checks whether top level functions should be passed by the filter.
  if (info->shared_info()->is_toplevel()) {
    Vector<const char> filter = CStrVector(FLAG_ignition_filter);
    return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*');
  }

  // Finally respect the filter.
  return info->shared_info()->PassesFilter(FLAG_ignition_filter);
}

// Size of the generated code plus metadata, for either tier.
int CodeAndMetadataSize(CompilationInfo* info) {
  if (info->has_bytecode_array()) {
    return info->bytecode_array()->SizeIncludingMetadata();
  }
  return info->code()->SizeIncludingMetadata();
}

// Generates unoptimized code (bytecode or full-codegen code) for {info},
// optionally validating asm.js first. Returns false on failure.
bool GenerateUnoptimizedCode(CompilationInfo* info) {
  bool success;
  EnsureFeedbackMetadata(info);
  if (FLAG_validate_asm && info->scope()->asm_module()) {
    AsmTyper typer(info->isolate(), info->zone(), *(info->script()),
                   info->literal());
    if (FLAG_enable_simd_asmjs) {
      typer.set_allow_simd(true);
    }
    // Validation failure is only reported, not fatal: compilation proceeds.
    if (!typer.Validate()) {
      DCHECK(!info->isolate()->has_pending_exception());
      PrintF("Validation of asm.js module failed: %s", typer.error_message());
    }
  }
  if (FLAG_ignition && UseIgnition(info)) {
    success = interpreter::Interpreter::MakeBytecode(info);
  } else {
    success = FullCodeGenerator::MakeCode(info);
  }
  if (success) {
    Isolate* isolate = info->isolate();
    Counters* counters = isolate->counters();
    // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
    counters->total_baseline_code_size()->Increment(CodeAndMetadataSize(info));
    counters->total_baseline_compile_count()->Increment(1);
  }
  return success;
}

// Runs analysis and unoptimized code generation; raises a pending exception
// (StackOverflow by default) on failure.
bool CompileUnoptimizedCode(CompilationInfo* info) {
  DCHECK(AllowCompilation::IsAllowed(info->isolate()));
  if (!Compiler::Analyze(info->parse_info()) ||
      !GenerateUnoptimizedCode(info)) {
    Isolate* isolate = info->isolate();
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
    return false;
  }
  return true;
}

// Creates a ScopeInfo from the compiled scope and installs it on {shared}.
void InstallSharedScopeInfo(CompilationInfo* info,
                            Handle<SharedFunctionInfo> shared) {
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->isolate(), info->zone(), info->scope());
  shared->set_scope_info(*scope_info);
}

// Installs the freshly generated code (and bytecode, if any) on {shared}.
void InstallSharedCompilationResult(CompilationInfo* info,
                                    Handle<SharedFunctionInfo> shared) {
  // TODO(mstarzinger): Compiling for debug code might be used to reveal inner
  // functions via {FindSharedFunctionInfoInScript}, in which case we end up
  // regenerating existing bytecode. Fix this!
  if (info->is_debug() && info->has_bytecode_array()) {
    shared->ClearBytecodeArray();
  }
  // Assert that we are not overwriting (possibly patched) debug code.
  DCHECK(!shared->HasDebugInfo());
  DCHECK(!info->code().is_null());
  shared->ReplaceCode(*info->code());
  if (info->has_bytecode_array()) {
    DCHECK(!shared->HasBytecodeArray());  // Only compiled once.
    shared->set_bytecode_array(*info->bytecode_array());
  }
}

// Parses and compiles unoptimized code for {info}, installing the result on
// the SharedFunctionInfo. Returns an empty handle on failure.
MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) {
  VMState<COMPILER> state(info->isolate());
  PostponeInterruptsScope postpone(info->isolate());

  // Parse and update CompilationInfo with the results.
  if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>();
  Handle<SharedFunctionInfo> shared = info->shared_info();
  DCHECK_EQ(shared->language_mode(), info->literal()->language_mode());

  // Compile either unoptimized code or bytecode for the interpreter.
  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();

  // Update the shared function info with the scope info.
  InstallSharedScopeInfo(info, shared);

  // Install compilation result on the shared function info
  InstallSharedCompilationResult(info, shared);

  // Record the function compilation event.
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);

  return info->code();
}

// Looks up previously cached optimized code for {function} in the optimized
// code map of its SharedFunctionInfo; also restores cached literals.
MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
    Handle<JSFunction> function, BailoutId osr_ast_id) {
  Handle<SharedFunctionInfo> shared(function->shared());
  DisallowHeapAllocation no_gc;
  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
      function->context()->native_context(), osr_ast_id);
  if (cached.code != nullptr) {
    // Caching of optimized code enabled and optimized code found.
    if (cached.literals != nullptr) function->set_literals(cached.literals);
    DCHECK(!cached.code->marked_for_deoptimization());
    DCHECK(function->shared()->is_compiled());
    return Handle<Code>(cached.code);
  }
  return MaybeHandle<Code>();
}

// Caches the optimized code produced for {info} in the optimized code map,
// unless specialization or OSR makes it non-shareable.
void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
  Handle<Code> code = info->code();
  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.

  // Function context specialization folds-in the function context,
  // so no sharing can occur.
  if (info->is_function_context_specializing()) return;
  // Frame specialization implies function context specialization.
  DCHECK(!info->is_frame_specializing());

  // Cache optimized context-specific code.
  Handle<JSFunction> function = info->closure();
  Handle<SharedFunctionInfo> shared(function->shared());
  Handle<LiteralsArray> literals(function->literals());
  Handle<Context> native_context(function->context()->native_context());
  SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
                                            literals, info->osr_ast_id());

  // Do not cache (native) context-independent code compiled for OSR.
  if (code->is_turbofanned() && info->is_osr()) return;

  // Cache optimized (native) context-independent code.
  if (FLAG_turbo_cache_shared_code && code->is_turbofanned() &&
      !info->is_native_context_specializing()) {
    DCHECK(!info->is_function_context_specializing());
    DCHECK(info->osr_ast_id().IsNone());
    Handle<SharedFunctionInfo> shared(function->shared());
    SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap(shared, code);
  }
}

// Runs the AstNumbering pass and mirrors the results (node count,
// optimization-disabling flags) onto the SharedFunctionInfo, if present.
bool Renumber(ParseInfo* parse_info) {
  if (!AstNumbering::Renumber(parse_info->isolate(), parse_info->zone(),
                              parse_info->literal())) {
    return false;
  }
  Handle<SharedFunctionInfo> shared_info = parse_info->shared_info();
  if (!shared_info.is_null()) {
    FunctionLiteral* lit = parse_info->literal();
    shared_info->set_ast_node_count(lit->ast_node_count());
    if (lit->dont_optimize_reason() != kNoReason) {
      shared_info->DisableOptimization(lit->dont_optimize_reason());
    }
    shared_info->set_dont_crankshaft(
        shared_info->dont_crankshaft() ||
        (lit->flags() & AstProperties::kDontCrankshaft));
  }
  return true;
}

// Decides whether {shared} should be optimized with TurboFan (as opposed to
// Crankshaft).
bool UseTurboFan(Handle<SharedFunctionInfo> shared) {
  bool optimization_disabled = shared->optimization_disabled();
  bool dont_crankshaft = shared->dont_crankshaft();

  // Check the enabling conditions for Turbofan.
  // 1. "use asm" code.
  bool is_turbofanable_asm =
      FLAG_turbo_asm && shared->asm_function() && !optimization_disabled;

  // 2. Fallback for features unsupported by Crankshaft. Note "~~" is the
  //    default value of --turbo-filter, i.e. the filter was not customized.
  bool is_unsupported_by_crankshaft_but_turbofanable =
      dont_crankshaft && strcmp(FLAG_turbo_filter, "~~") == 0 &&
      !optimization_disabled;

  // 3. Explicitly enabled by the command-line filter.
  bool passes_turbo_filter = shared->PassesFilter(FLAG_turbo_filter);

  return is_turbofanable_asm || is_unsupported_by_crankshaft_but_turbofanable ||
         passes_turbo_filter;
}

// Runs all three phases of {job} synchronously on the current thread.
// Returns true and records the result on success.
bool GetOptimizedCodeNow(CompilationJob* job) {
  CompilationInfo* info = job->info();
  Isolate* isolate = info->isolate();

  // Parsing is not required when optimizing from existing bytecode.
  if (!info->is_optimizing_from_bytecode()) {
    if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
    EnsureFeedbackMetadata(info);
  }

  JSFunction::EnsureLiterals(info->closure());

  TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::RecompileSynchronous);
  TRACE_EVENT0("v8", "V8.RecompileSynchronous");

  if (job->CreateGraph() != CompilationJob::SUCCEEDED ||
      job->OptimizeGraph() != CompilationJob::SUCCEEDED ||
      job->GenerateCode() != CompilationJob::SUCCEEDED) {
    if (FLAG_trace_opt) {
      PrintF("[aborted optimizing ");
      info->closure()->ShortPrint();
      PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
    }
    return false;
  }

  // Success!
  job->RecordOptimizationStats();
  DCHECK(!isolate->has_pending_exception());
  InsertCodeIntoOptimizedCodeMap(info);
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
  return true;
}

// Runs the graph-creation phase of {job} on this thread and queues the rest
// for the concurrent recompilation dispatcher. Returns false if the queue is
// full or graph creation fails.
bool GetOptimizedCodeLater(CompilationJob* job) {
  CompilationInfo* info = job->info();
  Isolate* isolate = info->isolate();

  if (!isolate->optimizing_compile_dispatcher()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
      info->closure()->ShortPrint();
      PrintF(" later.\n");
    }
    return false;
  }

  // All handles below this point will be allocated in a deferred handle scope
  // that is detached and handed off to the background thread when we return.
  CompilationHandleScope handle_scope(info);

  // Parsing is not required when optimizing from existing bytecode.
  if (!info->is_optimizing_from_bytecode()) {
    if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
    EnsureFeedbackMetadata(info);
  }

  JSFunction::EnsureLiterals(info->closure());

  // Reopen handles in the new CompilationHandleScope.
  info->ReopenHandlesInNewHandleScope();
  info->parse_info()->ReopenHandlesInNewHandleScope();

  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
  RuntimeCallTimerScope runtimeTimer(info->isolate(),
                                     &RuntimeCallStats::RecompileSynchronous);
  TRACE_EVENT0("v8", "V8.RecompileSynchronous");

  if (job->CreateGraph() != CompilationJob::SUCCEEDED) return false;
  isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);

  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Queued ");
    info->closure()->ShortPrint();
    PrintF(" for concurrent optimization.\n");
  }
  return true;
}

// Produces optimized code for {function}, consulting the optimized code map
// first, and dispatching to synchronous or concurrent compilation. Returns an
// empty handle (with no pending exception) when optimization is not possible.
MaybeHandle<Code> GetOptimizedCode(Handle<JSFunction> function,
                                   Compiler::ConcurrencyMode mode,
                                   BailoutId osr_ast_id = BailoutId::None(),
                                   JavaScriptFrame* osr_frame = nullptr) {
  Isolate* isolate = function->GetIsolate();
  Handle<SharedFunctionInfo> shared(function->shared(), isolate);

  Handle<Code> cached_code;
  if (GetCodeFromOptimizedCodeMap(function, osr_ast_id)
          .ToHandle(&cached_code)) {
    if (FLAG_trace_opt) {
      PrintF("[found optimized code for ");
      function->ShortPrint();
      if (!osr_ast_id.IsNone()) {
        PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
      }
      PrintF("]\n");
    }
    return cached_code;
  }

  // Reset profiler ticks, function is no longer considered hot.
  if (shared->is_compiled()) {
    shared->code()->set_profiler_ticks(0);
  }

  VMState<COMPILER> state(isolate);
  DCHECK(!isolate->has_pending_exception());
  PostponeInterruptsScope postpone(isolate);
  bool use_turbofan = UseTurboFan(shared);
  base::SmartPointer<CompilationJob> job(
      use_turbofan ? compiler::Pipeline::NewCompilationJob(function)
                   : new HCompilationJob(function));
  CompilationInfo* info = job->info();
  ParseInfo* parse_info = info->parse_info();

  info->SetOptimizingForOsr(osr_ast_id, osr_frame);

  // Do not use Crankshaft/TurboFan if we need to be able to set break points.
  if (info->shared_info()->HasDebugInfo()) {
    info->AbortOptimization(kFunctionBeingDebugged);
    return MaybeHandle<Code>();
  }

  // Limit the number of times we try to optimize functions.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info->shared_info()->opt_count() > kMaxOptCount) {
    info->AbortOptimization(kOptimizedTooManyTimes);
    return MaybeHandle<Code>();
  }

  CanonicalHandleScope canonical(isolate);
  TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::OptimizeCode);
  TRACE_EVENT0("v8", "V8.OptimizeCode");

  // TurboFan can optimize directly from existing bytecode.
  if (FLAG_turbo_from_bytecode && use_turbofan &&
      info->shared_info()->HasBytecodeArray()) {
    info->MarkAsOptimizeFromBytecode();
  }

  // Top-level eval code needs eager parsing with eval-specific settings.
  if (IsEvalToplevel(shared)) {
    parse_info->set_eval();
    if (function->context()->IsNativeContext()) parse_info->set_global();
    parse_info->set_toplevel();
    parse_info->set_allow_lazy_parsing(false);
    parse_info->set_lazy(false);
  }

  if (mode == Compiler::CONCURRENT) {
    if (GetOptimizedCodeLater(job.get())) {
      job.Detach();  // The background recompile job owns this now.
      return isolate->builtins()->InOptimizationQueue();
    }
  } else {
    if (GetOptimizedCodeNow(job.get())) return info->code();
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return MaybeHandle<Code>();
}

// Visits all threads and optimized functions looking for live interpreter
// activations of a given SharedFunctionInfo, and can patch the return
// addresses of such activations to trigger baseline recompilation on return.
class InterpreterActivationsFinder : public ThreadVisitor,
                                     public OptimizedFunctionVisitor {
 public:
  explicit InterpreterActivationsFinder(SharedFunctionInfo* shared)
      : shared_(shared), has_activations_(false) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    Address* activation_pc_address = nullptr;
    JavaScriptFrameIterator it(isolate, top);
    for (; !it.done(); it.Advance()) {
      JavaScriptFrame* frame = it.frame();
      if (!frame->is_interpreted()) continue;
      if (frame->function()->shared() == shared_) {
        has_activations_ = true;
        activation_pc_address = frame->pc_address();
      }
    }

    // Only the outermost matching activation per thread is recorded (the
    // loop keeps overwriting activation_pc_address as it walks outward).
    if (activation_pc_address) {
      activation_pc_addresses_.push_back(activation_pc_address);
    }
  }

  void VisitFunction(JSFunction* function) {
    if (function->Inlines(shared_)) has_activations_ = true;
  }

  void EnterContext(Context* context) {}
  void LeaveContext(Context* context) {}

  // Redirects recorded activations to the InterpreterMarkBaselineOnReturn
  // builtin; returns whether any activation was patched.
  bool MarkActivationsForBaselineOnReturn(Isolate* isolate) {
    if (activation_pc_addresses_.empty()) return false;

    for (Address* activation_pc_address : activation_pc_addresses_) {
      DCHECK(isolate->inner_pointer_to_code_cache()
                 ->GetCacheEntry(*activation_pc_address)
                 ->code->is_interpreter_trampoline_builtin());
      *activation_pc_address =
          isolate->builtins()->InterpreterMarkBaselineOnReturn()->entry();
    }
    return true;
  }

  bool has_activations() { return has_activations_; }

 private:
  SharedFunctionInfo* shared_;
  bool has_activations_;
  std::vector<Address*> activation_pc_addresses_;
};

// Checks current, archived, and (when optimizing from bytecode) optimized
// activations for uses of the function tracked by {activations_finder}.
bool HasInterpreterActivations(
    Isolate* isolate, InterpreterActivationsFinder* activations_finder) {
  activations_finder->VisitThread(isolate, isolate->thread_local_top());
  isolate->thread_manager()->IterateArchivedThreads(activations_finder);
  if (FLAG_turbo_from_bytecode) {
    // If we are able to optimize functions directly from bytecode, then there
    // might be optimized functions that rely on bytecode being around. We need
    // to prevent switching the given function to baseline code in those cases.
    Deoptimizer::VisitAllOptimizedFunctions(isolate, activations_finder);
  }
  return activations_finder->has_activations();
}

// Switches an interpreted function to baseline (full-codegen) code, clearing
// its bytecode. Bails out (empty handle) whenever the switch is unsafe.
MaybeHandle<Code> GetBaselineCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  VMState<COMPILER> state(isolate);
  PostponeInterruptsScope postpone(isolate);
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, function);

  // Reset profiler ticks, function is no longer considered hot.
  if (function->shared()->HasBytecodeArray()) {
    function->shared()->set_profiler_ticks(0);
  }

  // Nothing left to do if the function already has baseline code.
  if (function->shared()->code()->kind() == Code::FUNCTION) {
    return Handle<Code>(function->shared()->code());
  }

  // We do not switch to baseline code when the debugger might have created a
  // copy of the bytecode with break slots to be able to set break points.
  if (function->shared()->HasDebugInfo()) {
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we do not switch generators or async functions to
  // baseline code because there might be suspended activations stored in
  // generator objects on the heap. We could eventually go directly to
  // TurboFan in this case.
  if (function->shared()->is_resumable()) {
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we disable switching to baseline code in the presence
  // of interpreter activations of the given function. The reasons are:
  // 1) The debugger assumes each function is either full-code or bytecode.
  // 2) The underlying bytecode is cleared below, breaking stack unwinding.
  InterpreterActivationsFinder activations_finder(function->shared());
  if (HasInterpreterActivations(isolate, &activations_finder)) {
    if (FLAG_trace_opt) {
      OFStream os(stdout);
      os << "[unable to switch " << Brief(*function) << " due to activations]"
         << std::endl;
    }

    if (activations_finder.MarkActivationsForBaselineOnReturn(isolate)) {
      if (FLAG_trace_opt) {
        OFStream os(stdout);
        os << "[marking " << Brief(function->shared())
           << " for baseline recompilation on return]" << std::endl;
      }
    }

    return MaybeHandle<Code>();
  }

  if (FLAG_trace_opt) {
    OFStream os(stdout);
    os << "[switching method " << Brief(*function) << " to baseline code]"
       << std::endl;
  }

  // Parse and update CompilationInfo with the results.
  if (!Parser::ParseStatic(info.parse_info())) return MaybeHandle<Code>();
  Handle<SharedFunctionInfo> shared = info.shared_info();
  DCHECK_EQ(shared->language_mode(), info.literal()->language_mode());

  // Compile baseline code using the full code generator.
  if (!Compiler::Analyze(info.parse_info()) ||
      !FullCodeGenerator::MakeCode(&info)) {
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we play it safe and remove the bytecode array when we
  // switch to baseline code. We might consider keeping around the bytecode so
  // that it can be used as the "source of truth" eventually.
  shared->ClearBytecodeArray();

  // Update the shared function info with the scope info.
  InstallSharedScopeInfo(&info, shared);

  // Install compilation result on the shared function info
  InstallSharedCompilationResult(&info, shared);

  // Record the function compilation event.
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, &info);

  return info.code();
}

// Entry point for lazy compilation: returns cached or freshly compiled
// (unoptimized, or optimized under --always-opt) code for {function}.
MaybeHandle<Code> GetLazyCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(!isolate->has_pending_exception());
  DCHECK(!function->is_compiled());
  TimerEventScope<TimerEventCompileCode> compile_timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::CompileCodeLazy);
  TRACE_EVENT0("v8", "V8.CompileCode");
  AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());

  if (FLAG_turbo_cache_shared_code) {
    Handle<Code> cached_code;
    if (GetCodeFromOptimizedCodeMap(function, BailoutId::None())
            .ToHandle(&cached_code)) {
      if (FLAG_trace_opt) {
        PrintF("[found optimized code for ");
        function->ShortPrint();
        PrintF(" during unoptimized compile]\n");
      }
      DCHECK(function->shared()->is_compiled());
      return cached_code;
    }
  }

  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, GetUnoptimizedCode(&info), Code);

  if (FLAG_always_opt) {
    Handle<Code> opt_code;
    if (GetOptimizedCode(function, Compiler::NOT_CONCURRENT)
            .ToHandle(&opt_code)) {
      result = opt_code;
    }
  }

  return result;
}


Handle<SharedFunctionInfo> NewSharedFunctionInfoForLiteral(
    Isolate* isolate, FunctionLiteral*
    Handle<Script> script) {
  Handle<Code> code = isolate->builtins()->CompileLazy();
  Handle<ScopeInfo> scope_info = handle(ScopeInfo::Empty(isolate));
  Handle<SharedFunctionInfo> result = isolate->factory()->NewSharedFunctionInfo(
      literal->name(), literal->materialized_literal_count(), literal->kind(),
      code, scope_info);
  SharedFunctionInfo::InitFromFunctionLiteral(result, literal);
  SharedFunctionInfo::SetScript(result, script);
  return result;
}

// Compiles a top-level unit of code (script, eval or module), parsing first
// if needed, and returns its SharedFunctionInfo — or an empty handle when
// parsing or compilation fails.
Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  TimerEventScope<TimerEventCompileCode> timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
  TRACE_EVENT0("v8", "V8.CompileCode");
  PostponeInterruptsScope postpone(isolate);
  DCHECK(!isolate->native_context().is_null());
  ParseInfo* parse_info = info->parse_info();
  Handle<Script> script = parse_info->script();

  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(v8::Context::kDebugIdIndex));

  isolate->debug()->OnBeforeCompile(script);

  DCHECK(parse_info->is_eval() || parse_info->is_global() ||
         parse_info->is_module());

  parse_info->set_toplevel();

  Handle<SharedFunctionInfo> result;

  { VMState<COMPILER> state(info->isolate());
    if (parse_info->literal() == NULL) {
      // Parse the script if needed (if it's already parsed, literal() is
      // non-NULL). If compiling for debugging, we may eagerly compile inner
      // functions, so do not parse lazily in that case.
      ScriptCompiler::CompileOptions options = parse_info->compile_options();
      bool parse_allow_lazy = (options == ScriptCompiler::kConsumeParserCache ||
                               String::cast(script->source())->length() >
                                   FLAG_min_preparse_length) &&
                              !info->is_debug();

      // Consider parsing eagerly when targeting the code cache.
      parse_allow_lazy &= !(FLAG_serialize_eager && info->will_serialize());

      // Consider parsing eagerly when targeting Ignition.
      parse_allow_lazy &= !(FLAG_ignition && FLAG_ignition_eager &&
                            !isolate->serializer_enabled());

      parse_info->set_allow_lazy_parsing(parse_allow_lazy);
      if (!parse_allow_lazy &&
          (options == ScriptCompiler::kProduceParserCache ||
           options == ScriptCompiler::kConsumeParserCache)) {
        // We are going to parse eagerly, but we either 1) have cached data
        // produced by lazy parsing or 2) are asked to generate cached data.
        // Eager parsing cannot benefit from cached data, and producing cached
        // data while parsing eagerly is not implemented.
        parse_info->set_cached_data(nullptr);
        parse_info->set_compile_options(ScriptCompiler::kNoCompileOptions);
      }
      if (!Parser::ParseStatic(parse_info)) {
        return Handle<SharedFunctionInfo>::null();
      }
    }

    DCHECK(!info->is_debug() || !parse_info->allow_lazy_parsing());

    FunctionLiteral* lit = parse_info->literal();

    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    RuntimeCallTimerScope runtimeTimer(
        isolate, parse_info->is_eval() ? &RuntimeCallStats::CompileEval
                                       : &RuntimeCallStats::Compile);
    HistogramTimer* rate = parse_info->is_eval()
                               ? info->isolate()->counters()->compile_eval()
                               : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);
    TRACE_EVENT0("v8", parse_info->is_eval() ? "V8.CompileEval" : "V8.Compile");

    // Allocate a shared function info object.
    DCHECK_EQ(RelocInfo::kNoPosition, lit->function_token_position());
    result = NewSharedFunctionInfoForLiteral(isolate, lit, script);
    result->set_is_toplevel(true);
    if (parse_info->is_eval()) {
      // Eval scripts cannot be (re-)compiled without context.
      result->set_allows_lazy_compilation_without_context(false);
    }
    parse_info->set_shared_info(result);

    // Compile the code.
    if (!CompileUnoptimizedCode(info)) {
      return Handle<SharedFunctionInfo>::null();
    }

    // Update the shared function info with the scope info.
    InstallSharedScopeInfo(info, result);

    // Install compilation result on the shared function info
    InstallSharedCompilationResult(info, result);

    Handle<String> script_name =
        script->name()->IsString()
            ? Handle<String>(String::cast(script->name()))
            : isolate->factory()->empty_string();
    // Evals are logged with a dedicated tag; scripts may be re-tagged as
    // native depending on the script type.
    CodeEventListener::LogEventsAndTags log_tag =
        parse_info->is_eval()
            ? CodeEventListener::EVAL_TAG
            : Logger::ToNativeByScript(CodeEventListener::SCRIPT_TAG, *script);

    PROFILE(isolate, CodeCreateEvent(log_tag, result->abstract_code(), *result,
                                     *script_name));

    if (!script.is_null())
      script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
  }

  return result;
}

}  // namespace

// ----------------------------------------------------------------------------
// Implementation of Compiler

// Runs the post-parse analysis passes (AST rewriting, scope analysis, AST
// numbering) that are required before any code generation.
bool Compiler::Analyze(ParseInfo* info) {
  DCHECK_NOT_NULL(info->literal());
  if (!Rewriter::Rewrite(info)) return false;
  if (!Scope::Analyze(info)) return false;
  if (!Renumber(info)) return false;
  DCHECK_NOT_NULL(info->scope());
  return true;
}

// Convenience wrapper: parse, then run the analysis passes.
bool Compiler::ParseAndAnalyze(ParseInfo* info) {
  if (!Parser::ParseStatic(info)) return false;
  if (!Compiler::Analyze(info)) return false;
  DCHECK_NOT_NULL(info->literal());
  DCHECK_NOT_NULL(info->scope());
  return true;
}

// Ensures the function is compiled and installs the resulting code on the
// closure. Returns false on failure; the pending exception is cleared only
// when |flag| is CLEAR_EXCEPTION.
bool Compiler::Compile(Handle<JSFunction> function, ClearExceptionFlag flag) {
  if (function->is_compiled()) return true;
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetLazyCode(function).ToHandle(&code)) {
    if (flag == CLEAR_EXCEPTION) {
      isolate->clear_pending_exception();
    }
    return false;
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}

// Switches the function from bytecode to baseline (full-codegen) code,
// falling back to the existing unoptimized code when the switch fails.
// Always returns true: the function ends up compiled either way.
bool Compiler::CompileBaseline(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetBaselineCode(function).ToHandle(&code)) {
    // Baseline generation failed, get unoptimized code.
    DCHECK(function->shared()->is_compiled());
    code = handle(function->shared()->code());
    isolate->clear_pending_exception();
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}

// Compiles optimized code for the function (possibly concurrently). On
// optimization failure falls back to existing — or freshly compiled —
// unoptimized code; returns false only if even that fallback fails.
bool Compiler::CompileOptimized(Handle<JSFunction> function,
                                ConcurrencyMode mode) {
  if (function->IsOptimized()) return true;
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetOptimizedCode(function, mode).ToHandle(&code)) {
    // Optimization failed, get unoptimized code.
    DCHECK(!isolate->has_pending_exception());
    if (function->shared()->is_compiled()) {
      code = handle(function->shared()->code(), isolate);
    } else {
      Zone zone(isolate->allocator());
      ParseInfo parse_info(&zone, function);
      CompilationInfo info(&parse_info, function);
      if (!GetUnoptimizedCode(&info).ToHandle(&code)) {
        return false;
      }
    }
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}

// Compiles debug code for the given closure. The CompilationInfo is built
// without a closure so the result is context-independent; top-level evals
// are re-parsed eagerly as a top-level unit.
bool Compiler::CompileDebugCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  if (IsEvalToplevel(handle(function->shared()))) {
    parse_info.set_eval();
    if (function->context()->IsNativeContext()) parse_info.set_global();
    parse_info.set_toplevel();
    parse_info.set_allow_lazy_parsing(false);
    parse_info.set_lazy(false);
  }
  info.MarkAsDebug();
  if (GetUnoptimizedCode(&info).is_null()) {
    isolate->clear_pending_exception();
    return false;
  }

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->shared()->HasDebugCode());
  return true;
}

// Same as above, but starting from a SharedFunctionInfo with no closure
// available. Requires lazy compilation without a context to be allowed and
// does not handle top-level evals.
bool Compiler::CompileDebugCode(Handle<SharedFunctionInfo> shared) {
  Isolate* isolate = shared->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, shared);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  DCHECK(shared->allows_lazy_compilation_without_context());
  DCHECK(!IsEvalToplevel(shared));
  info.MarkAsDebug();
  if (GetUnoptimizedCode(&info).is_null()) {
    isolate->clear_pending_exception();
    return false;
  }

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(shared->is_compiled());
  DCHECK(shared->HasDebugCode());
  return true;
}

// Recompiles a whole script for live edit and collects the resulting shared
// function infos into a JSArray. Returns an empty handle on failure.
MaybeHandle<JSArray> Compiler::CompileForLiveEdit(Handle<Script> script) {
  Isolate* isolate = script->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // In order to ensure that live edit function info collection finds the newly
  // generated shared function infos, clear the script's list temporarily
  // and restore it at the end of this method.
  Handle<Object> old_function_infos(script->shared_function_infos(), isolate);
  script->set_shared_function_infos(Smi::FromInt(0));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, script);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  parse_info.set_global();
  info.MarkAsDebug();

  // TODO(635): support extensions.
  const bool compilation_succeeded = !CompileToplevel(&info).is_null();
  Handle<JSArray> infos;
  if (compilation_succeeded) {
    // Check postconditions on success.
    DCHECK(!isolate->has_pending_exception());
    infos = LiveEditFunctionTracker::Collect(parse_info.literal(), script,
                                             &zone, isolate);
  }

  // Restore the original function info list in order to remain side-effect
  // free as much as possible, since some code expects the old shared function
  // infos to stick around.
  script->set_shared_function_infos(*old_function_infos);

  return infos;
}

// TODO(turbofan): In the future, unoptimized code with deopt support could
// be generated lazily once deopt is triggered.
// Makes sure the shared function info has unoptimized code that supports
// deoptimization, recompiling with the full code generator if necessary.
// Returns false when deopt support cannot be added (resumable functions or
// live interpreter activations of the function).
bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
  DCHECK_NOT_NULL(info->literal());
  DCHECK_NOT_NULL(info->scope());
  Handle<SharedFunctionInfo> shared = info->shared_info();
  if (!shared->has_deoptimization_support()) {
    Zone zone(info->isolate()->allocator());
    CompilationInfo unoptimized(info->parse_info(), info->closure());
    unoptimized.EnableDeoptimizationSupport();

    // TODO(4280): For now we do not switch generators or async functions to
    // baseline code because there might be suspended activations stored in
    // generator objects on the heap. We could eventually go directly to
    // TurboFan in this case.
    if (shared->is_resumable()) return false;

    // TODO(4280): For now we disable switching to baseline code in the presence
    // of interpreter activations of the given function. The reasons are:
    //  1) The debugger assumes each function is either full-code or bytecode.
    //  2) The underlying bytecode is cleared below, breaking stack unwinding.
    // The expensive check for activations only needs to be done when the given
    // function has bytecode, otherwise we can be sure there are no activations.
    if (shared->HasBytecodeArray()) {
      InterpreterActivationsFinder activations_finder(*shared);
      if (HasInterpreterActivations(info->isolate(), &activations_finder)) {
        return false;
      }
    }

    // If the current code has reloc info for serialization, also include
    // reloc info for serialization for the new code, so that deopt support
    // can be added without losing IC state.
    if (shared->code()->kind() == Code::FUNCTION &&
        shared->code()->has_reloc_info_for_serialization()) {
      unoptimized.PrepareForSerializing();
    }
    EnsureFeedbackMetadata(&unoptimized);
    if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;

    // TODO(4280): For now we play it safe and remove the bytecode array when we
    // switch to baseline code. We might consider keeping around the bytecode so
    // that it can be used as the "source of truth" eventually.
    shared->ClearBytecodeArray();

    // The scope info might not have been set if a lazily compiled
    // function is inlined before being called for the first time.
    if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
      InstallSharedScopeInfo(info, shared);
    }

    // Install compilation result on the shared function info
    shared->EnableDeoptimizationSupport(*unoptimized.code());

    // The existing unoptimized code was replaced with the new one.
    RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG,
                              &unoptimized);
  }
  return true;
}

// Compiles a source string as an eval in the given context, consulting and
// updating the per-isolate eval compilation cache, and returns the resulting
// closure.
MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
    Handle<String> source, Handle<SharedFunctionInfo> outer_info,
    Handle<Context> context, LanguageMode language_mode,
    ParseRestriction restriction, int eval_scope_position, int eval_position,
    int line_offset, int column_offset, Handle<Object> script_name,
    ScriptOriginOptions options) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  CompilationCache* compilation_cache = isolate->compilation_cache();
  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
      compilation_cache->LookupEval(source, outer_info, context, language_mode,
                                    eval_scope_position);
  Handle<SharedFunctionInfo> shared_info;

  Handle<Script> script;
  if (!maybe_shared_info.ToHandle(&shared_info)) {
    // Cache miss: create a fresh script and compile it as a top-level eval.
    script = isolate->factory()->NewScript(source);
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(line_offset);
      script->set_column_offset(column_offset);
    }
    script->set_origin_options(options);
    script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
    Script::SetEvalOrigin(script, outer_info, eval_position);

    Zone zone(isolate->allocator());
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info, Handle<JSFunction>::null());
    parse_info.set_eval();
    if (context->IsNativeContext()) parse_info.set_global();
    parse_info.set_language_mode(language_mode);
    parse_info.set_parse_restriction(restriction);
    parse_info.set_context(context);

    shared_info = CompileToplevel(&info);

    if (shared_info.is_null()) {
      return
          MaybeHandle<JSFunction>();
    } else {
      // If caller is strict mode, the result must be in strict mode as well.
      DCHECK(is_sloppy(language_mode) ||
             is_strict(shared_info->language_mode()));
      compilation_cache->PutEval(source, outer_info, context, shared_info,
                                 eval_scope_position);
    }
  }

  Handle<JSFunction> result =
      isolate->factory()->NewFunctionFromSharedFunctionInfo(
          shared_info, context, NOT_TENURED);

  // OnAfterCompile has to be called after we create the JSFunction, which we
  // may require to recompile the eval for debugging, if we find a function
  // that contains break points in the eval script.
  isolate->debug()->OnAfterCompile(script);

  return result;
}

// Compiles (or fetches from the compilation cache / embedder code cache) the
// top-level SharedFunctionInfo of a script, optionally producing or
// consuming parser/code cache data.
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForScript(
    Handle<String> source, Handle<Object> script_name, int line_offset,
    int column_offset, ScriptOriginOptions resource_options,
    Handle<Object> source_map_url, Handle<Context> context,
    v8::Extension* extension, ScriptData** cached_data,
    ScriptCompiler::CompileOptions compile_options, NativesFlag natives,
    bool is_module) {
  Isolate* isolate = source->GetIsolate();
  // Validate the cached_data/extension combination for each compile mode.
  if (compile_options == ScriptCompiler::kNoCompileOptions) {
    cached_data = NULL;
  } else if (compile_options == ScriptCompiler::kProduceParserCache ||
             compile_options == ScriptCompiler::kProduceCodeCache) {
    DCHECK(cached_data && !*cached_data);
    DCHECK(extension == NULL);
    DCHECK(!isolate->debug()->is_loaded());
  } else {
    DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
           compile_options == ScriptCompiler::kConsumeCodeCache);
    DCHECK(cached_data && *cached_data);
    DCHECK(extension == NULL);
  }
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  MaybeHandle<SharedFunctionInfo> maybe_result;
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    // First check per-isolate compilation cache.
    maybe_result = compilation_cache->LookupScript(
        source, script_name, line_offset, column_offset, resource_options,
        context, language_mode);
    if (maybe_result.is_null() && FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kConsumeCodeCache &&
        !isolate->debug()->is_loaded()) {
      // Then check cached code provided by embedder.
      HistogramTimerScope timer(isolate->counters()->compile_deserialize());
      RuntimeCallTimerScope runtimeTimer(isolate,
                                         &RuntimeCallStats::CompileDeserialize);
      TRACE_EVENT0("v8", "V8.CompileDeserialize");
      // This inner |result| intentionally shadows the outer one; on success
      // we return directly from here.
      Handle<SharedFunctionInfo> result;
      if (CodeSerializer::Deserialize(isolate, *cached_data, source)
              .ToHandle(&result)) {
        // Promote to per-isolate compilation cache.
        compilation_cache->PutScript(source, context, language_mode, result);
        return result;
      }
      // Deserializer failed. Fall through to compile.
    }
  }

  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
      compile_options == ScriptCompiler::kProduceCodeCache) {
    timer.Start();
  }

  if (!maybe_result.ToHandle(&result) ||
      (FLAG_serialize_toplevel &&
       compile_options == ScriptCompiler::kProduceCodeCache)) {
    // No cache entry found, or embedder wants a code cache. Compile the script.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Script::TYPE_NATIVE);
      script->set_hide_source(true);
    } else if (natives == EXTENSION_CODE) {
      script->set_type(Script::TYPE_EXTENSION);
      script->set_hide_source(true);
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(line_offset);
      script->set_column_offset(column_offset);
    }
    script->set_origin_options(resource_options);
    if (!source_map_url.is_null()) {
      script->set_source_mapping_url(*source_map_url);
    }

    // Compile the function and add it to the cache.
    Zone zone(isolate->allocator());
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info, Handle<JSFunction>::null());
    if (is_module) {
      parse_info.set_module();
    } else {
      parse_info.set_global();
    }
    if (compile_options != ScriptCompiler::kNoCompileOptions) {
      parse_info.set_cached_data(cached_data);
    }
    parse_info.set_compile_options(compile_options);
    parse_info.set_extension(extension);
    parse_info.set_context(context);
    if (FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kProduceCodeCache) {
      info.PrepareForSerializing();
    }

    parse_info.set_language_mode(
        static_cast<LanguageMode>(parse_info.language_mode() | language_mode));
    result = CompileToplevel(&info);
    if (extension == NULL && !result.is_null()) {
      compilation_cache->PutScript(source, context, language_mode, result);
      if (FLAG_serialize_toplevel &&
          compile_options == ScriptCompiler::kProduceCodeCache) {
        // Serialize the compiled script into the embedder-provided cache.
        HistogramTimerScope histogram_timer(
            isolate->counters()->compile_serialize());
        RuntimeCallTimerScope runtimeTimer(isolate,
                                           &RuntimeCallStats::CompileSerialize);
        TRACE_EVENT0("v8", "V8.CompileSerialize");
        *cached_data = CodeSerializer::Serialize(isolate, result, source);
        if (FLAG_profile_deserialization) {
          PrintF("[Compiling and serializing took %0.3f ms]\n",
                 timer.Elapsed().InMillisecondsF());
        }
      }
    }

    if (result.is_null()) {
      isolate->ReportPendingMessages();
    } else {
      isolate->debug()->OnAfterCompile(script);
    }
  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
    result->ResetForNewContext(isolate->heap()->global_ic_age());
  }
  return result;
}

// Finishes compilation of a script whose source was streamed and parsed on a
// background thread; |parse_info| already holds the parse result.
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForStreamedScript(
    Handle<Script> script, ParseInfo* parse_info, int source_length) {
  Isolate* isolate = script->GetIsolate();
  // TODO(titzer): increment the counters in caller.
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
  parse_info->set_language_mode(
      static_cast<LanguageMode>(parse_info->language_mode() | language_mode));

  CompilationInfo compile_info(parse_info, Handle<JSFunction>::null());

  // The source was parsed lazily, so compiling for debugging is not possible.
  DCHECK(!compile_info.is_debug());

  Handle<SharedFunctionInfo> result = CompileToplevel(&compile_info);
  if (!result.is_null()) isolate->debug()->OnAfterCompile(script);
  return result;
}


// Returns (allocating and possibly compiling if necessary) the
// SharedFunctionInfo for a function literal nested inside an already-parsed
// outer function.
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
    FunctionLiteral* literal, Handle<Script> script,
    CompilationInfo* outer_info) {
  // Precondition: code has been parsed and scopes have been analyzed.
  Isolate* isolate = outer_info->isolate();
  MaybeHandle<SharedFunctionInfo> maybe_existing;

  // Find any previously allocated shared function info for the given literal.
1617 if (outer_info->shared_info()->never_compiled()) { 1618 // On the first compile, there are no existing shared function info for 1619 // inner functions yet, so do not try to find them. All bets are off for 1620 // live edit though. 1621 SLOW_DCHECK(script->FindSharedFunctionInfo(literal).is_null() || 1622 isolate->debug()->live_edit_enabled()); 1623 } else { 1624 maybe_existing = script->FindSharedFunctionInfo(literal); 1625 } 1626 1627 // We found an existing shared function info. If it's already compiled, 1628 // don't worry about compiling it, and simply return it. If it's not yet 1629 // compiled, continue to decide whether to eagerly compile. 1630 // Carry on if we are compiling eager to obtain code for debugging, 1631 // unless we already have code with debut break slots. 1632 Handle<SharedFunctionInfo> existing; 1633 if (maybe_existing.ToHandle(&existing) && existing->is_compiled()) { 1634 DCHECK(!existing->is_toplevel()); 1635 if (!outer_info->is_debug() || existing->HasDebugCode()) { 1636 return existing; 1637 } 1638 } 1639 1640 // Allocate a shared function info object. 1641 Handle<SharedFunctionInfo> result; 1642 if (!maybe_existing.ToHandle(&result)) { 1643 result = NewSharedFunctionInfoForLiteral(isolate, literal, script); 1644 result->set_is_toplevel(false); 1645 1646 // If the outer function has been compiled before, we cannot be sure that 1647 // shared function info for this function literal has been created for the 1648 // first time. It may have already been compiled previously. 
1649 result->set_never_compiled(outer_info->shared_info()->never_compiled()); 1650 } 1651 1652 Zone zone(isolate->allocator()); 1653 ParseInfo parse_info(&zone, script); 1654 CompilationInfo info(&parse_info, Handle<JSFunction>::null()); 1655 parse_info.set_literal(literal); 1656 parse_info.set_shared_info(result); 1657 parse_info.set_scope(literal->scope()); 1658 parse_info.set_language_mode(literal->scope()->language_mode()); 1659 if (outer_info->will_serialize()) info.PrepareForSerializing(); 1660 if (outer_info->is_debug()) info.MarkAsDebug(); 1661 1662 // Determine if the function can be lazily compiled. This is necessary to 1663 // allow some of our builtin JS files to be lazily compiled. These 1664 // builtins cannot be handled lazily by the parser, since we have to know 1665 // if a function uses the special natives syntax, which is something the 1666 // parser records. 1667 // If the debugger requests compilation for break points, we cannot be 1668 // aggressive about lazy compilation, because it might trigger compilation 1669 // of functions without an outer context when setting a breakpoint through 1670 // Debug::FindSharedFunctionInfoInScript. 1671 bool allow_lazy = literal->AllowsLazyCompilation() && !info.is_debug(); 1672 bool lazy = FLAG_lazy && allow_lazy && !literal->should_eager_compile(); 1673 1674 // Consider compiling eagerly when targeting the code cache. 1675 lazy &= !(FLAG_serialize_eager && info.will_serialize()); 1676 1677 // Consider compiling eagerly when compiling bytecode for Ignition. 
1678 lazy &= 1679 !(FLAG_ignition && FLAG_ignition_eager && !isolate->serializer_enabled()); 1680 1681 // Generate code 1682 TimerEventScope<TimerEventCompileCode> timer(isolate); 1683 RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode); 1684 TRACE_EVENT0("v8", "V8.CompileCode"); 1685 if (lazy) { 1686 info.SetCode(isolate->builtins()->CompileLazy()); 1687 } else if (Renumber(info.parse_info()) && GenerateUnoptimizedCode(&info)) { 1688 // Code generation will ensure that the feedback vector is present and 1689 // appropriately sized. 1690 DCHECK(!info.code().is_null()); 1691 if (literal->should_eager_compile() && 1692 literal->should_be_used_once_hint()) { 1693 info.code()->MarkToBeExecutedOnce(isolate); 1694 } 1695 // Update the shared function info with the scope info. 1696 InstallSharedScopeInfo(&info, result); 1697 // Install compilation result on the shared function info. 1698 InstallSharedCompilationResult(&info, result); 1699 } else { 1700 return Handle<SharedFunctionInfo>::null(); 1701 } 1702 1703 if (maybe_existing.is_null()) { 1704 RecordFunctionCompilation(CodeEventListener::FUNCTION_TAG, &info); 1705 } 1706 1707 return result; 1708 } 1709 1710 Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForNative( 1711 v8::Extension* extension, Handle<String> name) { 1712 Isolate* isolate = name->GetIsolate(); 1713 v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate); 1714 1715 // Compute the function template for the native function. 1716 v8::Local<v8::FunctionTemplate> fun_template = 1717 extension->GetNativeFunctionTemplate(v8_isolate, 1718 v8::Utils::ToLocal(name)); 1719 DCHECK(!fun_template.IsEmpty()); 1720 1721 // Instantiate the function and create a shared function info from it. 
  Handle<JSFunction> fun = Handle<JSFunction>::cast(Utils::OpenHandle(
      *fun_template->GetFunction(v8_isolate->GetCurrentContext())
           .ToLocalChecked()));
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  // Create a new shared function info under |name| that reuses the
  // instantiated function's code, literal count and scope info.
  Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
      name, fun->shared()->num_literals(), FunctionKind::kNormalFunction, code,
      Handle<ScopeInfo>(fun->shared()->scope_info()));
  shared->set_construct_stub(*construct_stub);
  shared->set_feedback_metadata(fun->shared()->feedback_metadata());

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->internal_formal_parameter_count();
  shared->set_internal_formal_parameter_count(parameters);

  return shared;
}

// Requests optimized code for on-stack replacement, entering at the AST
// position |osr_ast_id| of the currently executing frame |osr_frame|.
// Compilation is performed synchronously (NOT_CONCURRENT).
MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function,
                                                   BailoutId osr_ast_id,
                                                   JavaScriptFrame* osr_frame) {
  DCHECK(!osr_ast_id.IsNone());
  DCHECK_NOT_NULL(osr_frame);
  return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame);
}

// Finalizes an optimization job: on success installs the generated code on
// the closure; otherwise records the bailout and reinstates the shared
// (unoptimized) code.
void Compiler::FinalizeCompilationJob(CompilationJob* raw_job) {
  // Take ownership of compilation job. Deleting job also tears down the zone.
1751 base::SmartPointer<CompilationJob> job(raw_job); 1752 CompilationInfo* info = job->info(); 1753 Isolate* isolate = info->isolate(); 1754 1755 VMState<COMPILER> state(isolate); 1756 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate()); 1757 RuntimeCallTimerScope runtimeTimer(isolate, 1758 &RuntimeCallStats::RecompileSynchronous); 1759 TRACE_EVENT0("v8", "V8.RecompileSynchronous"); 1760 1761 Handle<SharedFunctionInfo> shared = info->shared_info(); 1762 shared->code()->set_profiler_ticks(0); 1763 1764 DCHECK(!shared->HasDebugInfo()); 1765 1766 // 1) Optimization on the concurrent thread may have failed. 1767 // 2) The function may have already been optimized by OSR. Simply continue. 1768 // Except when OSR already disabled optimization for some reason. 1769 // 3) The code may have already been invalidated due to dependency change. 1770 // 4) Code generation may have failed. 1771 if (job->last_status() == CompilationJob::SUCCEEDED) { 1772 if (shared->optimization_disabled()) { 1773 job->RetryOptimization(kOptimizationDisabled); 1774 } else if (info->dependencies()->HasAborted()) { 1775 job->RetryOptimization(kBailedOutDueToDependencyChange); 1776 } else if (job->GenerateCode() == CompilationJob::SUCCEEDED) { 1777 job->RecordOptimizationStats(); 1778 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info); 1779 if (shared->SearchOptimizedCodeMap(info->context()->native_context(), 1780 info->osr_ast_id()).code == nullptr) { 1781 InsertCodeIntoOptimizedCodeMap(info); 1782 } 1783 if (FLAG_trace_opt) { 1784 PrintF("[completed optimizing "); 1785 info->closure()->ShortPrint(); 1786 PrintF("]\n"); 1787 } 1788 info->closure()->ReplaceCode(*info->code()); 1789 return; 1790 } 1791 } 1792 1793 DCHECK(job->last_status() != CompilationJob::SUCCEEDED); 1794 if (FLAG_trace_opt) { 1795 PrintF("[aborted optimizing "); 1796 info->closure()->ShortPrint(); 1797 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason())); 1798 } 1799 
info->closure()->ReplaceCode(shared->code()); 1800 } 1801 1802 void Compiler::PostInstantiation(Handle<JSFunction> function, 1803 PretenureFlag pretenure) { 1804 Handle<SharedFunctionInfo> shared(function->shared()); 1805 1806 if (FLAG_always_opt && shared->allows_lazy_compilation()) { 1807 function->MarkForOptimization(); 1808 } 1809 1810 CodeAndLiterals cached = shared->SearchOptimizedCodeMap( 1811 function->context()->native_context(), BailoutId::None()); 1812 if (cached.code != nullptr) { 1813 // Caching of optimized code enabled and optimized code found. 1814 DCHECK(!cached.code->marked_for_deoptimization()); 1815 DCHECK(function->shared()->is_compiled()); 1816 function->ReplaceCode(cached.code); 1817 } 1818 1819 if (cached.literals != nullptr) { 1820 DCHECK(shared->is_compiled()); 1821 function->set_literals(cached.literals); 1822 } else if (shared->is_compiled()) { 1823 // TODO(mvstanton): pass pretenure flag to EnsureLiterals. 1824 JSFunction::EnsureLiterals(function); 1825 } 1826 } 1827 1828 } // namespace internal 1829 } // namespace v8 1830