// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_COMPILER_H_
#define V8_COMPILER_H_

#include "allocation.h"
#include "ast.h"
#include "zone.h"

namespace v8 {
namespace internal {

static const int kPrologueOffsetNotSet = -1;

class ScriptDataImpl;
class HydrogenCodeStub;

// ParseRestriction is used to restrict the set of valid statements in a
// unit of compilation. Restriction violations cause a syntax error.
enum ParseRestriction {
  NO_PARSE_RESTRICTION,         // All expressions are allowed.
  ONLY_SINGLE_FUNCTION_LITERAL  // Only a single FunctionLiteral expression.
};

struct OffsetRange {
  OffsetRange(int from, int to) : from(from), to(to) {}
  int from;
  int to;
};

// CompilationInfo encapsulates some information known at compile time. It
// is constructed based on the resources available at compile-time.
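//
// A minimal construction sketch (illustrative only; callers normally use the
// concrete subclass CompilationInfoWithZone declared further below, and the
// BailoutId value here is just an example):
//
//   CompilationInfoWithZone info(function);  // function: Handle<JSFunction>
//   info.SetOptimizing(BailoutId::None());   // request Crankshaft output
//   // ... hand |info| to the compilation pipeline ...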
class CompilationInfo {
 public:
  CompilationInfo(Handle<JSFunction> closure, Zone* zone);
  virtual ~CompilationInfo();

  Isolate* isolate() {
    ASSERT(Isolate::Current() == isolate_);
    return isolate_;
  }
  Zone* zone() { return zone_; }
  bool is_lazy() const { return IsLazy::decode(flags_); }
  bool is_eval() const { return IsEval::decode(flags_); }
  bool is_global() const { return IsGlobal::decode(flags_); }
  bool is_classic_mode() const { return language_mode() == CLASSIC_MODE; }
  bool is_extended_mode() const { return language_mode() == EXTENDED_MODE; }
  LanguageMode language_mode() const {
    return LanguageModeField::decode(flags_);
  }
  bool is_in_loop() const { return IsInLoop::decode(flags_); }
  FunctionLiteral* function() const { return function_; }
  Scope* scope() const { return scope_; }
  Scope* global_scope() const { return global_scope_; }
  Handle<Code> code() const { return code_; }
  Handle<JSFunction> closure() const { return closure_; }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  Handle<Script> script() const { return script_; }
  HydrogenCodeStub* code_stub() const { return code_stub_; }
  v8::Extension* extension() const { return extension_; }
  ScriptDataImpl* pre_parse_data() const { return pre_parse_data_; }
  Handle<Context> context() const { return context_; }
  BailoutId osr_ast_id() const { return osr_ast_id_; }
  int opt_count() const { return opt_count_; }
  int num_parameters() const;
  int num_heap_slots() const;
  Code::Flags flags() const;

  void MarkAsEval() {
    ASSERT(!is_lazy());
    flags_ |= IsEval::encode(true);
  }
  void MarkAsGlobal() {
    ASSERT(!is_lazy());
    flags_ |= IsGlobal::encode(true);
  }
  void SetLanguageMode(LanguageMode language_mode) {
    ASSERT(this->language_mode() == CLASSIC_MODE ||
           this->language_mode() == language_mode ||
           language_mode == EXTENDED_MODE);
    flags_ = LanguageModeField::update(flags_, language_mode);
  }
  void MarkAsInLoop() {
    ASSERT(is_lazy());
    flags_ |= IsInLoop::encode(true);
  }
  void MarkAsNative() {
    flags_ |= IsNative::encode(true);
  }

  bool is_native() const {
    return IsNative::decode(flags_);
  }

  bool is_calling() const {
    return is_deferred_calling() || is_non_deferred_calling();
  }

  void MarkAsDeferredCalling() {
    flags_ |= IsDeferredCalling::encode(true);
  }

  bool is_deferred_calling() const {
    return IsDeferredCalling::decode(flags_);
  }

  void MarkAsNonDeferredCalling() {
    flags_ |= IsNonDeferredCalling::encode(true);
  }

  bool is_non_deferred_calling() const {
    return IsNonDeferredCalling::decode(flags_);
  }

  void MarkAsSavesCallerDoubles() {
    flags_ |= SavesCallerDoubles::encode(true);
  }

  bool saves_caller_doubles() const {
    return SavesCallerDoubles::decode(flags_);
  }

  void MarkAsRequiresFrame() {
    flags_ |= RequiresFrame::encode(true);
  }

  bool requires_frame() const {
    return RequiresFrame::decode(flags_);
  }

  void SetParseRestriction(ParseRestriction restriction) {
    flags_ = ParseRestrictionField::update(flags_, restriction);
  }

  ParseRestriction parse_restriction() const {
    return ParseRestrictionField::decode(flags_);
  }

  void SetFunction(FunctionLiteral* literal) {
    ASSERT(function_ == NULL);
    function_ = literal;
  }
  void SetScope(Scope* scope) {
    ASSERT(scope_ == NULL);
    scope_ = scope;
  }
  void SetGlobalScope(Scope* global_scope) {
    ASSERT(global_scope_ == NULL);
    global_scope_ = global_scope;
  }
  void SetCode(Handle<Code> code) { code_ = code; }
  void SetExtension(v8::Extension* extension) {
    ASSERT(!is_lazy());
    extension_ = extension;
  }
  void SetPreParseData(ScriptDataImpl* pre_parse_data) {
    ASSERT(!is_lazy());
    pre_parse_data_ = pre_parse_data;
  }
  void SetContext(Handle<Context> context) {
    context_ = context;
  }
  void MarkCompilingForDebugging(Handle<Code> current_code) {
    ASSERT(mode_ != OPTIMIZE);
    ASSERT(current_code->kind() == Code::FUNCTION);
    flags_ |= IsCompilingForDebugging::encode(true);
    if (current_code->is_compiled_optimizable()) {
      EnableDeoptimizationSupport();
    } else {
      mode_ = CompilationInfo::NONOPT;
    }
  }
  bool IsCompilingForDebugging() {
    return IsCompilingForDebugging::decode(flags_);
  }

  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
        (FLAG_trap_on_stub_deopt && IsStub());
  }

  bool has_global_object() const {
    return !closure().is_null() &&
        (closure()->context()->global_object() != NULL);
  }

  GlobalObject* global_object() const {
    return has_global_object() ? closure()->context()->global_object() : NULL;
  }

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
  bool IsOptimizable() const { return mode_ == BASE; }
  bool IsStub() const { return mode_ == STUB; }
  void SetOptimizing(BailoutId osr_ast_id) {
    SetMode(OPTIMIZE);
    osr_ast_id_ = osr_ast_id;
  }
  void DisableOptimization();

  // Deoptimization support.
  bool HasDeoptimizationSupport() const {
    return SupportsDeoptimization::decode(flags_);
  }
  void EnableDeoptimizationSupport() {
    ASSERT(IsOptimizable());
    flags_ |= SupportsDeoptimization::encode(true);
  }

  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

  // Disable all optimization attempts of this info for the rest of the
  // current compilation pipeline.
  void AbortOptimization();

  void set_deferred_handles(DeferredHandles* deferred_handles) {
    ASSERT(deferred_handles_ == NULL);
    deferred_handles_ = deferred_handles;
  }

  ZoneList<Handle<HeapObject> >* dependencies(
      DependentCode::DependencyGroup group) {
    if (dependencies_[group] == NULL) {
      dependencies_[group] = new(zone_) ZoneList<Handle<HeapObject> >(2, zone_);
    }
    return dependencies_[group];
  }

  void CommitDependencies(Handle<Code> code);

  void RollbackDependencies();

  void SaveHandles() {
    SaveHandle(&closure_);
    SaveHandle(&shared_info_);
    SaveHandle(&context_);
    SaveHandle(&script_);
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }
  void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }

  int prologue_offset() const {
    ASSERT_NE(kPrologueOffsetNotSet, prologue_offset_);
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
    ASSERT_EQ(kPrologueOffsetNotSet, prologue_offset_);
    prologue_offset_ = prologue_offset;
  }

  // Adds an offset range [from, to) where the fp register does not point
  // to the current frame base. Used by the CPU profiler to detect stack
  // samples in which the top frame is not set up.
  inline void AddNoFrameRange(int from, int to) {
    if (no_frame_ranges_) no_frame_ranges_->Add(OffsetRange(from, to));
  }

  List<OffsetRange>* ReleaseNoFrameRanges() {
    List<OffsetRange>* result = no_frame_ranges_;
    no_frame_ranges_ = NULL;
    return result;
  }

  Handle<Foreign> object_wrapper() {
    if (object_wrapper_.is_null()) {
      object_wrapper_ =
          isolate()->factory()->NewForeign(reinterpret_cast<Address>(this));
    }
    return object_wrapper_;
  }

  void AbortDueToDependencyChange() {
    ASSERT(!isolate()->optimizing_compiler_thread()->IsOptimizerThread());
    abort_due_to_dependency_ = true;
  }

  bool HasAbortedDueToDependencyChange() {
    ASSERT(!isolate()->optimizing_compiler_thread()->IsOptimizerThread());
    return abort_due_to_dependency_;
  }

 protected:
  CompilationInfo(Handle<Script> script,
                  Zone* zone);
  CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                  Zone* zone);
  CompilationInfo(HydrogenCodeStub* stub,
                  Isolate* isolate,
                  Zone* zone);

 private:
  Isolate* isolate_;

  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
  // NONOPT is generated by the full codegen and is not prepared for
  // recompilation/bailouts. These functions are never recompiled.
  // STUB is used when compiling a code stub (HydrogenCodeStub) with the
  // Hydrogen-based backend.
  enum Mode {
    BASE,
    OPTIMIZE,
    NONOPT,
    STUB
  };

  void Initialize(Isolate* isolate, Mode mode, Zone* zone);

  void SetMode(Mode mode) {
    ASSERT(V8::UseCrankshaft());
    mode_ = mode;
  }

  // Flags using template class BitField<type, start, length>. All are
  // false by default.
  //
  // Compilation is either eager or lazy.
  class IsLazy: public BitField<bool, 0, 1> {};
  // Flags that can be set for eager compilation.
  class IsEval: public BitField<bool, 1, 1> {};
  class IsGlobal: public BitField<bool, 2, 1> {};
  // Flags that can be set for lazy compilation.
  class IsInLoop: public BitField<bool, 3, 1> {};
  // Strict mode - used in eager compilation.
  class LanguageModeField: public BitField<LanguageMode, 4, 2> {};
  // Is this a function from our natives.
  class IsNative: public BitField<bool, 6, 1> {};
  // Is this code being compiled with support for deoptimization.
  class SupportsDeoptimization: public BitField<bool, 7, 1> {};
  // If compiling for debugging, produce just full code matching the
  // initial mode setting.
  class IsCompilingForDebugging: public BitField<bool, 8, 1> {};
  // If the compiled code contains calls (deferred or not) that require
  // building a frame.
  class IsCalling: public BitField<bool, 9, 1> {};
  // If the compiled code contains deferred calls that require building a
  // frame.
  class IsDeferredCalling: public BitField<bool, 10, 1> {};
  // If the compiled code contains non-deferred calls that require building
  // a frame.
  class IsNonDeferredCalling: public BitField<bool, 11, 1> {};
  // If the compiled code saves double caller registers that it clobbers.
  class SavesCallerDoubles: public BitField<bool, 12, 1> {};
  // If the set of valid statements is restricted.
  class ParseRestrictionField: public BitField<ParseRestriction, 13, 1> {};
  // If the function requires a frame (for unspecified reasons).
  class RequiresFrame: public BitField<bool, 14, 1> {};

  unsigned flags_;
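
  // A minimal sketch of how the BitField helpers above compose into |flags_|
  // (illustrative only; the STRICT_MODE value is just an example):
  //
  //   flags_ = IsLazy::encode(true) | LanguageModeField::encode(STRICT_MODE);
  //   bool lazy = IsLazy::decode(flags_);                     // true
  //   LanguageMode mode = LanguageModeField::decode(flags_);  // STRICT_MODE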

  // Fields filled in by the compilation pipeline.
  // AST filled in by the parser.
  FunctionLiteral* function_;
  // The scope of the function literal as a convenience. Set to indicate
  // that scopes have been analyzed.
  Scope* scope_;
  // The global scope provided as a convenience.
  Scope* global_scope_;
  // For compiled stubs, the stub object.
  HydrogenCodeStub* code_stub_;
  // The compiled code.
  Handle<Code> code_;

  // Possible initial inputs to the compilation process.
  Handle<JSFunction> closure_;
  Handle<SharedFunctionInfo> shared_info_;
  Handle<Script> script_;

  // Fields possibly needed for eager compilation, NULL by default.
  v8::Extension* extension_;
  ScriptDataImpl* pre_parse_data_;

  // The context of the caller for eval code, and the global context for a
  // global script. Will be a null handle otherwise.
  Handle<Context> context_;

  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
  BailoutId osr_ast_id_;

  // Whether compilation needs to be aborted due to a dependency change.
  bool abort_due_to_dependency_;

  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

  DeferredHandles* deferred_handles_;

  ZoneList<Handle<HeapObject> >* dependencies_[DependentCode::kGroupCount];

  template<typename T>
  void SaveHandle(Handle<T> *object) {
    if (!object->is_null()) {
      Handle<T> handle(*(*object));
      *object = handle;
    }
  }

  BailoutReason bailout_reason_;

  int prologue_offset_;

  List<OffsetRange>* no_frame_ranges_;

  // A copy of shared_info()->opt_count() to avoid a handle dereference
  // during graph optimization.
  int opt_count_;

  Handle<Foreign> object_wrapper_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};


// Exactly like a CompilationInfo, except that it also creates and enters a
// Zone on construction and deallocates it on destruction.
class CompilationInfoWithZone: public CompilationInfo {
 public:
  explicit CompilationInfoWithZone(Handle<Script> script)
      : CompilationInfo(script, &zone_),
        zone_(script->GetIsolate()) {}
  explicit CompilationInfoWithZone(Handle<SharedFunctionInfo> shared_info)
      : CompilationInfo(shared_info, &zone_),
        zone_(shared_info->GetIsolate()) {}
  explicit CompilationInfoWithZone(Handle<JSFunction> closure)
      : CompilationInfo(closure, &zone_),
        zone_(closure->GetIsolate()) {}
  CompilationInfoWithZone(HydrogenCodeStub* stub, Isolate* isolate)
      : CompilationInfo(stub, isolate, &zone_),
        zone_(isolate) {}

  // Virtual destructor because a CompilationInfoWithZone has to exit the
  // zone scope and get rid of dependent maps even when it is deleted through
  // a pointer to the base class CompilationInfo.
  virtual ~CompilationInfoWithZone() {
    RollbackDependencies();
  }

 private:
  Zone zone_;
};


// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
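//
// A rough usage sketch (illustrative):
//
//   { CompilationHandleScope handle_scope(info);
//     // ... create handles that must outlive the current handle scope ...
//   }   // The detached handles are now stored on |info|.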
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() {
    info_->set_deferred_handles(deferred_.Detach());
  }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};


class HGraph;
class HOptimizedGraphBuilder;
class LChunk;

// A helper class that calls the three compilation phases in Crankshaft and
// keeps track of the compilation state. The three phases CreateGraph,
// OptimizeGraph and GenerateAndInstallCode can either fail, bail out to the
// full code generator or succeed. Apart from their return value, the status
// of the last phase run can be checked using last_status().
class OptimizingCompiler: public ZoneObject {
 public:
  explicit OptimizingCompiler(CompilationInfo* info)
      : info_(info),
        graph_builder_(NULL),
        graph_(NULL),
        chunk_(NULL),
        time_taken_to_create_graph_(0),
        time_taken_to_optimize_(0),
        time_taken_to_codegen_(0),
        last_status_(FAILED) { }

  enum Status {
    FAILED, BAILED_OUT, SUCCEEDED
  };

  MUST_USE_RESULT Status CreateGraph();
  MUST_USE_RESULT Status OptimizeGraph();
  MUST_USE_RESULT Status GenerateAndInstallCode();

  Status last_status() const { return last_status_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }

  MUST_USE_RESULT Status AbortOptimization() {
    info_->AbortOptimization();
    info_->shared_info()->DisableOptimization(info_->bailout_reason());
    return SetLastStatus(BAILED_OUT);
  }

 private:
  CompilationInfo* info_;
  HOptimizedGraphBuilder* graph_builder_;
  HGraph* graph_;
  LChunk* chunk_;
  int64_t time_taken_to_create_graph_;
  int64_t time_taken_to_optimize_;
  int64_t time_taken_to_codegen_;
  Status last_status_;

  MUST_USE_RESULT Status SetLastStatus(Status status) {
    last_status_ = status;
    return last_status_;
  }
  void RecordOptimizationStats();

  struct Timer {
    Timer(OptimizingCompiler* compiler, int64_t* location)
        : compiler_(compiler),
          start_(OS::Ticks()),
          location_(location) { }

    ~Timer() {
      *location_ += (OS::Ticks() - start_);
    }

    OptimizingCompiler* compiler_;
    int64_t start_;
    int64_t* location_;
  };
};


// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function
// without parameters which can then be executed. If the source code contains
// other functions, they will be compiled and allocated as part of the
// compilation of the source code.

// Please note that this interface returns shared function infos. This means
// you need to call Factory::NewFunctionFromSharedFunctionInfo before you have
// a real function with a context.

class Compiler : public AllStatic {
 public:
  // Call count before primitive functions trigger their own optimization.
  static const int kCallsUntilPrimitiveOpt = 200;

  // All routines return a SharedFunctionInfo.
  // If an error occurs, an exception is raised and the returned handle
  // is null.
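
  // A minimal call sketch (illustrative only; the argument values below are
  // placeholders, not the paths real embedders take through the V8 API):
  //
  //   Handle<SharedFunctionInfo> shared = Compiler::Compile(
  //       source, script_name, 0, 0, false, context,
  //       NULL, NULL, Handle<Object>(), NOT_NATIVES_CODE);
  //   if (shared.is_null()) { /* a pending exception was raised */ }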

  // Compile a String source within a context.
  static Handle<SharedFunctionInfo> Compile(Handle<String> source,
                                            Handle<Object> script_name,
                                            int line_offset,
                                            int column_offset,
                                            bool is_shared_cross_origin,
                                            Handle<Context> context,
                                            v8::Extension* extension,
                                            ScriptDataImpl* pre_data,
                                            Handle<Object> script_data,
                                            NativesFlag is_natives_code);

  // Compile a String source within a context for eval.
  static Handle<SharedFunctionInfo> CompileEval(Handle<String> source,
                                                Handle<Context> context,
                                                bool is_global,
                                                LanguageMode language_mode,
                                                ParseRestriction restriction,
                                                int scope_position);

  // Compile from function info (used for lazy compilation). Returns true on
  // success and false if the compilation resulted in a stack overflow.
  static bool CompileLazy(CompilationInfo* info);

  static void RecompileParallel(Handle<JSFunction> function);

  // Create a shared function info object for a function literal (the code
  // may be lazily compiled).
  static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
                                                      Handle<Script> script);

  // Set the function info for a newly compiled function.
  static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
                              FunctionLiteral* lit,
                              bool is_toplevel,
                              Handle<Script> script);

  static void InstallOptimizedCode(OptimizingCompiler* info);

#ifdef ENABLE_DEBUGGER_SUPPORT
  static bool MakeCodeForLiveEdit(CompilationInfo* info);
#endif

  static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                        CompilationInfo* info,
                                        Handle<SharedFunctionInfo> shared);
};


class CompilationPhase BASE_EMBEDDED {
 public:
  CompilationPhase(const char* name, CompilationInfo* info);
  ~CompilationPhase();

 protected:
  bool ShouldProduceTraceOutput() const;

  const char* name() const { return name_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }
  Zone* zone() { return &zone_; }

 private:
  const char* name_;
  CompilationInfo* info_;
  Zone zone_;
  unsigned info_zone_start_allocation_size_;
  int64_t start_ticks_;

  DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
};


} }  // namespace v8::internal

#endif  // V8_COMPILER_H_