// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_H_
#define V8_COMPILER_H_

#include "src/allocation.h"
#include "src/ast/ast.h"
#include "src/bailout-reason.h"
#include "src/compilation-dependencies.h"
#include "src/signature.h"
#include "src/zone.h"

namespace v8 {
namespace internal {

class AstValueFactory;
class HydrogenCodeStub;
class JavaScriptFrame;
class ParseInfo;
class ScriptData;

// This class encapsulates encoding and decoding of source positions from
// which Hydrogen values originated.
// When FLAG_hydrogen_track_positions is set, this object encodes the
// identifier of the inlining and the absolute offset from the start of the
// inlined function.
// When the flag is not set, we simply track the absolute offset from the
// script start.
class SourcePosition {
 public:
  static SourcePosition Unknown() {
    return SourcePosition::FromRaw(kNoPosition);
  }

  bool IsUnknown() const { return value_ == kNoPosition; }

  uint32_t position() const { return PositionField::decode(value_); }
  void set_position(uint32_t position) {
    if (FLAG_hydrogen_track_positions) {
      value_ = static_cast<uint32_t>(PositionField::update(value_, position));
    } else {
      value_ = position;
    }
  }

  uint32_t inlining_id() const { return InliningIdField::decode(value_); }
  void set_inlining_id(uint32_t inlining_id) {
    if (FLAG_hydrogen_track_positions) {
      value_ =
          static_cast<uint32_t>(InliningIdField::update(value_, inlining_id));
    }
  }

  uint32_t raw() const { return value_; }

 private:
  static const uint32_t kNoPosition =
      static_cast<uint32_t>(RelocInfo::kNoPosition);
  typedef BitField<uint32_t, 0, 9> InliningIdField;

  // Offset from the start of the inlined function.
  typedef BitField<uint32_t, 9, 23> PositionField;

  friend class HPositionInfo;
  friend class Deoptimizer;

  static SourcePosition FromRaw(uint32_t raw_position) {
    SourcePosition position;
    position.value_ = raw_position;
    return position;
  }

  // If FLAG_hydrogen_track_positions is set, contains the bitfields
  // InliningIdField and PositionField.
  // Otherwise contains the absolute offset from the script start.
  uint32_t value_;
};


std::ostream& operator<<(std::ostream& os, const SourcePosition& p);
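
// Usage sketch (illustrative only, not part of the original header; assumes
// FLAG_hydrogen_track_positions is enabled, so both bitfields are packed
// into the single 32-bit value):
//
//   SourcePosition pos = SourcePosition::Unknown();
//   pos.set_inlining_id(2);      // stored in InliningIdField, bits 0..8
//   pos.set_position(1234);      // stored in PositionField, bits 9..31
//   uint32_t packed = pos.raw();
//
// With the flag disabled, set_position() stores the absolute script offset
// directly and set_inlining_id() is a no-op.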


struct InlinedFunctionInfo {
  InlinedFunctionInfo(int parent_id, SourcePosition inline_position,
                      int script_id, int start_position)
      : parent_id(parent_id),
        inline_position(inline_position),
        script_id(script_id),
        start_position(start_position) {}
  int parent_id;
  SourcePosition inline_position;
  int script_id;
  int start_position;
  std::vector<size_t> deopt_pc_offsets;

  static const int kNoParentId = -1;
};
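
// Illustrative sketch (not from the original header) of how these records
// chain: the outermost function uses kNoParentId, and each inlined function
// records the id of the function it was inlined into. "script_id" and
// "call_site_position" below are hypothetical values:
//
//   std::vector<InlinedFunctionInfo> infos;
//   infos.push_back(InlinedFunctionInfo(InlinedFunctionInfo::kNoParentId,
//                                       SourcePosition::Unknown(),
//                                       script_id, 0));
//   infos.push_back(InlinedFunctionInfo(0, call_site_position,
//                                       script_id, 42));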


// CompilationInfo encapsulates some information known at compile time.  It
// is constructed based on the resources available at compile-time.
class CompilationInfo {
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
  enum Flag {
    kDeferredCalling = 1 << 0,
    kNonDeferredCalling = 1 << 1,
    kSavesCallerDoubles = 1 << 2,
    kRequiresFrame = 1 << 3,
    kMustNotHaveEagerFrame = 1 << 4,
    kDeoptimizationSupport = 1 << 5,
    kDebug = 1 << 6,
    kSerializing = 1 << 7,
    kFunctionContextSpecializing = 1 << 8,
    kFrameSpecializing = 1 << 9,
    kNativeContextSpecializing = 1 << 10,
    kInliningEnabled = 1 << 11,
    kTypingEnabled = 1 << 12,
    kDisableFutureOptimization = 1 << 13,
    kSplittingEnabled = 1 << 14,
    kDeoptimizationEnabled = 1 << 16,
    kSourcePositionsEnabled = 1 << 17,
    kFirstCompile = 1 << 18,
  };

  explicit CompilationInfo(ParseInfo* parse_info);
  CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone);
  CompilationInfo(const char* debug_name, Isolate* isolate, Zone* zone);
  virtual ~CompilationInfo();

  ParseInfo* parse_info() const { return parse_info_; }

  // -----------------------------------------------------------
  // TODO(titzer): inline and delete accessors of ParseInfo
  // -----------------------------------------------------------
  Handle<Script> script() const;
  bool is_eval() const;
  bool is_native() const;
  bool is_module() const;
  LanguageMode language_mode() const;
  Handle<JSFunction> closure() const;
  FunctionLiteral* literal() const;
  Scope* scope() const;
  Handle<Context> context() const;
  Handle<SharedFunctionInfo> shared_info() const;
  bool has_shared_info() const;
  bool has_context() const;
  bool has_literal() const;
  bool has_scope() const;
  // -----------------------------------------------------------

  Isolate* isolate() const {
    return isolate_;
  }
  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_ast_id_.IsNone(); }
  Handle<Code> code() const { return code_; }
  CodeStub* code_stub() const { return code_stub_; }
  BailoutId osr_ast_id() const { return osr_ast_id_; }
  Handle<Code> unoptimized_code() const { return unoptimized_code_; }
  int opt_count() const { return opt_count_; }
  int num_parameters() const;
  int num_parameters_including_this() const;
  bool is_this_defined() const;
  int num_heap_slots() const;

  void set_parameter_count(int parameter_count) {
    DCHECK(IsStub());
    parameter_count_ = parameter_count;
  }

  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
  Handle<BytecodeArray> bytecode_array() const { return bytecode_array_; }

  bool is_tracking_positions() const { return track_positions_; }

  bool is_calling() const {
    return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
  }

  void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }

  bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }

  void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }

  bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }

  void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }

  bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }

  void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }

  bool requires_frame() const { return GetFlag(kRequiresFrame); }

  void MarkMustNotHaveEagerFrame() { SetFlag(kMustNotHaveEagerFrame); }

  bool GetMustNotHaveEagerFrame() const {
    return GetFlag(kMustNotHaveEagerFrame);
  }

  // Compilations marked as debug produce unoptimized code with debug break
  // slots. Inner functions that cannot be compiled without a context are
  // compiled eagerly.
  // Always include deoptimization support to avoid having to recompile again.
  void MarkAsDebug() {
    SetFlag(kDebug);
    SetFlag(kDeoptimizationSupport);
  }

  bool is_debug() const { return GetFlag(kDebug); }

  void PrepareForSerializing() { SetFlag(kSerializing); }

  bool will_serialize() const { return GetFlag(kSerializing); }

  void MarkAsFunctionContextSpecializing() {
    SetFlag(kFunctionContextSpecializing);
  }

  bool is_function_context_specializing() const {
    return GetFlag(kFunctionContextSpecializing);
  }

  void MarkAsFrameSpecializing() { SetFlag(kFrameSpecializing); }

  bool is_frame_specializing() const { return GetFlag(kFrameSpecializing); }

  void MarkAsNativeContextSpecializing() {
    SetFlag(kNativeContextSpecializing);
  }

  bool is_native_context_specializing() const {
    return GetFlag(kNativeContextSpecializing);
  }

  void MarkAsDeoptimizationEnabled() { SetFlag(kDeoptimizationEnabled); }

  bool is_deoptimization_enabled() const {
    return GetFlag(kDeoptimizationEnabled);
  }

  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }

  bool is_source_positions_enabled() const {
    return GetFlag(kSourcePositionsEnabled);
  }

  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }

  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }

  void MarkAsTypingEnabled() { SetFlag(kTypingEnabled); }

  bool is_typing_enabled() const { return GetFlag(kTypingEnabled); }

  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }

  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }

  void MarkAsFirstCompile() { SetFlag(kFirstCompile); }

  void MarkAsCompiled() { SetFlag(kFirstCompile, false); }

  bool is_first_compile() const { return GetFlag(kFirstCompile); }

  bool GeneratePreagedPrologue() const {
    // Generate a pre-aged prologue if we are optimizing for size, which
    // will make code flushing more aggressive. Only apply to Code::FUNCTION,
    // since StaticMarkingVisitor::IsFlushable only flushes proper functions.
    return FLAG_optimize_for_size && FLAG_age_code && !will_serialize() &&
           !is_debug() && output_code_kind_ == Code::FUNCTION;
  }

  void EnsureFeedbackVector();
  Handle<TypeFeedbackVector> feedback_vector() const {
    return feedback_vector_;
  }
  void SetCode(Handle<Code> code) { code_ = code; }

  void SetBytecodeArray(Handle<BytecodeArray> bytecode_array) {
    bytecode_array_ = bytecode_array;
  }

  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
        (FLAG_trap_on_stub_deopt && IsStub());
  }

  bool has_native_context() const {
    return !closure().is_null() && (closure()->native_context() != nullptr);
  }

  Context* native_context() const {
    return has_native_context() ? closure()->native_context() : nullptr;
  }

  bool has_global_object() const { return has_native_context(); }

  JSGlobalObject* global_object() const {
    return has_global_object() ? native_context()->global_object() : nullptr;
  }

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
  bool IsStub() const { return mode_ == STUB; }
  void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
    DCHECK(has_shared_info());
    SetMode(OPTIMIZE);
    osr_ast_id_ = osr_ast_id;
    unoptimized_code_ = unoptimized;
    optimization_id_ = isolate()->NextOptimizationId();
    set_output_code_kind(Code::OPTIMIZED_FUNCTION);
  }

  // Deoptimization support.
  bool HasDeoptimizationSupport() const {
    return GetFlag(kDeoptimizationSupport);
  }
  void EnableDeoptimizationSupport() {
    DCHECK_EQ(BASE, mode_);
    SetFlag(kDeoptimizationSupport);
  }
  bool ShouldEnsureSpaceForLazyDeopt() { return !IsStub(); }

  bool ExpectsJSReceiverAsReceiver();

  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

  void set_deferred_handles(DeferredHandles* deferred_handles) {
    DCHECK(deferred_handles_ == NULL);
    deferred_handles_ = deferred_handles;
  }

  void ReopenHandlesInNewHandleScope() {
    unoptimized_code_ = Handle<Code>(*unoptimized_code_);
  }

  void AbortOptimization(BailoutReason reason) {
    DCHECK(reason != kNoReason);
    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
    SetFlag(kDisableFutureOptimization);
  }

  void RetryOptimization(BailoutReason reason) {
    DCHECK(reason != kNoReason);
    if (GetFlag(kDisableFutureOptimization)) return;
    bailout_reason_ = reason;
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }

  int prologue_offset() const {
    DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
    DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
    prologue_offset_ = prologue_offset;
  }

  int start_position_for(uint32_t inlining_id) {
    return inlined_function_infos_.at(inlining_id).start_position;
  }
  const std::vector<InlinedFunctionInfo>& inlined_function_infos() {
    return inlined_function_infos_;
  }

  void LogDeoptCallPosition(int pc_offset, int inlining_id);
  int TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                           SourcePosition position, int parent_id);

  CompilationDependencies* dependencies() { return &dependencies_; }

  bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
    return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure());
  }

  int optimization_id() const { return optimization_id_; }

  int osr_expr_stack_height() { return osr_expr_stack_height_; }
  void set_osr_expr_stack_height(int height) {
    DCHECK(height >= 0);
    osr_expr_stack_height_ = height;
  }
  JavaScriptFrame* osr_frame() const { return osr_frame_; }
  void set_osr_frame(JavaScriptFrame* osr_frame) { osr_frame_ = osr_frame; }

#if DEBUG
  void PrintAstForTesting();
#endif

  bool has_simple_parameters();

  struct InlinedFunctionHolder {
    Handle<SharedFunctionInfo> shared_info;

    // Root that holds the unoptimized code of the inlined function alive
    // (and out of reach of code flushing) until we finish compilation.
    // Do not remove.
    Handle<Code> inlined_code_object_root;

    explicit InlinedFunctionHolder(
        Handle<SharedFunctionInfo> inlined_shared_info)
        : shared_info(inlined_shared_info),
          inlined_code_object_root(inlined_shared_info->code()) {}
  };

  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
  InlinedFunctionList const& inlined_functions() const {
    return inlined_functions_;
  }

  void AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function) {
    inlined_functions_.push_back(InlinedFunctionHolder(inlined_function));
  }

  base::SmartArrayPointer<char> GetDebugName() const;

  Code::Kind output_code_kind() const { return output_code_kind_; }

  void set_output_code_kind(Code::Kind kind) { output_code_kind_ = kind; }

 protected:
  ParseInfo* parse_info_;

  void DisableFutureOptimization() {
    if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
      shared_info()->DisableOptimization(bailout_reason());
    }
  }

 private:
  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
  enum Mode {
    BASE,
    OPTIMIZE,
    STUB
  };

  CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
                  const char* debug_name, Mode mode, Isolate* isolate,
                  Zone* zone);

  Isolate* isolate_;

  void SetMode(Mode mode) {
    mode_ = mode;
  }

  void SetFlag(Flag flag) { flags_ |= flag; }

  void SetFlag(Flag flag, bool value) {
    flags_ = value ? flags_ | flag : flags_ & ~flag;
  }

  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

  unsigned flags_;

  Code::Kind output_code_kind_;

  // For compiled stubs, the stub object.
  CodeStub* code_stub_;
  // The compiled code.
  Handle<Code> code_;

  // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
  Handle<TypeFeedbackVector> feedback_vector_;

  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
  BailoutId osr_ast_id_;
  // The unoptimized code we patched for OSR may not be the shared code
  // afterwards, since we may need to compile it again to include
  // deoptimization data.  Keep track of which code we patched.
  Handle<Code> unoptimized_code_;

  // Holds the bytecode array generated by the interpreter.
  // TODO(rmcilroy/mstarzinger): Temporary work-around until compiler.cc is
  // refactored to avoid needing to carry the BytecodeArray around.
  Handle<BytecodeArray> bytecode_array_;

  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

  DeferredHandles* deferred_handles_;

  // Dependencies for this compilation, e.g. stable maps.
  CompilationDependencies dependencies_;

  BailoutReason bailout_reason_;

  int prologue_offset_;

  std::vector<InlinedFunctionInfo> inlined_function_infos_;
  bool track_positions_;

  InlinedFunctionList inlined_functions_;

  // A copy of shared_info()->opt_count() to avoid handle deref
  // during graph optimization.
  int opt_count_;

  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

  int optimization_id_;

  int osr_expr_stack_height_;

  // The current OSR frame for specialization or {nullptr}.
  JavaScriptFrame* osr_frame_ = nullptr;

  const char* debug_name_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};
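
// Sketch of a typical optimizing-compile setup (hypothetical driver code;
// the real entry points live in compiler.cc). "parse_info" and
// "unoptimized_code" are assumed to exist:
//
//   CompilationInfo info(parse_info);
//   info.MarkAsDeoptimizationEnabled();
//   info.SetOptimizing(BailoutId::None(), unoptimized_code);
//   DCHECK(info.IsOptimizing());
//   DCHECK_EQ(Code::OPTIMIZED_FUNCTION, info.output_code_kind());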


// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() {
    info_->set_deferred_handles(deferred_.Detach());
  }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};
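
// Usage sketch (hypothetical): handles created while the scope is alive are
// detached on destruction and handed to the CompilationInfo via
// set_deferred_handles(), so they outlive the enclosing HandleScope:
//
//   {
//     CompilationHandleScope handle_scope(info);
//     // ... allocate handles needed by a later compilation step ...
//   }  // Handles are now owned by info's deferred handles.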


class HGraph;
class HOptimizedGraphBuilder;
class LChunk;

// A helper class that drives the three compilation phases in
// Crankshaft and keeps track of the job's state.  The three phases
// CreateGraph, OptimizeGraph, and GenerateCode can each either fail,
// bail out to the full code generator, or succeed.  Apart from their
// return value, the status of the most recently run phase can be
// checked using last_status().
class OptimizedCompileJob : public ZoneObject {
 public:
  explicit OptimizedCompileJob(CompilationInfo* info)
      : info_(info),
        graph_builder_(NULL),
        graph_(NULL),
        chunk_(NULL),
        last_status_(FAILED),
        awaiting_install_(false) { }

  enum Status {
    FAILED, BAILED_OUT, SUCCEEDED
  };

  MUST_USE_RESULT Status CreateGraph();
  MUST_USE_RESULT Status OptimizeGraph();
  MUST_USE_RESULT Status GenerateCode();

  Status last_status() const { return last_status_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }

  Status RetryOptimization(BailoutReason reason) {
    info_->RetryOptimization(reason);
    return SetLastStatus(BAILED_OUT);
  }

  Status AbortOptimization(BailoutReason reason) {
    info_->AbortOptimization(reason);
    return SetLastStatus(BAILED_OUT);
  }

  void WaitForInstall() {
    DCHECK(info_->is_osr());
    awaiting_install_ = true;
  }

  bool IsWaitingForInstall() { return awaiting_install_; }

 private:
  CompilationInfo* info_;
  HOptimizedGraphBuilder* graph_builder_;
  HGraph* graph_;
  LChunk* chunk_;
  base::TimeDelta time_taken_to_create_graph_;
  base::TimeDelta time_taken_to_optimize_;
  base::TimeDelta time_taken_to_codegen_;
  Status last_status_;
  bool awaiting_install_;

  MUST_USE_RESULT Status SetLastStatus(Status status) {
    last_status_ = status;
    return last_status_;
  }
  void RecordOptimizationStats();

  struct Timer {
    Timer(OptimizedCompileJob* job, base::TimeDelta* location)
        : job_(job), location_(location) {
      DCHECK(location_ != NULL);
      timer_.Start();
    }

    ~Timer() {
      *location_ += timer_.Elapsed();
    }

    OptimizedCompileJob* job_;
    base::ElapsedTimer timer_;
    base::TimeDelta* location_;
  };
};
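
// Sketch of how a driver runs a job to completion (hypothetical; the real
// synchronous and concurrent drivers live in compiler.cc):
//
//   OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info);
//   if (job->CreateGraph() == OptimizedCompileJob::SUCCEEDED &&
//       job->OptimizeGraph() == OptimizedCompileJob::SUCCEEDED &&
//       job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
//     // The optimized code is now available via info->code().
//   } else if (job->last_status() == OptimizedCompileJob::BAILED_OUT) {
//     // Bailed out to the full code generator; see info->bailout_reason().
//   }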


// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function
// without parameters, which can then be executed. If the source code contains
// other functions, they will be compiled and allocated as part of the
// compilation of the source code.

// Please note this interface returns shared function infos.  This means you
// need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
// real function with a context.

class Compiler : public AllStatic {
 public:
  MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
      Handle<JSFunction> function);
  MUST_USE_RESULT static MaybeHandle<Code> GetLazyCode(
      Handle<JSFunction> function);

  static bool Compile(Handle<JSFunction> function, ClearExceptionFlag flag);
  static bool CompileDebugCode(Handle<JSFunction> function);
  static bool CompileDebugCode(Handle<SharedFunctionInfo> shared);
  static void CompileForLiveEdit(Handle<Script> script);

  // Parser::Parse, then Compiler::Analyze.
  static bool ParseAndAnalyze(ParseInfo* info);
  // Rewrite, analyze scopes, and renumber.
  static bool Analyze(ParseInfo* info);
  // Adds deoptimization support, requires ParseAndAnalyze.
  static bool EnsureDeoptimizationSupport(CompilationInfo* info);

  // Compile a String source within a context for eval.
  MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
      Handle<String> source, Handle<SharedFunctionInfo> outer_info,
      Handle<Context> context, LanguageMode language_mode,
      ParseRestriction restriction, int line_offset, int column_offset = 0,
      Handle<Object> script_name = Handle<Object>(),
      ScriptOriginOptions options = ScriptOriginOptions());

  // Compile a String source within a context.
  static Handle<SharedFunctionInfo> CompileScript(
      Handle<String> source, Handle<Object> script_name, int line_offset,
      int column_offset, ScriptOriginOptions resource_options,
      Handle<Object> source_map_url, Handle<Context> context,
      v8::Extension* extension, ScriptData** cached_data,
      ScriptCompiler::CompileOptions compile_options,
      NativesFlag is_natives_code, bool is_module);

  static Handle<SharedFunctionInfo> CompileStreamedScript(Handle<Script> script,
                                                          ParseInfo* info,
                                                          int source_length);

  // Create a shared function info object (the code may be lazily compiled).
  static Handle<SharedFunctionInfo> GetSharedFunctionInfo(
      FunctionLiteral* node, Handle<Script> script, CompilationInfo* outer);

  enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };

  // Generate and return optimized code or start a concurrent optimization job.
  // In the latter case, return the InOptimizationQueue builtin.  On failure,
  // return the empty handle.
  MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
      Handle<JSFunction> function, Handle<Code> current_code,
      ConcurrencyMode mode, BailoutId osr_ast_id = BailoutId::None(),
      JavaScriptFrame* osr_frame = nullptr);

  // Generate and return code from a previously queued optimization job.
  // On failure, return the empty handle.
  static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
};
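
// Usage sketch (hypothetical embedder-side code): CompileScript returns a
// SharedFunctionInfo, which must be wrapped via
// Factory::NewFunctionFromSharedFunctionInfo before it can be called.
// "source", "script_name", "source_map_url", "context" and "cached_data"
// are assumed to exist:
//
//   Handle<SharedFunctionInfo> shared = Compiler::CompileScript(
//       source, script_name, 0, 0, ScriptOriginOptions(), source_map_url,
//       context, NULL /* extension */, &cached_data,
//       ScriptCompiler::kNoCompileOptions, NOT_NATIVES_CODE, false);
//   Handle<JSFunction> fun =
//       isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
//                                                             context);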


class CompilationPhase BASE_EMBEDDED {
 public:
  CompilationPhase(const char* name, CompilationInfo* info);
  ~CompilationPhase();

 protected:
  bool ShouldProduceTraceOutput() const;

  const char* name() const { return name_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }
  Zone* zone() { return &zone_; }

 private:
  const char* name_;
  CompilationInfo* info_;
  Zone zone_;
  size_t info_zone_start_allocation_size_;
  base::ElapsedTimer timer_;

  DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_H_