// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILATION_INFO_H_
#define V8_COMPILATION_INFO_H_

#include <memory>

#include "src/compilation-dependencies.h"
#include "src/frames.h"
#include "src/handles.h"
#include "src/objects.h"
#include "src/source-position-table.h"
#include "src/utils.h"
#include "src/vector.h"

namespace v8 {
namespace internal {

class DeclarationScope;
class DeferredHandles;
class FunctionLiteral;
class JavaScriptFrame;
class ParseInfo;
class Isolate;
class Zone;

// CompilationInfo encapsulates some information known at compile time. It is
// constructed based on the resources available at compile time.
class CompilationInfo final {
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
  enum Flag {
    kDeferredCalling = 1 << 0,
    kNonDeferredCalling = 1 << 1,
    kSavesCallerDoubles = 1 << 2,
    kRequiresFrame = 1 << 3,
    kDeoptimizationSupport = 1 << 4,
    kAccessorInliningEnabled = 1 << 5,
    kSerializing = 1 << 6,
    kFunctionContextSpecializing = 1 << 7,
    kFrameSpecializing = 1 << 8,
    kInliningEnabled = 1 << 9,
    kDisableFutureOptimization = 1 << 10,
    kSplittingEnabled = 1 << 11,
    kDeoptimizationEnabled = 1 << 12,
    kSourcePositionsEnabled = 1 << 13,
    kBailoutOnUninitialized = 1 << 14,
    kOptimizeFromBytecode = 1 << 15,
    kTypeFeedbackEnabled = 1 << 16,
  };
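
  // Each Flag above is a single-bit mask that the Mark*() methods below OR
  // into the flags_ bitfield and that the corresponding is_*() predicates
  // test. A minimal usage sketch (hypothetical call site):
  //
  //   info->MarkAsInliningEnabled();   // sets the kInliningEnabled bit
  //   info->is_inlining_enabled();     // now returns true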

  CompilationInfo(ParseInfo* parse_info, Handle<JSFunction> closure);
  CompilationInfo(Vector<const char> debug_name, Isolate* isolate, Zone* zone,
                  Code::Flags code_flags);
  ~CompilationInfo();

  ParseInfo* parse_info() const { return parse_info_; }

  // -----------------------------------------------------------
  // TODO(titzer): inline and delete accessors of ParseInfo
  // -----------------------------------------------------------
  Handle<Script> script() const;
  FunctionLiteral* literal() const;
  DeclarationScope* scope() const;
  Handle<SharedFunctionInfo> shared_info() const;
  bool has_shared_info() const;
  // -----------------------------------------------------------

  Isolate* isolate() const { return isolate_; }
  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_ast_id_.IsNone(); }
  Handle<JSFunction> closure() const { return closure_; }
  Handle<Code> code() const { return code_; }
  Code::Flags code_flags() const { return code_flags_; }
  BailoutId osr_ast_id() const { return osr_ast_id_; }
  JavaScriptFrame* osr_frame() const { return osr_frame_; }
  int num_parameters() const;
  int num_parameters_including_this() const;
  bool is_this_defined() const;

  void set_parameter_count(int parameter_count) {
    DCHECK(IsStub());
    parameter_count_ = parameter_count;
  }

  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
  Handle<BytecodeArray> bytecode_array() const { return bytecode_array_; }

  bool is_calling() const {
    return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
  }

  void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }

  bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }

  void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }

  bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }

  void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }

  bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }

  void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }

  bool requires_frame() const { return GetFlag(kRequiresFrame); }

  // Compilations marked as debug produce unoptimized code with debug break
  // slots. Inner functions that cannot be compiled without a context are
  // compiled eagerly. Deoptimization support is always included so that the
  // code does not have to be recompiled later.
  void MarkAsDebug() {
    set_is_debug();
    SetFlag(kDeoptimizationSupport);
  }

  bool is_debug() const;

  void PrepareForSerializing();

  bool will_serialize() const { return GetFlag(kSerializing); }

  void MarkAsFunctionContextSpecializing() {
    SetFlag(kFunctionContextSpecializing);
  }

  bool is_function_context_specializing() const {
    return GetFlag(kFunctionContextSpecializing);
  }

  void MarkAsFrameSpecializing() { SetFlag(kFrameSpecializing); }

  bool is_frame_specializing() const { return GetFlag(kFrameSpecializing); }

  void MarkAsDeoptimizationEnabled() { SetFlag(kDeoptimizationEnabled); }

  bool is_deoptimization_enabled() const {
    return GetFlag(kDeoptimizationEnabled);
  }

  void MarkAsTypeFeedbackEnabled() { SetFlag(kTypeFeedbackEnabled); }

  bool is_type_feedback_enabled() const {
    return GetFlag(kTypeFeedbackEnabled);
  }

  void MarkAsAccessorInliningEnabled() { SetFlag(kAccessorInliningEnabled); }

  bool is_accessor_inlining_enabled() const {
    return GetFlag(kAccessorInliningEnabled);
  }

  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }

  bool is_source_positions_enabled() const {
    return GetFlag(kSourcePositionsEnabled);
  }

  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }

  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }

  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }

  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }

  void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }

  bool is_bailout_on_uninitialized() const {
    return GetFlag(kBailoutOnUninitialized);
  }

  void MarkAsOptimizeFromBytecode() { SetFlag(kOptimizeFromBytecode); }

  bool is_optimizing_from_bytecode() const {
    return GetFlag(kOptimizeFromBytecode);
  }

  bool GeneratePreagedPrologue() const {
    // Generate a pre-aged prologue if we are optimizing for size, which
    // will make code flushing more aggressive. Only apply to Code::FUNCTION,
    // since StaticMarkingVisitor::IsFlushable only flushes proper functions.
    return FLAG_optimize_for_size && FLAG_age_code && !is_debug() &&
           output_code_kind() == Code::FUNCTION;
  }

  void SetCode(Handle<Code> code) { code_ = code; }

  void SetBytecodeArray(Handle<BytecodeArray> bytecode_array) {
    bytecode_array_ = bytecode_array;
  }

  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
           (FLAG_trap_on_stub_deopt && IsStub());
  }

  bool has_context() const;
  Context* context() const;

  bool has_native_context() const;
  Context* native_context() const;

  bool has_global_object() const;
  JSGlobalObject* global_object() const;

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
  bool IsStub() const { return mode_ == STUB; }
  void SetOptimizing();
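  // Marks the compilation as optimizing for on-stack replacement: afterwards
  // is_osr() reports true (provided {osr_ast_id}.IsNone() is false) and
  // osr_ast_id()/osr_frame() return the recorded values.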
  void SetOptimizingForOsr(BailoutId osr_ast_id, JavaScriptFrame* osr_frame) {
    SetOptimizing();
    osr_ast_id_ = osr_ast_id;
    osr_frame_ = osr_frame;
  }

  // Deoptimization support.
  bool HasDeoptimizationSupport() const {
    return GetFlag(kDeoptimizationSupport);
  }
  void EnableDeoptimizationSupport() {
    DCHECK_EQ(BASE, mode_);
    SetFlag(kDeoptimizationSupport);
  }
  bool ShouldEnsureSpaceForLazyDeopt() { return !IsStub(); }

  bool ExpectsJSReceiverAsReceiver();

  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

  void set_deferred_handles(DeferredHandles* deferred_handles) {
    DCHECK(deferred_handles_ == NULL);
    deferred_handles_ = deferred_handles;
  }

  void ReopenHandlesInNewHandleScope();

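  // Records {reason} as the bailout reason (unless one has already been
  // recorded) and disables all future optimization attempts for this
  // compilation.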
  void AbortOptimization(BailoutReason reason) {
    DCHECK(reason != kNoReason);
    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
    SetFlag(kDisableFutureOptimization);
  }

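  // Records {reason} but, unlike AbortOptimization(), leaves future
  // optimization attempts enabled (the reason is dropped if optimization has
  // already been disabled).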
  void RetryOptimization(BailoutReason reason) {
    DCHECK(reason != kNoReason);
    if (GetFlag(kDisableFutureOptimization)) return;
    bailout_reason_ = reason;
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }

  int prologue_offset() const {
    DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
    DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
    prologue_offset_ = prologue_offset;
  }

  CompilationDependencies* dependencies() { return &dependencies_; }

  int optimization_id() const { return optimization_id_; }

  int osr_expr_stack_height() { return osr_expr_stack_height_; }
  void set_osr_expr_stack_height(int height) {
    DCHECK(height >= 0);
    osr_expr_stack_height_ = height;
  }

  bool has_simple_parameters();

  struct InlinedFunctionHolder {
    Handle<SharedFunctionInfo> shared_info;

    // Root that holds the unoptimized code of the inlined function alive
    // (and out of reach of code flushing) until we finish compilation.
    // Do not remove.
    Handle<Code> inlined_code_object_root;

    InliningPosition position;

    InlinedFunctionHolder(Handle<SharedFunctionInfo> inlined_shared_info,
                          Handle<Code> inlined_code_object_root,
                          SourcePosition pos)
        : shared_info(inlined_shared_info),
          inlined_code_object_root(inlined_code_object_root) {
      position.position = pos;
      // initialized when generating the deoptimization literals
      position.inlined_function_id = DeoptimizationInputData::kNotInlinedIndex;
    }

    void RegisterInlinedFunctionId(size_t inlined_function_id) {
      position.inlined_function_id = static_cast<int>(inlined_function_id);
    }
  };

  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
  InlinedFunctionList& inlined_functions() { return inlined_functions_; }

  // Returns the inlining id for source position tracking.
  int AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function,
                         SourcePosition pos);

  std::unique_ptr<char[]> GetDebugName() const;

  Code::Kind output_code_kind() const;

  StackFrame::Type GetOutputStackFrameType() const;

  int GetDeclareGlobalsFlags() const;

  SourcePositionTableBuilder::RecordingMode SourcePositionRecordingMode() const;

 private:
  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
  // STUB is code generated for a code stub rather than for a JavaScript
  // function.
  enum Mode { BASE, OPTIMIZE, STUB };

  CompilationInfo(ParseInfo* parse_info, Vector<const char> debug_name,
                  Code::Flags code_flags, Mode mode, Isolate* isolate,
                  Zone* zone);

  ParseInfo* parse_info_;
  Isolate* isolate_;

  void SetMode(Mode mode) { mode_ = mode; }

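  // Flag helpers: each Flag is a distinct bit, so setting, clearing and
  // testing a flag are plain bit operations on flags_.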
  void SetFlag(Flag flag) { flags_ |= flag; }

  void SetFlag(Flag flag, bool value) {
    flags_ = value ? flags_ | flag : flags_ & ~flag;
  }

  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

  void set_is_debug();

  unsigned flags_;

  Code::Flags code_flags_;

  Handle<JSFunction> closure_;

  // The compiled code.
  Handle<Code> code_;

  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
  BailoutId osr_ast_id_;

  // Holds the bytecode array generated by the interpreter.
  // TODO(rmcilroy/mstarzinger): Temporary work-around until compiler.cc is
  // refactored so that we no longer need to carry the BytecodeArray around.
  Handle<BytecodeArray> bytecode_array_;

  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

  DeferredHandles* deferred_handles_;

  // Dependencies for this compilation, e.g. stable maps.
  CompilationDependencies dependencies_;

  BailoutReason bailout_reason_;

  int prologue_offset_;

  InlinedFunctionList inlined_functions_;

  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

  int optimization_id_;

  int osr_expr_stack_height_;

  // The current OSR frame for specialization or {nullptr}.
  JavaScriptFrame* osr_frame_ = nullptr;

  Vector<const char> debug_name_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILATION_INFO_H_