Home | History | Annotate | Download | only in src
      1 // Copyright 2016 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #ifndef V8_OPTIMIZED_COMPILATION_INFO_H_
      6 #define V8_OPTIMIZED_COMPILATION_INFO_H_
      7 
      8 #include <memory>
      9 
     10 #include "src/bailout-reason.h"
     11 #include "src/code-reference.h"
     12 #include "src/feedback-vector.h"
     13 #include "src/frames.h"
     14 #include "src/globals.h"
     15 #include "src/handles.h"
     16 #include "src/objects.h"
     17 #include "src/source-position-table.h"
     18 #include "src/utils.h"
     19 #include "src/vector.h"
     20 
     21 namespace v8 {
     22 namespace internal {
     23 
// Forward declarations of types that are only used by pointer/reference in
// this header, to avoid pulling in their full definitions.
class CoverageInfo;
class DeclarationScope;
class DeferredHandles;
class FunctionLiteral;
class Isolate;
class JavaScriptFrame;
class ParseInfo;
class SourceRangeMap;
     32 class Zone;
     33 
     34 // OptimizedCompilationInfo encapsulates the information needed to compile
     35 // optimized code for a given function, and the results of the optimized
     36 // compilation.
     37 class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
     38  public:
     39   // Various configuration flags for a compilation, as well as some properties
     40   // of the compiled code produced by a compilation.
     41   enum Flag {
     42     kAccessorInliningEnabled = 1 << 0,
     43     kFunctionContextSpecializing = 1 << 1,
     44     kInliningEnabled = 1 << 2,
     45     kDisableFutureOptimization = 1 << 3,
     46     kSplittingEnabled = 1 << 4,
     47     kSourcePositionsEnabled = 1 << 5,
     48     kBailoutOnUninitialized = 1 << 6,
     49     kLoopPeelingEnabled = 1 << 7,
     50     kUntrustedCodeMitigations = 1 << 8,
     51     kSwitchJumpTableEnabled = 1 << 9,
     52     kCalledWithCodeStartRegister = 1 << 10,
     53     kPoisonRegisterArguments = 1 << 11,
     54     kAllocationFoldingEnabled = 1 << 12,
     55     kAnalyzeEnvironmentLiveness = 1 << 13,
     56     kTraceTurboJson = 1 << 14,
     57     kTraceTurboGraph = 1 << 15,
     58     kTraceTurboScheduled = 1 << 16,
     59     kWasmRuntimeExceptionSupport = 1 << 17
     60   };
     61 
     62   // Construct a compilation info for optimized compilation.
     63   OptimizedCompilationInfo(Zone* zone, Isolate* isolate,
     64                            Handle<SharedFunctionInfo> shared,
     65                            Handle<JSFunction> closure);
     66   // Construct a compilation info for stub compilation, Wasm, and testing.
     67   OptimizedCompilationInfo(Vector<const char> debug_name, Zone* zone,
     68                            Code::Kind code_kind);
     69 
     70   ~OptimizedCompilationInfo();
     71 
     72   Zone* zone() { return zone_; }
     73   bool is_osr() const { return !osr_offset_.IsNone(); }
     74   Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
     75   bool has_shared_info() const { return !shared_info().is_null(); }
     76   Handle<JSFunction> closure() const { return closure_; }
     77   Handle<Code> code() const { return code_.as_js_code(); }
     78 
     79   wasm::WasmCode* wasm_code() const {
     80     return const_cast<wasm::WasmCode*>(code_.as_wasm_code());
     81   }
     82   AbstractCode::Kind abstract_code_kind() const { return code_kind_; }
     83   Code::Kind code_kind() const {
     84     DCHECK(code_kind_ < static_cast<AbstractCode::Kind>(Code::NUMBER_OF_KINDS));
     85     return static_cast<Code::Kind>(code_kind_);
     86   }
     87   uint32_t stub_key() const { return stub_key_; }
     88   void set_stub_key(uint32_t stub_key) { stub_key_ = stub_key; }
     89   int32_t builtin_index() const { return builtin_index_; }
     90   void set_builtin_index(int32_t index) { builtin_index_ = index; }
     91   BailoutId osr_offset() const { return osr_offset_; }
     92   JavaScriptFrame* osr_frame() const { return osr_frame_; }
     93 
     94   // Flags used by optimized compilation.
     95 
     96   void MarkAsFunctionContextSpecializing() {
     97     SetFlag(kFunctionContextSpecializing);
     98   }
     99   bool is_function_context_specializing() const {
    100     return GetFlag(kFunctionContextSpecializing);
    101   }
    102 
    103   void MarkAsAccessorInliningEnabled() { SetFlag(kAccessorInliningEnabled); }
    104   bool is_accessor_inlining_enabled() const {
    105     return GetFlag(kAccessorInliningEnabled);
    106   }
    107 
    108   void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }
    109   bool is_source_positions_enabled() const {
    110     return GetFlag(kSourcePositionsEnabled);
    111   }
    112 
    113   void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
    114   bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }
    115 
    116   void SetPoisoningMitigationLevel(PoisoningMitigationLevel poisoning_level) {
    117     poisoning_level_ = poisoning_level;
    118   }
    119   PoisoningMitigationLevel GetPoisoningMitigationLevel() const {
    120     return poisoning_level_;
    121   }
    122 
    123   void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
    124   bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }
    125 
    126   void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }
    127   bool is_bailout_on_uninitialized() const {
    128     return GetFlag(kBailoutOnUninitialized);
    129   }
    130 
    131   void MarkAsLoopPeelingEnabled() { SetFlag(kLoopPeelingEnabled); }
    132   bool is_loop_peeling_enabled() const { return GetFlag(kLoopPeelingEnabled); }
    133 
    134   bool has_untrusted_code_mitigations() const {
    135     return GetFlag(kUntrustedCodeMitigations);
    136   }
    137 
    138   bool switch_jump_table_enabled() const {
    139     return GetFlag(kSwitchJumpTableEnabled);
    140   }
    141 
    142   bool called_with_code_start_register() const {
    143     bool enabled = GetFlag(kCalledWithCodeStartRegister);
    144     return enabled;
    145   }
    146 
    147   void MarkAsPoisoningRegisterArguments() {
    148     DCHECK(has_untrusted_code_mitigations());
    149     SetFlag(kPoisonRegisterArguments);
    150   }
    151   bool is_poisoning_register_arguments() const {
    152     bool enabled = GetFlag(kPoisonRegisterArguments);
    153     DCHECK_IMPLIES(enabled, has_untrusted_code_mitigations());
    154     DCHECK_IMPLIES(enabled, called_with_code_start_register());
    155     return enabled;
    156   }
    157 
    158   void MarkAsAllocationFoldingEnabled() { SetFlag(kAllocationFoldingEnabled); }
    159   bool is_allocation_folding_enabled() const {
    160     return GetFlag(kAllocationFoldingEnabled);
    161   }
    162 
    163   void MarkAsAnalyzeEnvironmentLiveness() {
    164     SetFlag(kAnalyzeEnvironmentLiveness);
    165   }
    166   bool is_analyze_environment_liveness() const {
    167     return GetFlag(kAnalyzeEnvironmentLiveness);
    168   }
    169 
    170   void SetWasmRuntimeExceptionSupport() {
    171     SetFlag(kWasmRuntimeExceptionSupport);
    172   }
    173 
    174   bool wasm_runtime_exception_support() {
    175     return GetFlag(kWasmRuntimeExceptionSupport);
    176   }
    177 
    178   bool trace_turbo_json_enabled() const { return GetFlag(kTraceTurboJson); }
    179 
    180   bool trace_turbo_graph_enabled() const { return GetFlag(kTraceTurboGraph); }
    181 
    182   bool trace_turbo_scheduled_enabled() const {
    183     return GetFlag(kTraceTurboScheduled);
    184   }
    185 
    186   // Code getters and setters.
    187 
    188   template <typename T>
    189   void SetCode(T code) {
    190     code_ = CodeReference(code);
    191   }
    192 
    193   bool has_context() const;
    194   Context* context() const;
    195 
    196   bool has_native_context() const;
    197   Context* native_context() const;
    198 
    199   bool has_global_object() const;
    200   JSGlobalObject* global_object() const;
    201 
    202   // Accessors for the different compilation modes.
    203   bool IsOptimizing() const {
    204     return abstract_code_kind() == AbstractCode::OPTIMIZED_FUNCTION;
    205   }
    206   bool IsWasm() const {
    207     return abstract_code_kind() == AbstractCode::WASM_FUNCTION;
    208   }
    209   bool IsStub() const {
    210     return abstract_code_kind() != AbstractCode::OPTIMIZED_FUNCTION &&
    211            abstract_code_kind() != AbstractCode::WASM_FUNCTION;
    212   }
    213   void SetOptimizingForOsr(BailoutId osr_offset, JavaScriptFrame* osr_frame) {
    214     DCHECK(IsOptimizing());
    215     osr_offset_ = osr_offset;
    216     osr_frame_ = osr_frame;
    217   }
    218 
    219   void set_deferred_handles(std::shared_ptr<DeferredHandles> deferred_handles);
    220   void set_deferred_handles(DeferredHandles* deferred_handles);
    221   std::shared_ptr<DeferredHandles> deferred_handles() {
    222     return deferred_handles_;
    223   }
    224 
    225   void ReopenHandlesInNewHandleScope(Isolate* isolate);
    226 
    227   void AbortOptimization(BailoutReason reason) {
    228     DCHECK_NE(reason, BailoutReason::kNoReason);
    229     if (bailout_reason_ == BailoutReason::kNoReason) bailout_reason_ = reason;
    230     SetFlag(kDisableFutureOptimization);
    231   }
    232 
    233   void RetryOptimization(BailoutReason reason) {
    234     DCHECK_NE(reason, BailoutReason::kNoReason);
    235     if (GetFlag(kDisableFutureOptimization)) return;
    236     bailout_reason_ = reason;
    237   }
    238 
    239   BailoutReason bailout_reason() const { return bailout_reason_; }
    240 
    241   int optimization_id() const {
    242     DCHECK(IsOptimizing());
    243     return optimization_id_;
    244   }
    245 
    246   struct InlinedFunctionHolder {
    247     Handle<SharedFunctionInfo> shared_info;
    248 
    249     InliningPosition position;
    250 
    251     InlinedFunctionHolder(Handle<SharedFunctionInfo> inlined_shared_info,
    252                           SourcePosition pos)
    253         : shared_info(inlined_shared_info) {
    254       position.position = pos;
    255       // initialized when generating the deoptimization literals
    256       position.inlined_function_id = DeoptimizationData::kNotInlinedIndex;
    257     }
    258 
    259     void RegisterInlinedFunctionId(size_t inlined_function_id) {
    260       position.inlined_function_id = static_cast<int>(inlined_function_id);
    261     }
    262   };
    263 
    264   typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
    265   InlinedFunctionList& inlined_functions() { return inlined_functions_; }
    266 
    267   // Returns the inlining id for source position tracking.
    268   int AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function,
    269                          SourcePosition pos);
    270 
    271   std::unique_ptr<char[]> GetDebugName() const;
    272 
    273   StackFrame::Type GetOutputStackFrameType() const;
    274 
    275   const char* trace_turbo_filename() const {
    276     return trace_turbo_filename_.get();
    277   }
    278 
    279   void set_trace_turbo_filename(std::unique_ptr<char[]> filename) {
    280     trace_turbo_filename_ = std::move(filename);
    281   }
    282 
    283  private:
    284   OptimizedCompilationInfo(Vector<const char> debug_name,
    285                            AbstractCode::Kind code_kind, Zone* zone);
    286 
    287   void SetFlag(Flag flag) { flags_ |= flag; }
    288   bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
    289 
    290   void SetTracingFlags(bool passes_filter);
    291 
    292   // Compilation flags.
    293   unsigned flags_;
    294   PoisoningMitigationLevel poisoning_level_ =
    295       PoisoningMitigationLevel::kDontPoison;
    296 
    297   AbstractCode::Kind code_kind_;
    298   uint32_t stub_key_;
    299   int32_t builtin_index_;
    300 
    301   Handle<SharedFunctionInfo> shared_info_;
    302 
    303   Handle<JSFunction> closure_;
    304 
    305   // The compiled code.
    306   CodeReference code_;
    307 
    308   // Entry point when compiling for OSR, {BailoutId::None} otherwise.
    309   BailoutId osr_offset_;
    310 
    311   // The zone from which the compilation pipeline working on this
    312   // OptimizedCompilationInfo allocates.
    313   Zone* zone_;
    314 
    315   std::shared_ptr<DeferredHandles> deferred_handles_;
    316 
    317   BailoutReason bailout_reason_;
    318 
    319   InlinedFunctionList inlined_functions_;
    320 
    321   int optimization_id_;
    322 
    323   // The current OSR frame for specialization or {nullptr}.
    324   JavaScriptFrame* osr_frame_ = nullptr;
    325 
    326   Vector<const char> debug_name_;
    327   std::unique_ptr<char[]> trace_turbo_filename_;
    328 
    329   DISALLOW_COPY_AND_ASSIGN(OptimizedCompilationInfo);
    330 };
    331 
    332 }  // namespace internal
    333 }  // namespace v8
    334 
    335 #endif  // V8_OPTIMIZED_COMPILATION_INFO_H_
    336