// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_CODE_GENERATOR_H_
#define V8_COMPILER_CODE_GENERATOR_H_

#include "src/base/optional.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/instruction.h"
#include "src/compiler/osr.h"
#include "src/compiler/unwinding-info-writer.h"
#include "src/deoptimizer.h"
#include "src/macro-assembler.h"
#include "src/safepoint-table.h"
#include "src/source-position-table.h"
#include "src/trap-handler/trap-handler.h"

namespace v8 {
namespace internal {

class OptimizedCompilationInfo;

namespace compiler {

// Forward declarations.
class DeoptimizationExit;
class FrameAccessState;
class Linkage;
class OutOfLineCode;

struct BranchInfo {
  FlagsCondition condition;
  Label* true_label;
  Label* false_label;
  bool fallthru;
};
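
// A rough sketch of how an architecture backend consumes a BranchInfo
// (hypothetical code, patterned loosely on the x64 backend, where
// FlagsConditionToCondition is assumed to exist): {fallthru} indicates that
// the false target is the next block in assembly order, so no jump is needed.
//
//   __ j(FlagsConditionToCondition(branch->condition), branch->true_label);
//   if (!branch->fallthru) __ jmp(branch->false_label);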

class InstructionOperandIterator {
 public:
  InstructionOperandIterator(Instruction* instr, size_t pos)
      : instr_(instr), pos_(pos) {}

  Instruction* instruction() const { return instr_; }
  InstructionOperand* Advance() { return instr_->InputAt(pos_++); }

 private:
  Instruction* instr_;
  size_t pos_;
};
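
// A minimal usage sketch (hypothetical, for illustration only): walking an
// instruction's inputs starting at a given offset.
//
//   InstructionOperandIterator it(instr, frame_state_offset);
//   InstructionOperand* op0 = it.Advance();  // instr->InputAt(offset)
//   InstructionOperand* op1 = it.Advance();  // instr->InputAt(offset + 1)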

// Either a non-null Handle<Object> or a double.
class DeoptimizationLiteral {
 public:
  DeoptimizationLiteral() : object_(), number_(0) {}
  explicit DeoptimizationLiteral(Handle<Object> object)
      : object_(object), number_(0) {
    DCHECK(!object_.is_null());
  }
  explicit DeoptimizationLiteral(double number) : object_(), number_(number) {}

  Handle<Object> object() const { return object_; }

  bool operator==(const DeoptimizationLiteral& other) const {
    return object_.equals(other.object_) &&
           bit_cast<uint64_t>(number_) == bit_cast<uint64_t>(other.number_);
  }

  Handle<Object> Reify(Isolate* isolate) const;

 private:
  Handle<Object> object_;
  double number_;
};
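
// A minimal usage sketch (hypothetical, for illustration only). Reify() is
// expected to return the handle as-is for object literals and to allocate a
// fresh number object for double literals.
//
//   DeoptimizationLiteral num(1.5);
//   DeoptimizationLiteral obj(isolate->factory()->undefined_value());
//   Handle<Object> reified = num.Reify(isolate);  // a newly allocated 1.5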

// Generates native code for a sequence of instructions.
class CodeGenerator final : public GapResolver::Assembler {
 public:
  explicit CodeGenerator(Zone* codegen_zone, Frame* frame, Linkage* linkage,
                         InstructionSequence* code,
                         OptimizedCompilationInfo* info, Isolate* isolate,
                         base::Optional<OsrHelper> osr_helper,
                         int start_source_position,
                         JumpOptimizationInfo* jump_opt,
                         PoisoningMitigationLevel poisoning_level,
                         const AssemblerOptions& options,
                         int32_t builtin_index);

  // Generate native code. After calling AssembleCode, call FinalizeCode to
  // produce the actual code object. If an error occurs during either phase,
  // FinalizeCode returns an empty MaybeHandle.
  void AssembleCode();  // Does not need to run on main thread.
  MaybeHandle<Code> FinalizeCode();
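
  // A rough sketch of the expected calling sequence (hypothetical driver
  // code, for illustration only; the real driver lives in the compilation
  // pipeline):
  //
  //   CodeGenerator generator(/* ... */);
  //   generator.AssembleCode();  // may run off the main thread
  //   MaybeHandle<Code> maybe_code = generator.FinalizeCode();  // main thread
  //   Handle<Code> code;
  //   if (!maybe_code.ToHandle(&code)) return;  // bail out on failure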

  OwnedVector<byte> GetSourcePositionTable();
  OwnedVector<trap_handler::ProtectedInstructionData>
  GetProtectedInstructions();

  InstructionSequence* code() const { return code_; }
  FrameAccessState* frame_access_state() const { return frame_access_state_; }
  const Frame* frame() const { return frame_access_state_->frame(); }
  Isolate* isolate() const { return isolate_; }
  Linkage* linkage() const { return linkage_; }

  Label* GetLabel(RpoNumber rpo) { return &labels_[rpo.ToSize()]; }

  void AddProtectedInstructionLanding(uint32_t instr_offset,
                                      uint32_t landing_offset);

  bool wasm_runtime_exception_support() const;

  SourcePosition start_source_position() const {
    return start_source_position_;
  }

  void AssembleSourcePosition(Instruction* instr);
  void AssembleSourcePosition(SourcePosition source_position);

  // Record a safepoint with the given pointer map.
  void RecordSafepoint(ReferenceMap* references, Safepoint::Kind kind,
                       int arguments, Safepoint::DeoptMode deopt_mode);

  Zone* zone() const { return zone_; }
  TurboAssembler* tasm() { return &tasm_; }
  size_t GetSafepointTableOffset() const { return safepoints_.GetCodeOffset(); }
  size_t GetHandlerTableOffset() const { return handler_table_offset_; }

  const ZoneVector<int>& block_starts() const { return block_starts_; }
  const ZoneVector<int>& instr_starts() const { return instr_starts_; }

  static constexpr int kBinarySearchSwitchMinimalCases = 4;

 private:
  GapResolver* resolver() { return &resolver_; }
  SafepointTableBuilder* safepoints() { return &safepoints_; }
  OptimizedCompilationInfo* info() const { return info_; }
  OsrHelper* osr_helper() { return &(*osr_helper_); }

  // Create the FrameAccessState object. The Frame is immutable from here on.
  void CreateFrameAccessState(Frame* frame);
  // Architecture-specific frame finalization.
  void FinishFrame(Frame* frame);

  // Checks if {block} will appear directly after {current_block_} when
  // assembling code, in which case a fall-through can be used.
  bool IsNextInAssemblyOrder(RpoNumber block) const;

  // Check if a heap object can be materialized by loading from a heap root,
  // which is cheaper on some platforms than materializing the actual heap
  // object constant.
  bool IsMaterializableFromRoot(Handle<HeapObject> object,
                                Heap::RootListIndex* index_return);

  enum CodeGenResult { kSuccess, kTooManyDeoptimizationBailouts };

  // Assemble instructions for the specified block.
  CodeGenResult AssembleBlock(const InstructionBlock* block);

  // Inserts a mask update at the beginning of an instruction block if the
  // predecessor block ends with a masking branch.
  void TryInsertBranchPoisoning(const InstructionBlock* block);

  // Initializes the masking register in the prologue of a function.
  void InitializeSpeculationPoison();
  // Resets the masking register during execution of a function.
  void ResetSpeculationPoison();
  // Generates a mask from the pc passed in {kJavaScriptCallCodeStartRegister}.
  void GenerateSpeculationPoisonFromCodeStartRegister();

  // Assemble code for the specified instruction.
  CodeGenResult AssembleInstruction(Instruction* instr,
                                    const InstructionBlock* block);
  void AssembleGaps(Instruction* instr);

  // Computes branch info from the given instruction. Returns a valid RPO
  // number if the branch is redundant; in that case the returned RPO number
  // points to the target basic block.
  RpoNumber ComputeBranchInfo(BranchInfo* branch, Instruction* instr);

  // Returns true if an instruction is a tail call that needs to adjust the
  // stack pointer before execution. The index of the empty stack slot above
  // the adjusted stack pointer is returned in |slot|.
  bool GetSlotAboveSPBeforeTailCall(Instruction* instr, int* slot);

  // Determines how to call helper stubs depending on the code kind.
  StubCallMode DetermineStubCallMode() const;

  CodeGenResult AssembleDeoptimizerCall(int deoptimization_id,
                                        SourcePosition pos);

  // ===========================================================================
  // ============= Architecture-specific code generation methods. ==============
  // ===========================================================================

  CodeGenResult AssembleArchInstruction(Instruction* instr);
  void AssembleArchJump(RpoNumber target);
  void AssembleArchBranch(Instruction* instr, BranchInfo* branch);

  // Generates a special branch for a deoptimization condition.
  void AssembleArchDeoptBranch(Instruction* instr, BranchInfo* branch);

  void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
  void AssembleArchTrap(Instruction* instr, FlagsCondition condition);
  void AssembleArchBinarySearchSwitchRange(Register input, RpoNumber def_block,
                                           std::pair<int32_t, Label*>* begin,
                                           std::pair<int32_t, Label*>* end);
  void AssembleArchBinarySearchSwitch(Instruction* instr);
  void AssembleArchLookupSwitch(Instruction* instr);
  void AssembleArchTableSwitch(Instruction* instr);
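
  // A rough sketch of the binary search emitted by the range method above
  // (hypothetical, simplified from the per-architecture implementations):
  // ranges with fewer than kBinarySearchSwitchMinimalCases cases are emitted
  // as a linear chain of compare-and-jumps; larger ranges split at the middle.
  //
  //   if (end - begin < kBinarySearchSwitchMinimalCases) {
  //     for (auto* p = begin; p != end; ++p)
  //       /* if (input == p->first) jump to p->second */;
  //     AssembleArchJump(def_block);  // no case matched
  //   } else {
  //     auto* middle = begin + (end - begin) / 2;
  //     /* if (input < middle->first) recurse on [begin, middle)
  //        else recurse on [middle, end) */
  //   }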

  // Generates code that checks whether the {kJavaScriptCallCodeStartRegister}
  // contains the expected pointer to the start of the instruction stream.
  void AssembleCodeStartRegisterCheck();

  void AssembleBranchPoisoning(FlagsCondition condition, Instruction* instr);

  // When entering code that is marked for deoptimization, rather than
  // continuing with its execution, we jump to lazily compiled code. We need
  // to do this because the code has already been deoptimized and needs to be
  // unlinked from the JS functions referring to it.
  void BailoutIfDeoptimized();

  // Generates code to poison the stack pointer and implicit register arguments
  // like the context register and the function register.
  void AssembleRegisterArgumentPoisoning();

  // Generates an architecture-specific, descriptor-specific prologue
  // to set up a stack frame.
  void AssembleConstructFrame();

  // Generates an architecture-specific, descriptor-specific return sequence
  // to tear down a stack frame.
  void AssembleReturn(InstructionOperand* pop);

  void AssembleDeconstructFrame();

  // Generates code to manipulate the stack in preparation for a tail call.
  void AssemblePrepareTailCall();

  // Generates code to pop the current frame if it is an arguments adaptor
  // frame.
  void AssemblePopArgumentsAdaptorFrame(Register args_reg, Register scratch1,
                                        Register scratch2, Register scratch3);

  enum PushTypeFlag {
    kImmediatePush = 0x1,
    kRegisterPush = 0x2,
    kStackSlotPush = 0x4,
    kScalarPush = kRegisterPush | kStackSlotPush
  };

  typedef base::Flags<PushTypeFlag> PushTypeFlags;

  static bool IsValidPush(InstructionOperand source, PushTypeFlags push_type);
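
  // A minimal sketch of how the flags combine (hypothetical, for illustration
  // only): allow pushes sourced from registers and stack slots, but not from
  // immediates.
  //
  //   PushTypeFlags push_type(kScalarPush);  // kRegisterPush | kStackSlotPush
  //   if (IsValidPush(source, push_type)) { /* turn the move into a push */ }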

  // Generates a list of moves from an instruction that are candidates to be
  // turned into push instructions on platforms that support them. In general,
  // the list of push candidates consists of moves to a set of contiguous
  // destination InstructionOperand locations on the stack that neither clobber
  // values needed to resolve the gap nor use values generated by the gap,
  // i.e. moves that can be hoisted together before the actual gap and
  // assembled together.
  static void GetPushCompatibleMoves(Instruction* instr,
                                     PushTypeFlags push_type,
                                     ZoneVector<MoveOperands*>* pushes);

  class MoveType {
   public:
    enum Type {
      kRegisterToRegister,
      kRegisterToStack,
      kStackToRegister,
      kStackToStack,
      kConstantToRegister,
      kConstantToStack
    };

    // Detect what type of move or swap needs to be performed. Note that these
    // functions do not take into account the representation (Tagged, FP,
    // etc.).

    static Type InferMove(InstructionOperand* source,
                          InstructionOperand* destination);
    static Type InferSwap(InstructionOperand* source,
                          InstructionOperand* destination);
  };
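
  // A rough sketch of the dispatch MoveType enables in AssembleMove
  // (hypothetical, simplified from the per-architecture implementations):
  //
  //   switch (MoveType::InferMove(source, destination)) {
  //     case MoveType::kRegisterToRegister:
  //       /* emit a register-to-register move */
  //       break;
  //     case MoveType::kStackToStack:
  //       /* move through a scratch register */
  //       break;
  //     /* ... remaining cases ... */
  //   }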
  // Called before a tail call |instr|'s gap moves are assembled and allows
  // gap-specific pre-processing, e.g. adjustment of the sp for tail calls that
  // need it before gap moves or conversion of certain gap moves into pushes.
  void AssembleTailCallBeforeGap(Instruction* instr,
                                 int first_unused_stack_slot);
  // Called after a tail call |instr|'s gap moves are assembled and allows
  // gap-specific post-processing, e.g. adjustment of the sp for tail calls that
  // need it after gap moves.
  void AssembleTailCallAfterGap(Instruction* instr,
                                int first_unused_stack_slot);

  void FinishCode();

  // ===========================================================================
  // ============== Architecture-specific gap resolver methods. ================
  // ===========================================================================

  // Interface used by the gap resolver to emit moves and swaps.
  void AssembleMove(InstructionOperand* source,
                    InstructionOperand* destination) final;
  void AssembleSwap(InstructionOperand* source,
                    InstructionOperand* destination) final;

  // ===========================================================================
  // =================== Jump table construction methods. ======================
  // ===========================================================================

  class JumpTable;
  // Adds a jump table that is emitted after the actual code. Returns a label
  // pointing to the beginning of the table. {targets} is assumed to be static
  // or zone allocated.
  Label* AddJumpTable(Label** targets, size_t target_count);
  // Emits a jump table.
  void AssembleJumpTable(Label** targets, size_t target_count);
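
  // A minimal usage sketch (hypothetical, for illustration only), as in a
  // typical AssembleArchTableSwitch implementation:
  //
  //   Label** cases = zone()->NewArray<Label*>(case_count);
  //   for (size_t i = 0; i < case_count; ++i)
  //     cases[i] = GetLabel(/* target rpo for case i */);
  //   Label* table = AddJumpTable(cases, case_count);
  //   /* ... emit a pc-relative jump through {table} ... */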

  // ===========================================================================
  // ================== Deoptimization table construction. =====================
  // ===========================================================================

  void RecordCallPosition(Instruction* instr);
  Handle<DeoptimizationData> GenerateDeoptimizationData();
  int DefineDeoptimizationLiteral(DeoptimizationLiteral literal);
  DeoptimizationEntry const& GetDeoptimizationEntry(Instruction* instr,
                                                    size_t frame_state_offset);
  DeoptimizeKind GetDeoptimizationKind(int deoptimization_id) const;
  DeoptimizeReason GetDeoptimizationReason(int deoptimization_id) const;
  int BuildTranslation(Instruction* instr, int pc_offset,
                       size_t frame_state_offset,
                       OutputFrameStateCombine state_combine);
  void BuildTranslationForFrameStateDescriptor(
      FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
      Translation* translation, OutputFrameStateCombine state_combine);
  void TranslateStateValueDescriptor(StateValueDescriptor* desc,
                                     StateValueList* nested,
                                     Translation* translation,
                                     InstructionOperandIterator* iter);
  void TranslateFrameStateDescriptorOperands(FrameStateDescriptor* desc,
                                             InstructionOperandIterator* iter,
                                             OutputFrameStateCombine combine,
                                             Translation* translation);
  void AddTranslationForOperand(Translation* translation, Instruction* instr,
                                InstructionOperand* op, MachineType type);
  void MarkLazyDeoptSite();

  DeoptimizationExit* AddDeoptimizationExit(Instruction* instr,
                                            size_t frame_state_offset);

  // ===========================================================================

  class DeoptimizationState final : public ZoneObject {
   public:
    DeoptimizationState(BailoutId bailout_id, int translation_id, int pc_offset,
                        DeoptimizeKind kind, DeoptimizeReason reason)
        : bailout_id_(bailout_id),
          translation_id_(translation_id),
          pc_offset_(pc_offset),
          kind_(kind),
          reason_(reason) {}

    BailoutId bailout_id() const { return bailout_id_; }
    int translation_id() const { return translation_id_; }
    int pc_offset() const { return pc_offset_; }
    DeoptimizeKind kind() const { return kind_; }
    DeoptimizeReason reason() const { return reason_; }

   private:
    BailoutId bailout_id_;
    int translation_id_;
    int pc_offset_;
    DeoptimizeKind kind_;
    DeoptimizeReason reason_;
  };

  struct HandlerInfo {
    Label* handler;
    int pc_offset;
  };

  friend class OutOfLineCode;
  friend class CodeGeneratorTester;

  Zone* zone_;
  Isolate* isolate_;
  FrameAccessState* frame_access_state_;
  Linkage* const linkage_;
  InstructionSequence* const code_;
  UnwindingInfoWriter unwinding_info_writer_;
  OptimizedCompilationInfo* const info_;
  Label* const labels_;
  Label return_label_;
  RpoNumber current_block_;
  SourcePosition start_source_position_;
  SourcePosition current_source_position_;
  TurboAssembler tasm_;
  GapResolver resolver_;
  SafepointTableBuilder safepoints_;
  ZoneVector<HandlerInfo> handlers_;
  ZoneDeque<DeoptimizationExit*> deoptimization_exits_;
  ZoneDeque<DeoptimizationState*> deoptimization_states_;
  ZoneDeque<DeoptimizationLiteral> deoptimization_literals_;
  size_t inlined_function_count_;
  TranslationBuffer translations_;
  int handler_table_offset_;
  int last_lazy_deopt_pc_;
  // A kArchCallCFunction instruction can be reached either on its own:
  //   kArchCallCFunction;
  // or wrapped in a save/restore pair:
  //   kArchSaveCallerRegisters;
  //   kArchCallCFunction;
  //   kArchRestoreCallerRegisters;
  // The boolean below distinguishes the two cases. In the latter case, we also
  // need to decide whether FP registers need to be saved, which is controlled
  // by fp_mode_.
  bool caller_registers_saved_;
  SaveFPRegsMode fp_mode_;

  JumpTable* jump_tables_;
  OutOfLineCode* ools_;
  base::Optional<OsrHelper> osr_helper_;
  int osr_pc_offset_;
  int optimized_out_literal_id_;
  SourcePositionTableBuilder source_position_table_builder_;
  ZoneVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  CodeGenResult result_;
  PoisoningMitigationLevel poisoning_level_;
  ZoneVector<int> block_starts_;
  ZoneVector<int> instr_starts_;
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_CODE_GENERATOR_H_