// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_
#define V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_

#include "src/allocation.h"
#include "src/builtins/builtins.h"
#include "src/code-stub-assembler.h"
#include "src/frames.h"
#include "src/globals.h"
#include "src/interpreter/bytecode-register.h"
#include "src/interpreter/bytecodes.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {
namespace interpreter {

class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
 public:
  InterpreterAssembler(compiler::CodeAssemblerState* state, Bytecode bytecode,
                       OperandScale operand_scale);
  ~InterpreterAssembler();

  // Returns the 32-bit unsigned count immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandCount(int operand_index);
  // Returns the 32-bit unsigned flag for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandFlag(int operand_index);
  // Returns the 32-bit zero-extended index immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandIdx(int operand_index);
  // Returns the smi index immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandIdxSmi(int operand_index);
  // Returns the 32-bit unsigned immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandUImm(int operand_index);
  // Returns the word-size unsigned immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandUImmWord(int operand_index);
  // Returns the 32-bit signed immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandImm(int operand_index);
  // Returns the word-size signed immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandImmIntPtr(int operand_index);
  // Returns the smi immediate for bytecode operand |operand_index| in the
  // current bytecode.
  compiler::Node* BytecodeOperandImmSmi(int operand_index);
  // Returns the word-size sign-extended register index for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandReg(int operand_index);
  // Returns the 32-bit unsigned runtime id immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandRuntimeId(int operand_index);
  // Returns the 32-bit unsigned intrinsic id immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandIntrinsicId(int operand_index);

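  // Illustrative sketch (not part of this interface): a handler for an
  // index-taking bytecode can combine the operand accessors above with the
  // constant pool and accumulator helpers declared below, e.g. to load a
  // constant into the accumulator and dispatch to the next bytecode:
  //
  //   compiler::Node* index = BytecodeOperandIdx(0);
  //   compiler::Node* constant = LoadConstantPoolEntry(index);
  //   SetAccumulator(constant);
  //   Dispatch();
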
  // Accumulator.
  compiler::Node* GetAccumulator();
  void SetAccumulator(compiler::Node* value);

  // Context.
  compiler::Node* GetContext();
  void SetContext(compiler::Node* value);

  // Context at |depth| in the context chain starting at |context|.
  compiler::Node* GetContextAtDepth(compiler::Node* context,
                                    compiler::Node* depth);

  // Goto the given |target| if the context chain starting at |context| has any
  // extensions up to the given |depth|.
  void GotoIfHasContextExtensionUpToDepth(compiler::Node* context,
                                          compiler::Node* depth, Label* target);

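  // Illustrative sketch (assumed usage; |depth| stands for an operand value
  // read elsewhere): a context-slot lookup handler can bail out to a slow
  // path when any context up to |depth| has an extension object, and
  // otherwise resolve the target context directly:
  //
  //   Label slow_path(this);
  //   GotoIfHasContextExtensionUpToDepth(GetContext(), depth, &slow_path);
  //   compiler::Node* target_context = GetContextAtDepth(GetContext(), depth);
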
  // Number of registers.
  compiler::Node* RegisterCount();

  // Back up/restore the register file to/from a fixed array of the correct
  // length.
  compiler::Node* ExportRegisterFile(compiler::Node* array);
  compiler::Node* ImportRegisterFile(compiler::Node* array);

  // Loads from and stores to the interpreter register file.
  compiler::Node* LoadRegister(Register reg);
  compiler::Node* LoadRegister(compiler::Node* reg_index);
  compiler::Node* LoadAndUntagRegister(Register reg);
  compiler::Node* StoreRegister(compiler::Node* value, Register reg);
  compiler::Node* StoreRegister(compiler::Node* value,
                                compiler::Node* reg_index);
  compiler::Node* StoreAndTagRegister(compiler::Node* value, Register reg);

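  // Illustrative sketch (assumed usage, not part of this interface): a
  // register-to-accumulator move in the style of the Ldar bytecode reads its
  // register operand and publishes the value through the accumulator:
  //
  //   compiler::Node* reg_index = BytecodeOperandReg(0);
  //   compiler::Node* value = LoadRegister(reg_index);
  //   SetAccumulator(value);
  //   Dispatch();
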
  // Returns the next consecutive register.
  compiler::Node* NextRegister(compiler::Node* reg_index);

  // Returns the location in memory of the register |reg_index| in the
  // interpreter register file.
  compiler::Node* RegisterLocation(compiler::Node* reg_index);

  // Load constant at |index| in the constant pool.
  compiler::Node* LoadConstantPoolEntry(compiler::Node* index);

  // Load and untag constant at |index| in the constant pool.
  compiler::Node* LoadAndUntagConstantPoolEntry(compiler::Node* index);

  // Load the FeedbackVector for the current function.
  compiler::Node* LoadFeedbackVector();

  // Increment the call count for a CALL_IC or construct call.
  // The call count is located at feedback_vector[slot_id + 1].
  compiler::Node* IncrementCallCount(compiler::Node* feedback_vector,
                                     compiler::Node* slot_id);

  // Call JSFunction or Callable |function| with |arg_count|
  // arguments (not including receiver) and the first argument
  // located at |first_arg|. Type feedback is collected in the
  // slot at index |slot_id|.
  compiler::Node* CallJSWithFeedback(compiler::Node* function,
                                     compiler::Node* context,
                                     compiler::Node* first_arg,
                                     compiler::Node* arg_count,
                                     compiler::Node* slot_id,
                                     compiler::Node* feedback_vector,
                                     TailCallMode tail_call_mode);

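  // Illustrative sketch (assumed usage; the operand layout is an assumption
  // for illustration): a call bytecode handler can gather its operands,
  // resolve the feedback vector, and issue the call with feedback collection:
  //
  //   compiler::Node* function = LoadRegister(BytecodeOperandReg(0));
  //   compiler::Node* first_arg = RegisterLocation(BytecodeOperandReg(1));
  //   compiler::Node* arg_count = BytecodeOperandCount(2);
  //   compiler::Node* slot_id = BytecodeOperandIdx(3);
  //   compiler::Node* feedback_vector = LoadFeedbackVector();
  //   compiler::Node* result =
  //       CallJSWithFeedback(function, GetContext(), first_arg, arg_count,
  //                          slot_id, feedback_vector,
  //                          TailCallMode::kDisallow);
  //   SetAccumulator(result);
  //   Dispatch();
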
  // Call JSFunction or Callable |function| with |arg_count|
  // arguments (not including receiver) and the first argument
  // located at |first_arg|.
  compiler::Node* CallJS(compiler::Node* function, compiler::Node* context,
                         compiler::Node* first_arg, compiler::Node* arg_count,
                         TailCallMode tail_call_mode);

  // Call JSFunction or Callable |function| with |arg_count|
  // arguments (not including receiver) and the first argument
  // located at |first_arg|. The last argument is always a spread.
  compiler::Node* CallJSWithSpread(compiler::Node* function,
                                   compiler::Node* context,
                                   compiler::Node* first_arg,
                                   compiler::Node* arg_count);

  // Call constructor |constructor| with |arg_count| arguments (not
  // including receiver) and the first argument located at
  // |first_arg|. The |new_target| is the same as the
  // |constructor| for the new keyword, but differs for the super
  // keyword.
  compiler::Node* Construct(compiler::Node* constructor,
                            compiler::Node* context, compiler::Node* new_target,
                            compiler::Node* first_arg,
                            compiler::Node* arg_count, compiler::Node* slot_id,
                            compiler::Node* feedback_vector);

  // Call constructor |constructor| with |arg_count| arguments (not including
  // receiver) and the first argument located at |first_arg|. The last argument
  // is always a spread. The |new_target| is the same as the |constructor| for
  // the new keyword, but differs for the super keyword.
  compiler::Node* ConstructWithSpread(compiler::Node* constructor,
                                      compiler::Node* context,
                                      compiler::Node* new_target,
                                      compiler::Node* first_arg,
                                      compiler::Node* arg_count);

  // Call the runtime function |function_id| with |arg_count| arguments and
  // the first argument located at |first_arg|.
  compiler::Node* CallRuntimeN(compiler::Node* function_id,
                               compiler::Node* context,
                               compiler::Node* first_arg,
                               compiler::Node* arg_count, int return_size = 1);

  // Jump forward relative to the current bytecode by the |jump_offset|.
  compiler::Node* Jump(compiler::Node* jump_offset);

  // Jump backward relative to the current bytecode by the |jump_offset|.
  compiler::Node* JumpBackward(compiler::Node* jump_offset);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // word values |lhs| and |rhs| are equal.
  void JumpIfWordEqual(compiler::Node* lhs, compiler::Node* rhs,
                       compiler::Node* jump_offset);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // word values |lhs| and |rhs| are not equal.
  void JumpIfWordNotEqual(compiler::Node* lhs, compiler::Node* rhs,
                          compiler::Node* jump_offset);

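  // Illustrative sketch (assumed usage, not part of this interface): a
  // JumpIfTrue-style handler can compare the accumulator against the true
  // value and take the relative jump encoded in its immediate operand:
  //
  //   compiler::Node* accumulator = GetAccumulator();
  //   compiler::Node* relative_jump = BytecodeOperandUImmWord(0);
  //   JumpIfWordEqual(accumulator, BooleanConstant(true), relative_jump);
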
  // Returns true if the stack guard check triggers an interrupt.
  compiler::Node* StackCheckTriggeredInterrupt();

  // Updates the profiler interrupt budget for a return.
  void UpdateInterruptBudgetOnReturn();

  // Returns the OSR nesting level from the bytecode header.
  compiler::Node* LoadOSRNestingLevel();

  // Dispatch to the bytecode.
  compiler::Node* Dispatch();

  // Dispatch to bytecode handler.
  compiler::Node* DispatchToBytecodeHandler(compiler::Node* handler) {
    return DispatchToBytecodeHandler(handler, BytecodeOffset());
  }

  // Dispatch bytecode as wide operand variant.
  void DispatchWide(OperandScale operand_scale);

  // Truncate tagged |value| to word32 and store the type feedback in
  // |var_type_feedback|.
  compiler::Node* TruncateTaggedToWord32WithFeedback(
      compiler::Node* context, compiler::Node* value,
      Variable* var_type_feedback);

  // Abort with the given bailout reason.
  void Abort(BailoutReason bailout_reason);
  void AbortIfWordNotEqual(compiler::Node* lhs, compiler::Node* rhs,
                           BailoutReason bailout_reason);

  // Dispatch to frame dropper trampoline if necessary.
  void MaybeDropFrames(compiler::Node* context);

  // Returns the offset from the BytecodeArrayPointer of the current bytecode.
  compiler::Node* BytecodeOffset();

  // Save the bytecode offset to the interpreter frame.
  void SaveBytecodeOffset();

 protected:
  Bytecode bytecode() const { return bytecode_; }
  static bool TargetSupportsUnalignedAccess();

 private:
  // Returns a tagged pointer to the current function's BytecodeArray object.
  compiler::Node* BytecodeArrayTaggedPointer();

  // Returns a raw pointer to the first entry in the interpreter dispatch
  // table.
  compiler::Node* DispatchTableRawPointer();

  // Returns the accumulator value without checking whether the bytecode
  // uses it. This is intended to be used only in dispatch and in
  // tracing as these need to bypass accumulator use validity checks.
  compiler::Node* GetAccumulatorUnchecked();

  // Returns the frame pointer for the interpreted frame of the function being
  // interpreted.
  compiler::Node* GetInterpretedFramePointer();

  // Saves and restores the interpreter bytecode offset to the interpreter
  // stack frame when performing a call.
  void CallPrologue();
  void CallEpilogue();

  // Increment the dispatch counter for the (current, next) bytecode pair.
  void TraceBytecodeDispatch(compiler::Node* target_index);

  // Traces the current bytecode by calling |function_id|.
  void TraceBytecode(Runtime::FunctionId function_id);

  // Updates the bytecode array's interrupt budget by a 32-bit unsigned |weight|
  // and calls Runtime::kInterrupt if the counter reaches zero. If |backward|,
  // then the interrupt budget is decremented, otherwise it is incremented.
  void UpdateInterruptBudget(compiler::Node* weight, bool backward);

  // Returns the offset of register |index| relative to RegisterFilePointer().
  compiler::Node* RegisterFrameOffset(compiler::Node* index);

  // Returns the offset of an operand relative to the current bytecode offset.
  compiler::Node* OperandOffset(int operand_index);

  // Returns a value built from a sequence of bytes in the bytecode
  // array starting at |relative_offset| from the current bytecode.
  // The |result_type| determines the size and signedness of the value
  // read. This method should only be used on architectures that do not
  // support unaligned memory accesses.
  compiler::Node* BytecodeOperandReadUnaligned(int relative_offset,
                                               MachineType result_type);

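  // Illustrative sketch (assumed shape, not the actual implementation; the
  // names |array|, |offset| and |offset_plus_1| are placeholders): on a
  // target without unaligned loads, a 16-bit operand could be assembled from
  // two byte-wide loads combined with shifts, e.g. for a little-endian
  // layout:
  //
  //   compiler::Node* low = Load(MachineType::Uint8(), array, offset);
  //   compiler::Node* high = Load(MachineType::Uint8(), array, offset_plus_1);
  //   compiler::Node* operand =
  //       Word32Or(Word32Shl(high, Int32Constant(8)), low);
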
  // Returns the operand value zero- or sign-extended to word32.
  compiler::Node* BytecodeOperandUnsignedByte(int operand_index);
  compiler::Node* BytecodeOperandSignedByte(int operand_index);
  compiler::Node* BytecodeOperandUnsignedShort(int operand_index);
  compiler::Node* BytecodeOperandSignedShort(int operand_index);
  compiler::Node* BytecodeOperandUnsignedQuad(int operand_index);
  compiler::Node* BytecodeOperandSignedQuad(int operand_index);

  // Returns the value of an operand of the given size, zero- or sign-extended
  // to word32.
  compiler::Node* BytecodeSignedOperand(int operand_index,
                                        OperandSize operand_size);
  compiler::Node* BytecodeUnsignedOperand(int operand_index,
                                          OperandSize operand_size);

  // Jump relative to the current bytecode by the |jump_offset|. If |backward|,
  // then jump backward (subtract the offset), otherwise jump forward (add the
  // offset). Helper function for Jump and JumpBackward.
  compiler::Node* Jump(compiler::Node* jump_offset, bool backward);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // |condition| is true. Helper function for JumpIfWordEqual and
  // JumpIfWordNotEqual.
  void JumpConditional(compiler::Node* condition, compiler::Node* jump_offset);

  // Updates and returns BytecodeOffset() advanced by the current bytecode's
  // size. Traces the exit of the current bytecode.
  compiler::Node* Advance();

  // Updates and returns BytecodeOffset() advanced by delta bytecodes.
  // Traces the exit of the current bytecode.
  compiler::Node* Advance(int delta);
  compiler::Node* Advance(compiler::Node* delta, bool backward = false);

  // Load the bytecode at |bytecode_offset|.
  compiler::Node* LoadBytecode(compiler::Node* bytecode_offset);

  // Look ahead for Star and inline it in a branch. Returns a new target
  // bytecode node for dispatch.
  compiler::Node* StarDispatchLookahead(compiler::Node* target_bytecode);

  // Build code for Star at the current BytecodeOffset() and Advance() to the
  // next dispatch offset.
  void InlineStar();

  // Dispatch to |target_bytecode| at |new_bytecode_offset|.
  // |target_bytecode| should be equivalent to loading from the offset.
  compiler::Node* DispatchToBytecode(compiler::Node* target_bytecode,
                                     compiler::Node* new_bytecode_offset);

  // Dispatch to the bytecode handler with code offset |handler|.
  compiler::Node* DispatchToBytecodeHandler(compiler::Node* handler,
                                            compiler::Node* bytecode_offset);

  // Dispatch to the bytecode handler with code entry point |handler_entry|.
  compiler::Node* DispatchToBytecodeHandlerEntry(
      compiler::Node* handler_entry, compiler::Node* bytecode_offset);

  OperandScale operand_scale() const { return operand_scale_; }

  Bytecode bytecode_;
  OperandScale operand_scale_;
  CodeStubAssembler::Variable bytecode_offset_;
  CodeStubAssembler::Variable interpreted_frame_pointer_;
  CodeStubAssembler::Variable bytecode_array_;
  CodeStubAssembler::Variable dispatch_table_;
  CodeStubAssembler::Variable accumulator_;
  AccumulatorUse accumulator_use_;
  bool made_call_;
  bool reloaded_frame_ptr_;
  bool saved_bytecode_offset_;

  bool disable_stack_check_across_call_;
  compiler::Node* stack_pointer_before_call_;

  DISALLOW_COPY_AND_ASSIGN(InterpreterAssembler);
};

}  // namespace interpreter
}  // namespace internal
}  // namespace v8

#endif  // V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_