// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/interpreter-assembler.h"

#include <limits>
#include <ostream>

#include "src/code-factory.h"
#include "src/frames.h"
#include "src/interface-descriptors.h"
#include "src/interpreter/bytecodes.h"
#include "src/interpreter/interpreter.h"
#include "src/machine-type.h"
#include "src/macro-assembler.h"
#include "src/zone.h"

namespace v8 {
namespace internal {
namespace interpreter {

using compiler::Node;

InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           Bytecode bytecode,
                                           OperandScale operand_scale)
    : CodeStubAssembler(isolate, zone, InterpreterDispatchDescriptor(isolate),
                        Code::ComputeFlags(Code::BYTECODE_HANDLER),
                        Bytecodes::ToString(bytecode),
                        Bytecodes::ReturnCount(bytecode)),
      bytecode_(bytecode),
      operand_scale_(operand_scale),
      interpreted_frame_pointer_(this, MachineType::PointerRepresentation()),
      accumulator_(this, MachineRepresentation::kTagged),
      accumulator_use_(AccumulatorUse::kNone),
      made_call_(false),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
  accumulator_.Bind(
      Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
}
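
// Note: every handler built with this assembler receives the same four
// dispatch parameters (accumulator, bytecode offset, bytecode array,
// dispatch table) and forwards them when tail-calling the next handler in
// DispatchToBytecodeHandlerEntry below.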

InterpreterAssembler::~InterpreterAssembler() {
  // If the following check fails, the handler does not use the
  // accumulator in the way described in the bytecode definitions in
  // bytecodes.h.
  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
}

Node* InterpreterAssembler::GetInterpretedFramePointer() {
  if (!interpreted_frame_pointer_.IsBound()) {
    interpreted_frame_pointer_.Bind(LoadParentFramePointer());
  }
  return interpreted_frame_pointer_.value();
}

Node* InterpreterAssembler::GetAccumulatorUnchecked() {
  return accumulator_.value();
}

Node* InterpreterAssembler::GetAccumulator() {
  DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
  return GetAccumulatorUnchecked();
}

void InterpreterAssembler::SetAccumulator(Node* value) {
  DCHECK(Bytecodes::WritesAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
  accumulator_.Bind(value);
}

Node* InterpreterAssembler::GetContext() {
  return LoadRegister(Register::current_context());
}

void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
}

Node* InterpreterAssembler::BytecodeOffset() {
  return Parameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter);
}

Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  if (made_call_) {
    // If we have made a call, restore bytecode array from stack frame in case
    // the debugger has swapped us to the patched debugger bytecode array.
    return LoadRegister(Register::bytecode_array());
  } else {
    return Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter);
  }
}

Node* InterpreterAssembler::DispatchTableRawPointer() {
  return Parameter(InterpreterDispatchDescriptor::kDispatchTableParameter);
}

Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return IntPtrAdd(GetInterpretedFramePointer(),
                   RegisterFrameOffset(reg_index));
}

Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}
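
// Illustrative example, assuming a 64-bit target (kPointerSizeLog2 == 3):
// a register operand of -2 maps to a frame offset of -2 << 3 == -16 bytes
// from the interpreted frame pointer.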

Node* InterpreterAssembler::LoadRegister(Register reg) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              IntPtrConstant(reg.ToOperand() << kPointerSizeLog2));
}

Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              RegisterFrameOffset(reg_index));
}

Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, GetInterpretedFramePointer(),
      IntPtrConstant(reg.ToOperand() << kPointerSizeLog2), value);
}

Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             GetInterpretedFramePointer(),
                             RegisterFrameOffset(reg_index), value);
}

Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next register's index is the
  // current index minus one.
  return IntPtrAdd(reg_index, IntPtrConstant(-1));
}

Node* InterpreterAssembler::OperandOffset(int operand_index) {
  return IntPtrConstant(
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
}

Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), operand_offset));
}

Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                    IntPtrAdd(BytecodeOffset(), operand_offset));

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}

compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
    int relative_offset, MachineType result_type) {
  static const int kMaxCount = 4;
  DCHECK(!TargetSupportsUnalignedAccess());

  int count;
  switch (result_type.representation()) {
    case MachineRepresentation::kWord16:
      count = 2;
      break;
    case MachineRepresentation::kWord32:
      count = 4;
      break;
    default:
      UNREACHABLE();
      break;
  }
  MachineType msb_type =
      result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();

#if V8_TARGET_LITTLE_ENDIAN
  const int kStep = -1;
  int msb_offset = count - 1;
#elif V8_TARGET_BIG_ENDIAN
  const int kStep = 1;
  int msb_offset = 0;
#else
#error "Unknown Architecture"
#endif

  // Read the most significant byte into bytes[0] and then the remaining
  // bytes in order down to the least significant byte in bytes[count - 1].
  DCHECK(count <= kMaxCount);
  compiler::Node* bytes[kMaxCount];
  for (int i = 0; i < count; i++) {
    MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
    Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
    Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
    bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
  }

  // Pack LSB to MSB.
  Node* result = bytes[--count];
  for (int i = 1; --count >= 0; i++) {
    Node* shift = Int32Constant(i * kBitsPerByte);
    Node* value = Word32Shl(bytes[count], shift);
    result = Word32Or(value, result);
  }
  return result;
}
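
// Illustrative sketch of the unaligned read above, assuming a little-endian
// target and a 16-bit operand at relative_offset:
//   bytes[0] = Load(msb_type, offset + 1);          // most significant byte
//   bytes[1] = Load(MachineType::Uint8(), offset);  // least significant byte
//   result   = Word32Or(Word32Shl(bytes[0], 8), bytes[1]);
// Only the most significant byte is loaded with the signed type, so the
// packed 32-bit result carries the correct sign.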

Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
  }
}

Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}

Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
  }
}

Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}

Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
                                                  OperandSize operand_size) {
  DCHECK(!Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandSignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandSignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandSignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
                                                    OperandSize operand_size) {
  DCHECK(Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandUnsignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandUnsignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandUnsignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  DCHECK_EQ(OperandType::kRegCount,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
  DCHECK_EQ(OperandType::kFlag8,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(OperandType::kImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  DCHECK(OperandType::kIdx ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  DCHECK(Bytecodes::IsRegisterOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
  DCHECK(OperandType::kRuntimeId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kShort);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
  DCHECK(OperandType::kIntrinsicId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
}
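
// The entry offset above is relative to the tagged constant pool pointer:
// for example, assuming kHeapObjectTag == 1 and a 16-byte FixedArray header
// on a 64-bit target, entry i is loaded from byte offset 15 + (i << 3).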

Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return Load(MachineType::AnyTagged(), context,
              IntPtrConstant(Context::SlotOffset(slot_index)));
}

Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Load(MachineType::AnyTagged(), context, offset);
}

Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, context, offset, value);
}

Node* InterpreterAssembler::LoadTypeFeedbackVector() {
  Node* function = LoadRegister(Register::function_closure());
  Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
  Node* vector =
      LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
  return vector;
}

void InterpreterAssembler::CallPrologue() {
  StoreRegister(SmiTag(BytecodeOffset()), Register::bytecode_offset());

  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    DCHECK(stack_pointer_before_call_ == nullptr);
    stack_pointer_before_call_ = LoadStackPointer();
  }
  made_call_ = true;
}

void InterpreterAssembler::CallEpilogue() {
  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    Node* stack_pointer_after_call = LoadStackPointer();
    Node* stack_pointer_before_call = stack_pointer_before_call_;
    stack_pointer_before_call_ = nullptr;
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }
}

Node* InterpreterAssembler::CallJS(Node* function, Node* context,
                                   Node* first_arg, Node* arg_count,
                                   TailCallMode tail_call_mode) {
  Callable callable =
      CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function);
}

Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                          Node* new_target, Node* first_arg,
                                          Node* arg_count) {
  Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate());
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  new_target, constructor, first_arg);
}

Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           IntPtrConstant(offsetof(Runtime::Function, entry)));

  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function_entry, result_size);
}
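
// Sketch of the lookup above: the runtime function table is a contiguous
// array of Runtime::Function records, so
//   function       = function_table + function_id * sizeof(Runtime::Function)
//   function_entry = *(function + offsetof(Runtime::Function, entry))
// yields the C entry point that the InterpreterCEntry stub then calls.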

void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
  Label ok(this), interrupt_check(this, Label::kDeferred), end(this);
  Node* budget_offset =
      IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);

  // Update budget by |weight| and check if it reaches zero.
  Variable new_budget(this, MachineRepresentation::kWord32);
  Node* old_budget =
      Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
  new_budget.Bind(Int32Add(old_budget, weight));
  Node* condition =
      Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
  Branch(condition, &ok, &interrupt_check);

  // Perform interrupt and reset budget.
  Bind(&interrupt_check);
  {
    CallRuntime(Runtime::kInterrupt, GetContext());
    new_budget.Bind(Int32Constant(Interpreter::InterruptBudget()));
    Goto(&ok);
  }

  // Update budget.
  Bind(&ok);
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset,
                      new_budget.value());
}

Node* InterpreterAssembler::Advance(int delta) {
  return IntPtrAdd(BytecodeOffset(), IntPtrConstant(delta));
}

Node* InterpreterAssembler::Advance(Node* delta) {
  return IntPtrAdd(BytecodeOffset(), delta);
}

Node* InterpreterAssembler::Jump(Node* delta) {
  UpdateInterruptBudget(delta);
  return DispatchTo(Advance(delta));
}

void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  Label match(this), no_match(this);

  BranchIf(condition, &match, &no_match);
  Bind(&match);
  Jump(delta);
  Bind(&no_match);
  Dispatch();
}

void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  JumpConditional(WordEqual(lhs, rhs), delta);
}

void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                              Node* delta) {
  JumpConditional(WordNotEqual(lhs, rhs), delta);
}

Node* InterpreterAssembler::Dispatch() {
  return DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_)));
}

Node* InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
  Node* target_bytecode = Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);
  if (kPointerSize == 8) {
    target_bytecode = ChangeUint32ToUint64(target_bytecode);
  }

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(target_bytecode);
  }

  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_bytecode, IntPtrConstant(kPointerSizeLog2)));

  return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset);
}
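
// The dispatch table is a flat array of handler entry addresses indexed by
// bytecode value, so the lookup above is effectively
//   entry = dispatch_table[target_bytecode]
// i.e. a load at target_bytecode << kPointerSizeLog2 bytes into the table.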

Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                      Node* bytecode_offset) {
  Node* handler_entry =
      IntPtrAdd(handler, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset);
}

Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    Node* handler_entry, Node* bytecode_offset) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }

  InterpreterDispatchDescriptor descriptor(isolate());
  Node* args[] = {GetAccumulatorUnchecked(), bytecode_offset,
                  BytecodeArrayTaggedPointer(), DispatchTableRawPointer()};
  return TailCallBytecodeDispatch(descriptor, handler_entry, args);
}

void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
  // Dispatching a wide bytecode requires treating the prefix bytecode as
  // a base pointer into the dispatch table and dispatching the bytecode
  // that follows relative to this base.
  //
  //   Indices 0-255 correspond to bytecodes with operand_scale == 0
  //   Indices 256-511 correspond to bytecodes with operand_scale == 1
  //   Indices 512-767 correspond to bytecodes with operand_scale == 2
  Node* next_bytecode_offset = Advance(1);
  Node* next_bytecode = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                             next_bytecode_offset);
  if (kPointerSize == 8) {
    next_bytecode = ChangeUint32ToUint64(next_bytecode);
  }

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(next_bytecode);
  }

  Node* base_index;
  switch (operand_scale) {
    case OperandScale::kDouble:
      base_index = IntPtrConstant(1 << kBitsPerByte);
      break;
    case OperandScale::kQuadruple:
      base_index = IntPtrConstant(2 << kBitsPerByte);
      break;
    default:
      UNREACHABLE();
      base_index = nullptr;
  }
  Node* target_index = IntPtrAdd(base_index, next_bytecode);
  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_index, kPointerSizeLog2));

  DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset);
}
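
// Worked example (with a hypothetical bytecode value): if a kDouble-scale
// prefix precedes a bytecode whose value is 0x0b, the handler is fetched
// from dispatch table index (1 << 8) + 0x0b == 267, the double-scale
// variant of that bytecode's handler.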

void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
  // TODO(rmcilroy): Investigate whether it is worth supporting self
  // optimization of primitive functions like FullCodegen.

  // Update profiling count by -BytecodeOffset to simulate backedge to start of
  // function.
  Node* profiling_weight =
      Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
               BytecodeOffset());
  UpdateInterruptBudget(profiling_weight);
}
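
// The weight is roughly the negative of the return site's offset from the
// first bytecode, so longer functions drain the budget faster; once the
// budget drops below zero, UpdateInterruptBudget calls Runtime::kInterrupt
// and resets it (see above).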

Node* InterpreterAssembler::StackCheckTriggeredInterrupt() {
  Node* sp = LoadStackPointer();
  Node* stack_limit = Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
  return UintPtrLessThan(sp, stack_limit);
}

void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
  disable_stack_check_across_call_ = true;
  Node* abort_id = SmiTag(Int32Constant(bailout_reason));
  CallRuntime(Runtime::kAbort, GetContext(), abort_id);
  disable_stack_check_across_call_ = false;
}

void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               BailoutReason bailout_reason) {
  Label ok(this), abort(this, Label::kDeferred);
  BranchIfWordEqual(lhs, rhs, &ok, &abort);

  Bind(&abort);
  Abort(bailout_reason);
  Goto(&ok);

  Bind(&ok);
}

void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
}

void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
  Node* counters_table = ExternalConstant(
      ExternalReference::interpreter_dispatch_counters(isolate()));
  Node* source_bytecode_table_index = IntPtrConstant(
      static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));

  Node* counter_offset =
      WordShl(IntPtrAdd(source_bytecode_table_index, target_bytecode),
              IntPtrConstant(kPointerSizeLog2));
  Node* old_counter =
      Load(MachineType::IntPtr(), counters_table, counter_offset);

  Label counter_ok(this), counter_saturated(this, Label::kDeferred);

  Node* counter_reached_max = WordEqual(
      old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
  BranchIf(counter_reached_max, &counter_saturated, &counter_ok);

  Bind(&counter_ok);
  {
    Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
                        counter_offset, new_counter);
    Goto(&counter_saturated);
  }

  Bind(&counter_saturated);
}
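
// The counters table is a flattened (kLast + 1) x (kLast + 1) matrix of
// uintptr_t counters indexed as [source_bytecode][target_bytecode], so the
// slot updated above sits at byte offset
//   (source * (kLast + 1) + target) << kPointerSizeLog2.
// Counters saturate at the uintptr_t maximum instead of wrapping.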

// static
bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  return false;
#elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
  return CpuFeatures::IsSupported(UNALIGNED_ACCESSES);
#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 || \
    V8_TARGET_ARCH_S390
  return true;
#else
#error "Unknown Architecture"
#endif
}

Node* InterpreterAssembler::RegisterCount() {
  Node* bytecode_array = LoadRegister(Register::bytecode_array());
  Node* frame_size = LoadObjectField(
      bytecode_array, BytecodeArray::kFrameSizeOffset, MachineType::Int32());
  return Word32Sar(frame_size, Int32Constant(kPointerSizeLog2));
}
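
// The frame size is recorded in bytes, so on a 64-bit target a frame size
// of, say, 40 corresponds to 40 >> 3 == 5 interpreter registers.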

Node* InterpreterAssembler::ExportRegisterFile(Node* array) {
  if (FLAG_debug_code) {
    Node* array_size = SmiUntag(LoadFixedArrayBaseLength(array));
    AbortIfWordNotEqual(
        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
  }

  Variable var_index(this, MachineRepresentation::kWord32);
  var_index.Bind(Int32Constant(0));

  // Iterate over register file and write values into array.
  // The mapping of register to array index must match that used in
  // BytecodeGraphBuilder::VisitResumeGenerator.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    Node* condition = Int32LessThan(index, RegisterCount());
    GotoUnless(condition, &done_loop);

    Node* reg_index =
        Int32Sub(Int32Constant(Register(0).ToOperand()), index);
    Node* value = LoadRegister(ChangeInt32ToIntPtr(reg_index));

    StoreFixedArrayElement(array, index, value);

    var_index.Bind(Int32Add(index, Int32Constant(1)));
    Goto(&loop);
  }
  Bind(&done_loop);

  return array;
}

Node* InterpreterAssembler::ImportRegisterFile(Node* array) {
  if (FLAG_debug_code) {
    Node* array_size = SmiUntag(LoadFixedArrayBaseLength(array));
    AbortIfWordNotEqual(
        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
  }

  Variable var_index(this, MachineRepresentation::kWord32);
  var_index.Bind(Int32Constant(0));

  // Iterate over array and write values into register file.  Also erase the
  // array contents to not keep them alive artificially.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    Node* condition = Int32LessThan(index, RegisterCount());
    GotoUnless(condition, &done_loop);

    Node* value = LoadFixedArrayElement(array, index);

    Node* reg_index =
        Int32Sub(Int32Constant(Register(0).ToOperand()), index);
    StoreRegister(value, ChangeInt32ToIntPtr(reg_index));

    StoreFixedArrayElement(array, index, StaleRegisterConstant());

    var_index.Bind(Int32Add(index, Int32Constant(1)));
    Goto(&loop);
  }
  Bind(&done_loop);

  return array;
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8