// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/instruction.h"

#include <iomanip>

#include "src/compiler/common-operator.h"
#include "src/compiler/graph.h"
#include "src/compiler/schedule.h"
#include "src/compiler/state-values-utils.h"
#include "src/source-position.h"

namespace v8 {
namespace internal {
namespace compiler {

const RegisterConfiguration* (*GetRegConfig)() = RegisterConfiguration::Default;

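// Returns the condition that tests the same relation with the operands
// swapped. A quick illustrative example: "a < b" holds exactly when "b > a"
// holds, so kSignedLessThan commutes to kSignedGreaterThan; conditions such
// as kEqual, which are unaffected by swapping the operands, are returned
// unchanged.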
FlagsCondition CommuteFlagsCondition(FlagsCondition condition) {
  switch (condition) {
    case kSignedLessThan:
      return kSignedGreaterThan;
    case kSignedGreaterThanOrEqual:
      return kSignedLessThanOrEqual;
    case kSignedLessThanOrEqual:
      return kSignedGreaterThanOrEqual;
    case kSignedGreaterThan:
      return kSignedLessThan;
    case kUnsignedLessThan:
      return kUnsignedGreaterThan;
    case kUnsignedGreaterThanOrEqual:
      return kUnsignedLessThanOrEqual;
    case kUnsignedLessThanOrEqual:
      return kUnsignedGreaterThanOrEqual;
    case kUnsignedGreaterThan:
      return kUnsignedLessThan;
    case kFloatLessThanOrUnordered:
      return kFloatGreaterThanOrUnordered;
    case kFloatGreaterThanOrEqual:
      return kFloatLessThanOrEqual;
    case kFloatLessThanOrEqual:
      return kFloatGreaterThanOrEqual;
    case kFloatGreaterThanOrUnordered:
      return kFloatLessThanOrUnordered;
    case kFloatLessThan:
      return kFloatGreaterThan;
    case kFloatGreaterThanOrEqualOrUnordered:
      return kFloatLessThanOrEqualOrUnordered;
    case kFloatLessThanOrEqualOrUnordered:
      return kFloatGreaterThanOrEqualOrUnordered;
    case kFloatGreaterThan:
      return kFloatLessThan;
    case kPositiveOrZero:
    case kNegative:
      UNREACHABLE();
      break;
    case kEqual:
    case kNotEqual:
    case kOverflow:
    case kNotOverflow:
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      return condition;
  }
  UNREACHABLE();
}

bool InstructionOperand::InterferesWith(const InstructionOperand& other) const {
  if (kSimpleFPAliasing || !this->IsFPLocationOperand() ||
      !other.IsFPLocationOperand())
    return EqualsCanonicalized(other);
  // Aliasing is complex and both operands are fp locations.
  const LocationOperand& loc = *LocationOperand::cast(this);
  const LocationOperand& other_loc = LocationOperand::cast(other);
  LocationOperand::LocationKind kind = loc.location_kind();
  LocationOperand::LocationKind other_kind = other_loc.location_kind();
  if (kind != other_kind) return false;
  MachineRepresentation rep = loc.representation();
  MachineRepresentation other_rep = other_loc.representation();
  if (rep == other_rep) return EqualsCanonicalized(other);
  if (kind == LocationOperand::REGISTER) {
    // FP register-register interference.
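    // Illustrative note (aliasing details vary by target): on ARM, for
    // example, the float registers s0 and s1 both overlap the double
    // register d0, so a kFloat32 value in s1 interferes with a kFloat64
    // value in d0 even though the two operands are not equal.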
    return GetRegConfig()->AreAliases(rep, loc.register_code(), other_rep,
                                      other_loc.register_code());
  } else {
    // FP slot-slot interference. Slots of different FP reps can alias because
    // the gap resolver may break a move into 2 or 4 equivalent smaller moves.
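    // Worked example of the index arithmetic below, assuming kPointerSize is
    // 4 (a 32-bit target): a kFloat64 slot with index() == 5 occupies the
    // pointer-sized slots 4 and 5, so it interferes with a kFloat32 slot at
    // index 4 or 5 but not with one at index 3.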
    DCHECK_EQ(LocationOperand::STACK_SLOT, kind);
    int index_hi = loc.index();
    int index_lo = index_hi - (1 << ElementSizeLog2Of(rep)) / kPointerSize + 1;
    int other_index_hi = other_loc.index();
    int other_index_lo =
        other_index_hi - (1 << ElementSizeLog2Of(other_rep)) / kPointerSize + 1;
    return other_index_hi >= index_lo && index_hi >= other_index_lo;
  }
  return false;
}

bool LocationOperand::IsCompatible(LocationOperand* op) {
  if (IsRegister() || IsStackSlot()) {
    return op->IsRegister() || op->IsStackSlot();
  } else if (kSimpleFPAliasing) {
    // A backend may choose to generate the same instruction sequence
    // regardless of the FP representation. As a result, we can relax the
    // compatibility check and, for example, allow a Double to be moved into a
    // Float. However, this is only allowed if the registers do not overlap.
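    // In effect, any FP register or FP stack slot is considered compatible
    // with any other FP register or FP stack slot here; e.g. a kFloat64
    // register operand is compatible with a kFloat32 register operand.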
    return (IsFPRegister() || IsFPStackSlot()) &&
           (op->IsFPRegister() || op->IsFPStackSlot());
  } else if (IsFloatRegister() || IsFloatStackSlot()) {
    return op->IsFloatRegister() || op->IsFloatStackSlot();
  } else if (IsDoubleRegister() || IsDoubleStackSlot()) {
    return op->IsDoubleRegister() || op->IsDoubleStackSlot();
  } else {
    return (IsSimd128Register() || IsSimd128StackSlot()) &&
           (op->IsSimd128Register() || op->IsSimd128StackSlot());
  }
}

void InstructionOperand::Print(const RegisterConfiguration* config) const {
  PrintableInstructionOperand wrapper;
  wrapper.register_configuration_ = config;
  wrapper.op_ = *this;
  StdoutStream{} << wrapper << std::endl;
}

void InstructionOperand::Print() const { Print(GetRegConfig()); }

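// A rough sketch of the notation printed below (register names assume an x64
// configuration and are only illustrative): unallocated operands print as
// "v42" plus a policy suffix such as "(R)" for MUST_HAVE_REGISTER, constants
// as "[constant:42]", immediates as "#7" or "[immediate:3]", and allocated
// locations as e.g. "[rax|R|t]" (location, kind, representation).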
std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionOperand& printable) {
  const InstructionOperand& op = printable.op_;
  const RegisterConfiguration* conf = printable.register_configuration_;
  switch (op.kind()) {
    case InstructionOperand::UNALLOCATED: {
      const UnallocatedOperand* unalloc = UnallocatedOperand::cast(&op);
      os << "v" << unalloc->virtual_register();
      if (unalloc->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
        return os << "(=" << unalloc->fixed_slot_index() << "S)";
      }
      switch (unalloc->extended_policy()) {
        case UnallocatedOperand::NONE:
          return os;
        case UnallocatedOperand::FIXED_REGISTER:
          return os << "(="
                    << conf->GetGeneralRegisterName(
                           unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::FIXED_FP_REGISTER:
          return os << "(="
                    << conf->GetDoubleRegisterName(
                           unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          return os << "(R)";
        case UnallocatedOperand::MUST_HAVE_SLOT:
          return os << "(S)";
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          return os << "(1)";
        case UnallocatedOperand::REGISTER_OR_SLOT:
          return os << "(-)";
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          return os << "(*)";
      }
    }
    case InstructionOperand::CONSTANT:
      return os << "[constant:" << ConstantOperand::cast(op).virtual_register()
                << "]";
    case InstructionOperand::IMMEDIATE: {
      ImmediateOperand imm = ImmediateOperand::cast(op);
      switch (imm.type()) {
        case ImmediateOperand::INLINE:
          return os << "#" << imm.inline_value();
        case ImmediateOperand::INDEXED:
          return os << "[immediate:" << imm.indexed_value() << "]";
      }
    }
    case InstructionOperand::EXPLICIT:
    case InstructionOperand::ALLOCATED: {
      LocationOperand allocated = LocationOperand::cast(op);
      if (op.IsStackSlot()) {
        os << "[stack:" << allocated.index();
      } else if (op.IsFPStackSlot()) {
        os << "[fp_stack:" << allocated.index();
      } else if (op.IsRegister()) {
        os << "["
           << GetRegConfig()->GetGeneralOrSpecialRegisterName(
                  allocated.register_code())
           << "|R";
      } else if (op.IsDoubleRegister()) {
        os << "["
           << GetRegConfig()->GetDoubleRegisterName(allocated.register_code())
           << "|R";
      } else if (op.IsFloatRegister()) {
        os << "["
           << GetRegConfig()->GetFloatRegisterName(allocated.register_code())
           << "|R";
      } else {
        DCHECK(op.IsSimd128Register());
        os << "["
           << GetRegConfig()->GetSimd128RegisterName(allocated.register_code())
           << "|R";
      }
      if (allocated.IsExplicit()) {
        os << "|E";
      }
      switch (allocated.representation()) {
        case MachineRepresentation::kNone:
          os << "|-";
          break;
        case MachineRepresentation::kBit:
          os << "|b";
          break;
        case MachineRepresentation::kWord8:
          os << "|w8";
          break;
        case MachineRepresentation::kWord16:
          os << "|w16";
          break;
        case MachineRepresentation::kWord32:
          os << "|w32";
          break;
        case MachineRepresentation::kWord64:
          os << "|w64";
          break;
        case MachineRepresentation::kFloat32:
          os << "|f32";
          break;
        case MachineRepresentation::kFloat64:
          os << "|f64";
          break;
        case MachineRepresentation::kSimd128:
          os << "|s128";
          break;
        case MachineRepresentation::kTaggedSigned:
          os << "|ts";
          break;
        case MachineRepresentation::kTaggedPointer:
          os << "|tp";
          break;
        case MachineRepresentation::kTagged:
          os << "|t";
          break;
      }
      return os << "]";
    }
    case InstructionOperand::INVALID:
      return os << "(x)";
  }
  UNREACHABLE();
}

void MoveOperands::Print(const RegisterConfiguration* config) const {
  StdoutStream os;
  PrintableInstructionOperand wrapper;
  wrapper.register_configuration_ = config;
  wrapper.op_ = destination();
  os << wrapper << " = ";
  wrapper.op_ = source();
  os << wrapper << std::endl;
}

void MoveOperands::Print() const { Print(GetRegConfig()); }

std::ostream& operator<<(std::ostream& os,
                         const PrintableMoveOperands& printable) {
  const MoveOperands& mo = *printable.move_operands_;
  PrintableInstructionOperand printable_op = {printable.register_configuration_,
                                              mo.destination()};
  os << printable_op;
  if (!mo.source().Equals(mo.destination())) {
    printable_op.op_ = mo.source();
    os << " = " << printable_op;
  }
  return os << ";";
}


bool ParallelMove::IsRedundant() const {
  for (MoveOperands* move : *this) {
    if (!move->IsRedundant()) return false;
  }
  return true;
}

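// A short sketch of PrepareInsertAfter's effect (operand names are purely
// illustrative): if this ParallelMove already contains {B = A} and "move" is
// {C = B}, then move is rewritten to {C = A} so that it observes B's incoming
// value, and any existing move whose destination overlaps C is recorded in
// to_eliminate because C is about to be overwritten anyway.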
void ParallelMove::PrepareInsertAfter(
    MoveOperands* move, ZoneVector<MoveOperands*>* to_eliminate) const {
  bool no_aliasing =
      kSimpleFPAliasing || !move->destination().IsFPLocationOperand();
  MoveOperands* replacement = nullptr;
  MoveOperands* eliminated = nullptr;
  for (MoveOperands* curr : *this) {
    if (curr->IsEliminated()) continue;
    if (curr->destination().EqualsCanonicalized(move->source())) {
      // curr writes to move's source, so move must read from curr's source
      // instead in order to be inserted into this ParallelMove.
      DCHECK(!replacement);
      replacement = curr;
      if (no_aliasing && eliminated != nullptr) break;
    } else if (curr->destination().InterferesWith(move->destination())) {
      // We can eliminate curr, since move overwrites at least a part of its
      // destination, implying its value is no longer live.
      eliminated = curr;
      to_eliminate->push_back(curr);
      if (no_aliasing && replacement != nullptr) break;
    }
  }
  if (replacement != nullptr) move->set_source(replacement->source());
}

ExplicitOperand::ExplicitOperand(LocationKind kind, MachineRepresentation rep,
                                 int index)
    : LocationOperand(EXPLICIT, kind, rep, index) {
  DCHECK_IMPLIES(kind == REGISTER && !IsFloatingPoint(rep),
                 GetRegConfig()->IsAllocatableGeneralCode(index));
  DCHECK_IMPLIES(kind == REGISTER && rep == MachineRepresentation::kFloat32,
                 GetRegConfig()->IsAllocatableFloatCode(index));
  DCHECK_IMPLIES(kind == REGISTER && (rep == MachineRepresentation::kFloat64),
                 GetRegConfig()->IsAllocatableDoubleCode(index));
}

Instruction::Instruction(InstructionCode opcode)
    : opcode_(opcode),
      bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
                 TempCountField::encode(0) | IsCallField::encode(false)),
      reference_map_(nullptr),
      block_(nullptr) {
  parallel_moves_[0] = nullptr;
  parallel_moves_[1] = nullptr;
}

Instruction::Instruction(InstructionCode opcode, size_t output_count,
                         InstructionOperand* outputs, size_t input_count,
                         InstructionOperand* inputs, size_t temp_count,
                         InstructionOperand* temps)
    : opcode_(opcode),
      bit_field_(OutputCountField::encode(output_count) |
                 InputCountField::encode(input_count) |
                 TempCountField::encode(temp_count) |
                 IsCallField::encode(false)),
      reference_map_(nullptr),
      block_(nullptr) {
  parallel_moves_[0] = nullptr;
  parallel_moves_[1] = nullptr;
  size_t offset = 0;
  for (size_t i = 0; i < output_count; ++i) {
    DCHECK(!outputs[i].IsInvalid());
    operands_[offset++] = outputs[i];
  }
  for (size_t i = 0; i < input_count; ++i) {
    DCHECK(!inputs[i].IsInvalid());
    operands_[offset++] = inputs[i];
  }
  for (size_t i = 0; i < temp_count; ++i) {
    DCHECK(!temps[i].IsInvalid());
    operands_[offset++] = temps[i];
  }
}


bool Instruction::AreMovesRedundant() const {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    if (parallel_moves_[i] != nullptr && !parallel_moves_[i]->IsRedundant()) {
      return false;
    }
  }
  return true;
}

void Instruction::Print(const RegisterConfiguration* config) const {
  PrintableInstruction wrapper;
  wrapper.instr_ = this;
  wrapper.register_configuration_ = config;
  StdoutStream{} << wrapper << std::endl;
}

void Instruction::Print() const { Print(GetRegConfig()); }

std::ostream& operator<<(std::ostream& os,
                         const PrintableParallelMove& printable) {
  const ParallelMove& pm = *printable.parallel_move_;
  bool first = true;
  for (MoveOperands* move : pm) {
    if (move->IsEliminated()) continue;
    if (!first) os << " ";
    first = false;
    PrintableMoveOperands pmo = {printable.register_configuration_, move};
    os << pmo;
  }
  return os;
}


void ReferenceMap::RecordReference(const AllocatedOperand& op) {
  // Do not record arguments as pointers.
  if (op.IsStackSlot() && LocationOperand::cast(op).index() < 0) return;
  DCHECK(!op.IsFPRegister() && !op.IsFPStackSlot());
  reference_operands_.push_back(op);
}


std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm) {
  os << "{";
  bool first = true;
  PrintableInstructionOperand poi = {GetRegConfig(), InstructionOperand()};
  for (const InstructionOperand& op : pm.reference_operands_) {
    if (!first) {
      os << ";";
    } else {
      first = false;
    }
    poi.op_ = op;
    os << poi;
  }
  return os << "}";
}


std::ostream& operator<<(std::ostream& os, const ArchOpcode& ao) {
  switch (ao) {
#define CASE(Name) \
  case k##Name:    \
    return os << #Name;
    ARCH_OPCODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}


std::ostream& operator<<(std::ostream& os, const AddressingMode& am) {
  switch (am) {
    case kMode_None:
      return os;
#define CASE(Name)   \
  case kMode_##Name: \
    return os << #Name;
      TARGET_ADDRESSING_MODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}


std::ostream& operator<<(std::ostream& os, const FlagsMode& fm) {
  switch (fm) {
    case kFlags_none:
      return os;
    case kFlags_branch:
      return os << "branch";
    case kFlags_branch_and_poison:
      return os << "branch_and_poison";
    case kFlags_deoptimize:
      return os << "deoptimize";
    case kFlags_deoptimize_and_poison:
      return os << "deoptimize_and_poison";
    case kFlags_set:
      return os << "set";
    case kFlags_trap:
      return os << "trap";
  }
  UNREACHABLE();
}


std::ostream& operator<<(std::ostream& os, const FlagsCondition& fc) {
  switch (fc) {
    case kEqual:
      return os << "equal";
    case kNotEqual:
      return os << "not equal";
    case kSignedLessThan:
      return os << "signed less than";
    case kSignedGreaterThanOrEqual:
      return os << "signed greater than or equal";
    case kSignedLessThanOrEqual:
      return os << "signed less than or equal";
    case kSignedGreaterThan:
      return os << "signed greater than";
    case kUnsignedLessThan:
      return os << "unsigned less than";
    case kUnsignedGreaterThanOrEqual:
      return os << "unsigned greater than or equal";
    case kUnsignedLessThanOrEqual:
      return os << "unsigned less than or equal";
    case kUnsignedGreaterThan:
      return os << "unsigned greater than";
    case kFloatLessThanOrUnordered:
      return os << "less than or unordered (FP)";
    case kFloatGreaterThanOrEqual:
      return os << "greater than or equal (FP)";
    case kFloatLessThanOrEqual:
      return os << "less than or equal (FP)";
    case kFloatGreaterThanOrUnordered:
      return os << "greater than or unordered (FP)";
    case kFloatLessThan:
      return os << "less than (FP)";
    case kFloatGreaterThanOrEqualOrUnordered:
      return os << "greater than, equal or unordered (FP)";
    case kFloatLessThanOrEqualOrUnordered:
      return os << "less than, equal or unordered (FP)";
    case kFloatGreaterThan:
      return os << "greater than (FP)";
    case kUnorderedEqual:
      return os << "unordered equal";
    case kUnorderedNotEqual:
      return os << "unordered not equal";
    case kOverflow:
      return os << "overflow";
    case kNotOverflow:
      return os << "not overflow";
    case kPositiveOrZero:
      return os << "positive or zero";
    case kNegative:
      return os << "negative";
  }
  UNREACHABLE();
}

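// The printer below emits the gap moves first, then the outputs, the arch
// opcode with an optional addressing mode and flags clause, and finally the
// inputs; e.g. (with made-up operands) something like
//   gap () ()
//             v1 = ArchNop v2 v3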
std::ostream& operator<<(std::ostream& os,
                         const PrintableInstruction& printable) {
  const Instruction& instr = *printable.instr_;
  PrintableInstructionOperand printable_op = {printable.register_configuration_,
                                              InstructionOperand()};
  os << "gap ";
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    os << "(";
    if (instr.parallel_moves()[i] != nullptr) {
      PrintableParallelMove ppm = {printable.register_configuration_,
                                   instr.parallel_moves()[i]};
      os << ppm;
    }
    os << ") ";
  }
  os << "\n          ";

  if (instr.OutputCount() > 1) os << "(";
  for (size_t i = 0; i < instr.OutputCount(); i++) {
    if (i > 0) os << ", ";
    printable_op.op_ = *instr.OutputAt(i);
    os << printable_op;
  }

  if (instr.OutputCount() > 1) os << ") = ";
  if (instr.OutputCount() == 1) os << " = ";

  os << ArchOpcodeField::decode(instr.opcode());
  AddressingMode am = AddressingModeField::decode(instr.opcode());
  if (am != kMode_None) {
    os << " : " << AddressingModeField::decode(instr.opcode());
  }
  FlagsMode fm = FlagsModeField::decode(instr.opcode());
  if (fm != kFlags_none) {
    os << " && " << fm << " if " << FlagsConditionField::decode(instr.opcode());
  }
  if (instr.InputCount() > 0) {
    for (size_t i = 0; i < instr.InputCount(); i++) {
      printable_op.op_ = *instr.InputAt(i);
      os << " " << printable_op;
    }
  }
  return os;
}


Constant::Constant(int32_t v) : type_(kInt32), value_(v) {}

Constant::Constant(RelocatablePtrConstantInfo info) {
  if (info.type() == RelocatablePtrConstantInfo::kInt32) {
    type_ = kInt32;
  } else if (info.type() == RelocatablePtrConstantInfo::kInt64) {
    type_ = kInt64;
  } else {
    UNREACHABLE();
  }
  value_ = info.value();
  rmode_ = info.rmode();
}

Handle<HeapObject> Constant::ToHeapObject() const {
  DCHECK_EQ(kHeapObject, type());
  Handle<HeapObject> value(
      bit_cast<HeapObject**>(static_cast<intptr_t>(value_)));
  return value;
}

Handle<Code> Constant::ToCode() const {
  DCHECK_EQ(kHeapObject, type());
  Handle<Code> value(bit_cast<Code**>(static_cast<intptr_t>(value_)));
  return value;
}

std::ostream& operator<<(std::ostream& os, const Constant& constant) {
  switch (constant.type()) {
    case Constant::kInt32:
      return os << constant.ToInt32();
    case Constant::kInt64:
      return os << constant.ToInt64() << "l";
    case Constant::kFloat32:
      return os << constant.ToFloat32() << "f";
    case Constant::kFloat64:
      return os << constant.ToFloat64().value();
    case Constant::kExternalReference:
      return os << constant.ToExternalReference().address();
    case Constant::kHeapObject:
      return os << Brief(*constant.ToHeapObject());
    case Constant::kRpoNumber:
      return os << "RPO" << constant.ToRpoNumber().ToInt();
  }
  UNREACHABLE();
}


PhiInstruction::PhiInstruction(Zone* zone, int virtual_register,
                               size_t input_count)
    : virtual_register_(virtual_register),
      output_(UnallocatedOperand(UnallocatedOperand::NONE, virtual_register)),
      operands_(input_count, InstructionOperand::kInvalidVirtualRegister,
                zone) {}


void PhiInstruction::SetInput(size_t offset, int virtual_register) {
  DCHECK_EQ(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}

void PhiInstruction::RenameInput(size_t offset, int virtual_register) {
  DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}

InstructionBlock::InstructionBlock(Zone* zone, RpoNumber rpo_number,
                                   RpoNumber loop_header, RpoNumber loop_end,
                                   bool deferred, bool handler)
    : successors_(zone),
      predecessors_(zone),
      phis_(zone),
      ao_number_(rpo_number),
      rpo_number_(rpo_number),
      loop_header_(loop_header),
      loop_end_(loop_end),
      code_start_(-1),
      code_end_(-1),
      deferred_(deferred),
      handler_(handler),
      needs_frame_(false),
      must_construct_frame_(false),
      must_deconstruct_frame_(false) {}

size_t InstructionBlock::PredecessorIndexOf(RpoNumber rpo_number) const {
  size_t j = 0;
  for (InstructionBlock::Predecessors::const_iterator i = predecessors_.begin();
       i != predecessors_.end(); ++i, ++j) {
    if (*i == rpo_number) break;
  }
  return j;
}


static RpoNumber GetRpo(const BasicBlock* block) {
  if (block == nullptr) return RpoNumber::Invalid();
  return RpoNumber::FromInt(block->rpo_number());
}


static RpoNumber GetLoopEndRpo(const BasicBlock* block) {
  if (!block->IsLoopHeader()) return RpoNumber::Invalid();
  return RpoNumber::FromInt(block->loop_end()->rpo_number());
}


static InstructionBlock* InstructionBlockFor(Zone* zone,
                                             const BasicBlock* block) {
  bool is_handler =
      !block->empty() && block->front()->opcode() == IrOpcode::kIfException;
  InstructionBlock* instr_block = new (zone)
      InstructionBlock(zone, GetRpo(block), GetRpo(block->loop_header()),
                       GetLoopEndRpo(block), block->deferred(), is_handler);
  // Map successors and predecessors.
  instr_block->successors().reserve(block->SuccessorCount());
  for (BasicBlock* successor : block->successors()) {
    instr_block->successors().push_back(GetRpo(successor));
  }
  instr_block->predecessors().reserve(block->PredecessorCount());
  for (BasicBlock* predecessor : block->predecessors()) {
    instr_block->predecessors().push_back(GetRpo(predecessor));
  }
  return instr_block;
}

std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionBlock& printable_block) {
  const InstructionBlock* block = printable_block.block_;
  const RegisterConfiguration* config = printable_block.register_configuration_;
  const InstructionSequence* code = printable_block.code_;

  os << "B" << block->rpo_number();
  os << ": AO#" << block->ao_number();
  if (block->IsDeferred()) os << " (deferred)";
  if (!block->needs_frame()) os << " (no frame)";
  if (block->must_construct_frame()) os << " (construct frame)";
  if (block->must_deconstruct_frame()) os << " (deconstruct frame)";
  if (block->IsLoopHeader()) {
    os << " loop blocks: [" << block->rpo_number() << ", " << block->loop_end()
       << ")";
  }
  os << "  instructions: [" << block->code_start() << ", " << block->code_end()
     << ")" << std::endl
     << " predecessors:";

  for (RpoNumber pred : block->predecessors()) {
    os << " B" << pred.ToInt();
  }
  os << std::endl;

  for (const PhiInstruction* phi : block->phis()) {
    PrintableInstructionOperand printable_op = {config, phi->output()};
    os << "     phi: " << printable_op << " =";
    for (int input : phi->operands()) {
      os << " v" << input;
    }
    os << std::endl;
  }

  PrintableInstruction printable_instr;
  printable_instr.register_configuration_ = config;
  for (int j = block->first_instruction_index();
       j <= block->last_instruction_index(); j++) {
    printable_instr.instr_ = code->InstructionAt(j);
    os << "   " << std::setw(5) << j << ": " << printable_instr << std::endl;
  }

  os << " successors:";
  for (RpoNumber succ : block->successors()) {
    os << " B" << succ.ToInt();
  }
  os << std::endl;
  return os;
}

InstructionBlocks* InstructionSequence::InstructionBlocksFor(
    Zone* zone, const Schedule* schedule) {
  InstructionBlocks* blocks = zone->NewArray<InstructionBlocks>(1);
  new (blocks) InstructionBlocks(
      static_cast<int>(schedule->rpo_order()->size()), nullptr, zone);
  size_t rpo_number = 0;
  for (BasicBlockVector::const_iterator it = schedule->rpo_order()->begin();
       it != schedule->rpo_order()->end(); ++it, ++rpo_number) {
    DCHECK(!(*blocks)[rpo_number]);
    DCHECK(GetRpo(*it).ToSize() == rpo_number);
    (*blocks)[rpo_number] = InstructionBlockFor(zone, *it);
  }
  ComputeAssemblyOrder(blocks);
  return blocks;
}

void InstructionSequence::ValidateEdgeSplitForm() const {
  // Validate that blocks are in edge-split form: no block with multiple
  // successors has an edge to a block (i.e. a successor) with more than one
  // predecessor.
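  // In other words, there must be no critical edges. For example, an edge
  // going directly from a two-successor block to a two-predecessor block
  // would be critical and is expected to have been split by inserting an
  // intermediate block earlier in the pipeline.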
  for (const InstructionBlock* block : instruction_blocks()) {
    if (block->SuccessorCount() > 1) {
      for (const RpoNumber& successor_id : block->successors()) {
        const InstructionBlock* successor = InstructionBlockAt(successor_id);
        // Expect precisely one predecessor: "block".
        CHECK(successor->PredecessorCount() == 1 &&
              successor->predecessors()[0] == block->rpo_number());
      }
    }
  }
}

void InstructionSequence::ValidateDeferredBlockExitPaths() const {
  // A deferred block with more than one successor must have all its successors
  // deferred.
  for (const InstructionBlock* block : instruction_blocks()) {
    if (!block->IsDeferred() || block->SuccessorCount() <= 1) continue;
    for (RpoNumber successor_id : block->successors()) {
      CHECK(InstructionBlockAt(successor_id)->IsDeferred());
    }
  }
}

void InstructionSequence::ValidateDeferredBlockEntryPaths() const {
  // If a deferred block has multiple predecessors, they all have to be
  // deferred. Otherwise, we can run into a situation where a range that
  // spills only in deferred blocks inserts its spill in the block, but other
  // ranges need moves inserted by ResolveControlFlow in the predecessors,
  // which may clobber the register of this range.
  for (const InstructionBlock* block : instruction_blocks()) {
    if (!block->IsDeferred() || block->PredecessorCount() <= 1) continue;
    for (RpoNumber predecessor_id : block->predecessors()) {
      CHECK(InstructionBlockAt(predecessor_id)->IsDeferred());
    }
  }
}

void InstructionSequence::ValidateSSA() const {
  // TODO(mtrofin): We could use a local zone here instead.
  BitVector definitions(VirtualRegisterCount(), zone());
  for (const Instruction* instruction : *this) {
    for (size_t i = 0; i < instruction->OutputCount(); ++i) {
      const InstructionOperand* output = instruction->OutputAt(i);
      int vreg = (output->IsConstant())
                     ? ConstantOperand::cast(output)->virtual_register()
                     : UnallocatedOperand::cast(output)->virtual_register();
      CHECK(!definitions.Contains(vreg));
      definitions.Add(vreg);
    }
  }
}

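// Deferred blocks are pushed to the end of the assembly order. For example,
// given blocks B0, B1 (deferred), B2 in RPO, the assembly order numbers
// assigned below are AO#0 for B0, AO#1 for B2 and AO#2 for B1.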
void InstructionSequence::ComputeAssemblyOrder(InstructionBlocks* blocks) {
  int ao = 0;
  for (InstructionBlock* const block : *blocks) {
    if (!block->IsDeferred()) {
      block->set_ao_number(RpoNumber::FromInt(ao++));
    }
  }
  for (InstructionBlock* const block : *blocks) {
    if (block->IsDeferred()) {
      block->set_ao_number(RpoNumber::FromInt(ao++));
    }
  }
}

InstructionSequence::InstructionSequence(Isolate* isolate,
                                         Zone* instruction_zone,
                                         InstructionBlocks* instruction_blocks)
    : isolate_(isolate),
      zone_(instruction_zone),
      instruction_blocks_(instruction_blocks),
      source_positions_(zone()),
      constants_(ConstantMap::key_compare(),
                 ConstantMap::allocator_type(zone())),
      immediates_(zone()),
      instructions_(zone()),
      next_virtual_register_(0),
      reference_maps_(zone()),
      representations_(zone()),
      representation_mask_(0),
      deoptimization_entries_(zone()),
      current_block_(nullptr) {}

int InstructionSequence::NextVirtualRegister() {
  int virtual_register = next_virtual_register_++;
  CHECK_NE(virtual_register, InstructionOperand::kInvalidVirtualRegister);
  return virtual_register;
}


Instruction* InstructionSequence::GetBlockStart(RpoNumber rpo) const {
  const InstructionBlock* block = InstructionBlockAt(rpo);
  return InstructionAt(block->code_start());
}


void InstructionSequence::StartBlock(RpoNumber rpo) {
  DCHECK_NULL(current_block_);
  current_block_ = InstructionBlockAt(rpo);
  int code_start = static_cast<int>(instructions_.size());
  current_block_->set_code_start(code_start);
}


void InstructionSequence::EndBlock(RpoNumber rpo) {
  int end = static_cast<int>(instructions_.size());
  DCHECK_EQ(current_block_->rpo_number(), rpo);
  CHECK(current_block_->code_start() >= 0 &&
        current_block_->code_start() < end);
  current_block_->set_code_end(end);
  current_block_ = nullptr;
}


int InstructionSequence::AddInstruction(Instruction* instr) {
  DCHECK_NOT_NULL(current_block_);
  int index = static_cast<int>(instructions_.size());
  instr->set_block(current_block_);
  instructions_.push_back(instr);
  if (instr->NeedsReferenceMap()) {
    DCHECK_NULL(instr->reference_map());
    ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
    reference_map->set_instruction_position(index);
    instr->set_reference_map(reference_map);
    reference_maps_.push_back(reference_map);
  }
  return index;
}


InstructionBlock* InstructionSequence::GetInstructionBlock(
    int instruction_index) const {
  return instructions()[instruction_index]->block();
}

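// Sub-word representations (kBit, kWord8, kWord16) are widened to
// InstructionSequence::DefaultRepresentation() for register-allocation
// bookkeeping; e.g. a kWord8 value is tracked at the default, typically
// pointer-sized, representation.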
static MachineRepresentation FilterRepresentation(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
      return InstructionSequence::DefaultRepresentation();
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kTaggedSigned:
    case MachineRepresentation::kTaggedPointer:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kFloat64:
    case MachineRepresentation::kSimd128:
      return rep;
    case MachineRepresentation::kNone:
      break;
  }

  UNREACHABLE();
}


MachineRepresentation InstructionSequence::GetRepresentation(
    int virtual_register) const {
  DCHECK_LE(0, virtual_register);
  DCHECK_LT(virtual_register, VirtualRegisterCount());
  if (virtual_register >= static_cast<int>(representations_.size())) {
    return DefaultRepresentation();
  }
  return representations_[virtual_register];
}


void InstructionSequence::MarkAsRepresentation(MachineRepresentation rep,
                                               int virtual_register) {
  DCHECK_LE(0, virtual_register);
  DCHECK_LT(virtual_register, VirtualRegisterCount());
  if (virtual_register >= static_cast<int>(representations_.size())) {
    representations_.resize(VirtualRegisterCount(), DefaultRepresentation());
  }
  rep = FilterRepresentation(rep);
  DCHECK_IMPLIES(representations_[virtual_register] != rep,
                 representations_[virtual_register] == DefaultRepresentation());
  representations_[virtual_register] = rep;
  representation_mask_ |= 1 << static_cast<int>(rep);
}

int InstructionSequence::AddDeoptimizationEntry(
    FrameStateDescriptor* descriptor, DeoptimizeKind kind,
    DeoptimizeReason reason, VectorSlotPair const& feedback) {
  int deoptimization_id = static_cast<int>(deoptimization_entries_.size());
  deoptimization_entries_.push_back(
      DeoptimizationEntry(descriptor, kind, reason, feedback));
  return deoptimization_id;
}

DeoptimizationEntry const& InstructionSequence::GetDeoptimizationEntry(
    int state_id) {
  return deoptimization_entries_[state_id];
}


RpoNumber InstructionSequence::InputRpo(Instruction* instr, size_t index) {
  InstructionOperand* operand = instr->InputAt(index);
  Constant constant =
      operand->IsImmediate()
          ? GetImmediate(ImmediateOperand::cast(operand))
          : GetConstant(ConstantOperand::cast(operand)->virtual_register());
  return constant.ToRpoNumber();
}


bool InstructionSequence::GetSourcePosition(const Instruction* instr,
                                            SourcePosition* result) const {
  auto it = source_positions_.find(instr);
  if (it == source_positions_.end()) return false;
  *result = it->second;
  return true;
}


void InstructionSequence::SetSourcePosition(const Instruction* instr,
                                            SourcePosition value) {
  source_positions_.insert(std::make_pair(instr, value));
}

void InstructionSequence::Print(const RegisterConfiguration* config) const {
  PrintableInstructionSequence wrapper;
  wrapper.register_configuration_ = config;
  wrapper.sequence_ = this;
  StdoutStream{} << wrapper << std::endl;
}

void InstructionSequence::Print() const { Print(GetRegConfig()); }

void InstructionSequence::PrintBlock(const RegisterConfiguration* config,
                                     int block_id) const {
  RpoNumber rpo = RpoNumber::FromInt(block_id);
  const InstructionBlock* block = InstructionBlockAt(rpo);
  CHECK(block->rpo_number() == rpo);
  PrintableInstructionBlock printable_block = {config, block, this};
  StdoutStream{} << printable_block << std::endl;
}

void InstructionSequence::PrintBlock(int block_id) const {
  PrintBlock(GetRegConfig(), block_id);
}

const RegisterConfiguration*
    InstructionSequence::registerConfigurationForTesting_ = nullptr;

const RegisterConfiguration*
InstructionSequence::RegisterConfigurationForTesting() {
  DCHECK_NOT_NULL(registerConfigurationForTesting_);
  return registerConfigurationForTesting_;
}

void InstructionSequence::SetRegisterConfigurationForTesting(
    const RegisterConfiguration* regConfig) {
  registerConfigurationForTesting_ = regConfig;
  GetRegConfig = InstructionSequence::RegisterConfigurationForTesting;
}

FrameStateDescriptor::FrameStateDescriptor(
    Zone* zone, FrameStateType type, BailoutId bailout_id,
    OutputFrameStateCombine state_combine, size_t parameters_count,
    size_t locals_count, size_t stack_count,
    MaybeHandle<SharedFunctionInfo> shared_info,
    FrameStateDescriptor* outer_state)
    : type_(type),
      bailout_id_(bailout_id),
      frame_state_combine_(state_combine),
      parameters_count_(parameters_count),
      locals_count_(locals_count),
      stack_count_(stack_count),
      values_(zone),
      shared_info_(shared_info),
      outer_state_(outer_state) {}

size_t FrameStateDescriptor::GetSize() const {
  return 1 + parameters_count() + locals_count() + stack_count() +
         (HasContext() ? 1 : 0);
}


size_t FrameStateDescriptor::GetTotalSize() const {
  size_t total_size = 0;
  for (const FrameStateDescriptor* iter = this; iter != nullptr;
       iter = iter->outer_state_) {
    total_size += iter->GetSize();
  }
  return total_size;
}


size_t FrameStateDescriptor::GetFrameCount() const {
  size_t count = 0;
  for (const FrameStateDescriptor* iter = this; iter != nullptr;
       iter = iter->outer_state_) {
    ++count;
  }
  return count;
}


size_t FrameStateDescriptor::GetJSFrameCount() const {
  size_t count = 0;
  for (const FrameStateDescriptor* iter = this; iter != nullptr;
       iter = iter->outer_state_) {
    if (FrameStateFunctionInfo::IsJSFunctionType(iter->type_)) {
      ++count;
    }
  }
  return count;
}


std::ostream& operator<<(std::ostream& os, const RpoNumber& rpo) {
  return os << rpo.ToSize();
}


std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionSequence& printable) {
  const InstructionSequence& code = *printable.sequence_;
  for (size_t i = 0; i < code.immediates_.size(); ++i) {
    Constant constant = code.immediates_[i];
    os << "IMM#" << i << ": " << constant << "\n";
  }
  int i = 0;
  for (ConstantMap::const_iterator it = code.constants_.begin();
       it != code.constants_.end(); ++i, ++it) {
    os << "CST#" << i << ": v" << it->first << " = " << it->second << "\n";
  }
  PrintableInstructionBlock printable_block = {
      printable.register_configuration_, nullptr, printable.sequence_};
  for (int i = 0; i < code.InstructionBlockCount(); i++) {
    printable_block.block_ = code.InstructionBlockAt(RpoNumber::FromInt(i));
    os << printable_block;
  }
  return os;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8