// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/ppc/frames-ppc.h"

namespace v8 {
namespace internal {
namespace compiler {

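// Immediate operand classes matching the field widths of the PPC instruction
// forms used below: 16-bit signed (D-form arithmetic), 16-bit unsigned
// (logical immediates), values whose negation fits in 16 bits (subtract
// emitted as an add), 16-bit multiples of 4 (DS-form loads/stores), and
// 5- or 6-bit shift amounts.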
enum ImmediateMode {
  kInt16Imm,
  kInt16Imm_Unsigned,
  kInt16Imm_Negate,
  kInt16Imm_4ByteAligned,
  kShift32Imm,
  kShift64Imm,
  kNoImmediate
};


// Adds PPC-specific methods for generating operands.
class PPCOperandGenerator final : public OperandGenerator {
 public:
  explicit PPCOperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
    if (CanBeImmediate(node, mode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  bool CanBeImmediate(Node* node, ImmediateMode mode) {
    int64_t value;
    if (node->opcode() == IrOpcode::kInt32Constant)
      value = OpParameter<int32_t>(node);
    else if (node->opcode() == IrOpcode::kInt64Constant)
      value = OpParameter<int64_t>(node);
    else
      return false;
    return CanBeImmediate(value, mode);
  }

  bool CanBeImmediate(int64_t value, ImmediateMode mode) {
    switch (mode) {
      case kInt16Imm:
        return is_int16(value);
      case kInt16Imm_Unsigned:
        return is_uint16(value);
      case kInt16Imm_Negate:
        return is_int16(-value);
      case kInt16Imm_4ByteAligned:
        return is_int16(value) && !(value & 3);
      case kShift32Imm:
        return 0 <= value && value < 32;
      case kShift64Imm:
        return 0 <= value && value < 64;
      case kNoImmediate:
        return false;
    }
    return false;
  }
};


namespace {

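// Emit helpers for the common operand shapes: one register result with one
// register input (RR), with two register inputs (RRR), or with a register
// plus an operand that may be encoded as an immediate (RRO).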
void VisitRR(InstructionSelector* selector, InstructionCode opcode,
             Node* node) {
  PPCOperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}

void VisitRRR(InstructionSelector* selector, InstructionCode opcode,
              Node* node) {
  PPCOperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseRegister(node->InputAt(1)));
}

void VisitRRO(InstructionSelector* selector, InstructionCode opcode, Node* node,
              ImmediateMode operand_mode) {
  PPCOperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseOperand(node->InputAt(1), operand_mode));
}


#if V8_TARGET_ARCH_PPC64
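// Emits a truncating float-to-integer conversion. The first output is the
// converted value; if the node's success projection is used, a second
// register output is defined for it so the instruction can report whether
// the conversion succeeded.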
void VisitTryTruncateDouble(InstructionSelector* selector,
                            InstructionCode opcode, Node* node) {
  PPCOperandGenerator g(selector);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  selector->Emit(opcode, output_count, outputs, 1, inputs);
}
#endif


// Shared routine for multiple binary operations.
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, ImmediateMode operand_mode,
                FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Matcher m(node);
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  inputs[input_count++] = g.UseRegister(m.left().node());
  inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  if (cont->IsDeoptimize()) {
    // If we can deoptimize as a result of the binop, we need to make sure that
    // the deopt inputs are not overwritten by the binop result. One way
    // to achieve that is to declare the output register as same-as-first.
    outputs[output_count++] = g.DefineSameAsFirst(node);
  } else {
    outputs[output_count++] = g.DefineAsRegister(node);
  }
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->kind(), cont->reason(), cont->frame_state());
  } else if (cont->IsTrap()) {
    inputs[input_count++] = g.UseImmediate(cont->trap_id());
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}


// Shared routine for multiple binary operations.
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, ImmediateMode operand_mode) {
  FlagsContinuation cont;
  VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
}

}  // namespace


void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  ArchOpcode opcode = kArchNop;
  ImmediateMode mode = kInt16Imm;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kPPC_LoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kPPC_LoadDouble;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kPPC_LoadWordS8 : kPPC_LoadWordU8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kPPC_LoadWordS16 : kPPC_LoadWordU16;
      break;
#if !V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
#endif
    case MachineRepresentation::kWord32:
      opcode = kPPC_LoadWordU32;
      break;
#if V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kPPC_LoadWord64;
      mode = kInt16Imm_4ByteAligned;
      break;
#else
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kSimd1x4:  // Fall through.
    case MachineRepresentation::kSimd1x8:  // Fall through.
    case MachineRepresentation::kSimd1x16:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
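  // Prefer the immediate (MRI) addressing form when either the offset or the
  // base fits the selected immediate mode; otherwise fall back to the
  // register+register (MRR) form.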
  if (g.CanBeImmediate(offset, mode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
  } else if (g.CanBeImmediate(base, mode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base));
  } else {
    Emit(opcode | AddressingModeField::encode(kMode_MRR),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset));
  }
}

void InstructionSelector::VisitProtectedLoad(Node* node) {
  // TODO(eholk)
  UNIMPLEMENTED();
}

void InstructionSelector::VisitStore(Node* node) {
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

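  // Stores that need a write barrier are emitted as kArchStoreWithWriteBarrier;
  // unique registers keep the inputs from aliasing the registers that the
  // out-of-line record-write code needs.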
  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK(CanBeTaggedPointer(rep));
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    // OutOfLineRecordWrite uses the offset in an 'add' instruction as well as
    // for the store itself, so we must check compatibility with both.
    if (g.CanBeImmediate(offset, kInt16Imm)
#if V8_TARGET_ARCH_PPC64
        && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
#endif
            ) {
      inputs[input_count++] = g.UseImmediate(offset);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(offset);
      addressing_mode = kMode_MRR;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = kArchNop;
    ImmediateMode mode = kInt16Imm;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kPPC_StoreFloat32;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kPPC_StoreDouble;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kPPC_StoreWord8;
        break;
      case MachineRepresentation::kWord16:
        opcode = kPPC_StoreWord16;
        break;
#if !V8_TARGET_ARCH_PPC64
      case MachineRepresentation::kTaggedSigned:   // Fall through.
      case MachineRepresentation::kTaggedPointer:  // Fall through.
      case MachineRepresentation::kTagged:  // Fall through.
#endif
      case MachineRepresentation::kWord32:
        opcode = kPPC_StoreWord32;
        break;
#if V8_TARGET_ARCH_PPC64
      case MachineRepresentation::kTaggedSigned:   // Fall through.
      case MachineRepresentation::kTaggedPointer:  // Fall through.
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kPPC_StoreWord64;
        mode = kInt16Imm_4ByteAligned;
        break;
#else
      case MachineRepresentation::kWord64:  // Fall through.
#endif
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kSimd1x4:  // Fall through.
      case MachineRepresentation::kSimd1x8:  // Fall through.
      case MachineRepresentation::kSimd1x16:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }
    if (g.CanBeImmediate(offset, mode)) {
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value));
    } else if (g.CanBeImmediate(base, mode)) {
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value));
    } else {
      Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
           g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value));
    }
  }
}

void InstructionSelector::VisitProtectedStore(Node* node) {
  // TODO(eholk)
  UNIMPLEMENTED();
}

// Architecture supports unaligned access, therefore VisitLoad is used instead
void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }

// Architecture supports unaligned access, therefore VisitStore is used instead
void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }

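// Checked loads/stores carry an explicit length operand; the generated code
// bounds-checks the offset against it before touching memory.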
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* const base = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
#if V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
#endif
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:     // Fall through.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
#if !V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kSimd1x4:  // Fall through.
    case MachineRepresentation::kSimd1x8:  // Fall through.
    case MachineRepresentation::kSimd1x16:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  AddressingMode addressingMode = kMode_MRR;
  Emit(opcode | AddressingModeField::encode(addressingMode),
       g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
       g.UseOperand(length, kInt16Imm_Unsigned));
}


void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* const base = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
#if V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
#endif
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:     // Fall through.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
#if !V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kSimd1x4:  // Fall through.
    case MachineRepresentation::kSimd1x8:  // Fall through.
    case MachineRepresentation::kSimd1x16:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  AddressingMode addressingMode = kMode_MRR;
  Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(),
       g.UseRegister(base), g.UseRegister(offset),
       g.UseOperand(length, kInt16Imm_Unsigned), g.UseRegister(value));
}


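// Shared lowering for bitwise and/or: when one operand is an xor with -1
// (a bitwise complement) that this node can cover, fold it into the
// complemented form of the instruction (andc/orc) instead of materializing
// the inverted value.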
template <typename Matcher>
static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
                         ArchOpcode opcode, bool left_can_cover,
                         bool right_can_cover, ImmediateMode imm_mode) {
  PPCOperandGenerator g(selector);

  // Map instruction to equivalent operation with inverted right input.
  ArchOpcode inv_opcode = opcode;
  switch (opcode) {
    case kPPC_And:
      inv_opcode = kPPC_AndComplement;
      break;
    case kPPC_Or:
      inv_opcode = kPPC_OrComplement;
      break;
    default:
      UNREACHABLE();
  }

  // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
  if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
    Matcher mleft(m->left().node());
    if (mleft.right().Is(-1)) {
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->right().node()),
                     g.UseRegister(mleft.left().node()));
      return;
    }
  }

  // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
  if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
      right_can_cover) {
    Matcher mright(m->right().node());
    if (mright.right().Is(-1)) {
      // TODO(all): support shifted operand on right.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->left().node()),
                     g.UseRegister(mright.left().node()));
      return;
    }
  }

  VisitBinop<Matcher>(selector, node, opcode, imm_mode);
}


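// Returns true if |value| consists of a single contiguous run of set bits.
// On success, *mb receives the position of the highest set bit and *me the
// position of the lowest set bit (both counted from bit 0, the least
// significant bit), as consumed by the rotate-and-mask emissions below.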
static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
  int mask_width = base::bits::CountPopulation32(value);
  int mask_msb = base::bits::CountLeadingZeros32(value);
  int mask_lsb = base::bits::CountTrailingZeros32(value);
  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
    return false;
  *mb = mask_lsb + mask_width - 1;
  *me = mask_lsb;
  return true;
}


#if V8_TARGET_ARCH_PPC64
static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
  int mask_width = base::bits::CountPopulation64(value);
  int mask_msb = base::bits::CountLeadingZeros64(value);
  int mask_lsb = base::bits::CountTrailingZeros64(value);
  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
    return false;
  *mb = mask_lsb + mask_width - 1;
  *me = mask_lsb;
  return true;
}
#endif


// TODO(mbrandy): Absorb rotate-right into rlwinm?
void InstructionSelector::VisitWord32And(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
        CanCover(node, left)) {
      // Try to absorb left/right shift into rlwinm
      Int32BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 31)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord32Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 31 - sh) mb = 31 - sh;
          sh = (32 - sh) & 0x1f;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    if (mb >= me) {
      Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node), g.UseRegister(left),
           g.TempImmediate(sh), g.TempImmediate(mb), g.TempImmediate(me));
      return;
    }
  }
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kPPC_And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}


#if V8_TARGET_ARCH_PPC64
// TODO(mbrandy): Absorb rotate-right into rldic?
void InstructionSelector::VisitWord64And(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
        CanCover(node, left)) {
      // Try to absorb left/right shift into rldic
      Int64BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 63)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord64Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 63 - sh) mb = 63 - sh;
          sh = (64 - sh) & 0x3f;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    if (mb >= me) {
      bool match = false;
      ArchOpcode opcode;
      int mask;
      if (me == 0) {
        match = true;
        opcode = kPPC_RotLeftAndClearLeft64;
        mask = mb;
      } else if (mb == 63) {
        match = true;
        opcode = kPPC_RotLeftAndClearRight64;
        mask = me;
      } else if (sh && me <= sh && m.left().IsWord64Shl()) {
        match = true;
        opcode = kPPC_RotLeftAndClear64;
        mask = mb;
      }
      if (match) {
        Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
             g.TempImmediate(sh), g.TempImmediate(mask));
        return;
      }
    }
  }
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kPPC_And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
#endif


void InstructionSelector::VisitWord32Or(Node* node) {
  Int32BinopMatcher m(node);
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Or(Node* node) {
  Int64BinopMatcher m(node);
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
#endif


void InstructionSelector::VisitWord32Xor(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().Is(-1)) {
    Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
  } else {
    VisitBinop<Int32BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
  }
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Xor(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.right().Is(-1)) {
    Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
  } else {
    VisitBinop<Int64BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
  }
}
#endif


void InstructionSelector::VisitWord32Shl(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
    // Try to absorb logical-and into rlwinm
    Int32BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
             g.TempImmediate(mb), g.TempImmediate(me));
        return;
      }
    }
  }
  VisitRRO(this, kPPC_ShiftLeft32, node, kShift32Imm);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Shl(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  // TODO(mbrandy): eliminate left sign extension if right >= 32
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    // Try to absorb logical-and into rldic
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          match = true;
          opcode = kPPC_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          match = true;
          opcode = kPPC_RotLeftAndClearRight64;
          mask = me;
        } else if (sh && me <= sh) {
          match = true;
          opcode = kPPC_RotLeftAndClear64;
          mask = mb;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  VisitRRO(this, kPPC_ShiftLeft64, node, kShift64Imm);
}
#endif


void InstructionSelector::VisitWord32Shr(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
    // Try to absorb logical-and into rlwinm
    Int32BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 31 - sh) mb = 31 - sh;
      sh = (32 - sh) & 0x1f;
      if (mb >= me) {
        Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
             g.TempImmediate(mb), g.TempImmediate(me));
        return;
      }
    }
  }
  VisitRRO(this, kPPC_ShiftRight32, node, kShift32Imm);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Shr(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    // Try to absorb logical-and into rldic
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 63 - sh) mb = 63 - sh;
      sh = (64 - sh) & 0x3f;
      if (mb >= me) {
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          match = true;
          opcode = kPPC_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          match = true;
          opcode = kPPC_RotLeftAndClearRight64;
          mask = me;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  VisitRRO(this, kPPC_ShiftRight64, node, kShift64Imm);
}
#endif


void InstructionSelector::VisitWord32Sar(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  // Replace with sign extension for (x << K) >> K where K is 16 or 24.
  if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      Emit(kPPC_ExtendSignWord16, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      Emit(kPPC_ExtendSignWord8, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()));
      return;
    }
  }
  VisitRRO(this, kPPC_ShiftRightAlg32, node, kShift32Imm);
}

#if !V8_TARGET_ARCH_PPC64
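// Lowers a 64-bit binary operation expressed as pairs of 32-bit words. If the
// high word of the result is unused (no projection 1), only the plain 32-bit
// instruction for the low word is emitted.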
void VisitPairBinop(InstructionSelector* selector, InstructionCode opcode,
                    InstructionCode opcode2, Node* node) {
  PPCOperandGenerator g(selector);

  Node* projection1 = NodeProperties::FindProjection(node, 1);
  if (projection1) {
    // We use UseUniqueRegister here to avoid register sharing with the output
    // registers.
    InstructionOperand inputs[] = {
        g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
        g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};

    InstructionOperand outputs[] = {
        g.DefineAsRegister(node),
        g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

    selector->Emit(opcode, 2, outputs, 4, inputs);
  } else {
    // The high word of the result is not used, so we emit the standard 32 bit
    // instruction.
    selector->Emit(opcode2, g.DefineSameAsFirst(node),
                   g.UseRegister(node->InputAt(0)),
                   g.UseRegister(node->InputAt(2)));
  }
}

void InstructionSelector::VisitInt32PairAdd(Node* node) {
  VisitPairBinop(this, kPPC_AddPair, kPPC_Add32, node);
}

void InstructionSelector::VisitInt32PairSub(Node* node) {
  VisitPairBinop(this, kPPC_SubPair, kPPC_Sub, node);
}

void InstructionSelector::VisitInt32PairMul(Node* node) {
  PPCOperandGenerator g(this);
  Node* projection1 = NodeProperties::FindProjection(node, 1);
  if (projection1) {
    InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
                                   g.UseUniqueRegister(node->InputAt(1)),
                                   g.UseUniqueRegister(node->InputAt(2)),
                                   g.UseUniqueRegister(node->InputAt(3))};

    InstructionOperand outputs[] = {
        g.DefineAsRegister(node),
        g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};

    Emit(kPPC_MulPair, 2, outputs, 4, inputs, 2, temps);
  } else {
    // The high word of the result is not used, so we emit the standard 32 bit
    // instruction.
    Emit(kPPC_Mul32, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)),
         g.UseRegister(node->InputAt(2)));
  }
}

namespace {
// Shared routine for multiple shift operations.
void VisitPairShift(InstructionSelector* selector, InstructionCode opcode,
                    Node* node) {
  PPCOperandGenerator g(selector);
  // We use g.UseUniqueRegister here to guarantee that there is
  // no register aliasing of input registers with output registers.
  Int32Matcher m(node->InputAt(2));
  InstructionOperand shift_operand;
  if (m.HasValue()) {
    shift_operand = g.UseImmediate(m.node());
  } else {
    shift_operand = g.UseUniqueRegister(m.node());
  }

  InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
                                 g.UseUniqueRegister(node->InputAt(1)),
                                 shift_operand};

  Node* projection1 = NodeProperties::FindProjection(node, 1);

  InstructionOperand outputs[2];
  InstructionOperand temps[1];
  int32_t output_count = 0;
  int32_t temp_count = 0;

  outputs[output_count++] = g.DefineAsRegister(node);
  if (projection1) {
    outputs[output_count++] = g.DefineAsRegister(projection1);
  } else {
    temps[temp_count++] = g.TempRegister();
  }

  selector->Emit(opcode, output_count, outputs, 3, inputs, temp_count, temps);
}
}  // namespace

void InstructionSelector::VisitWord32PairShl(Node* node) {
  VisitPairShift(this, kPPC_ShiftLeftPair, node);
}

void InstructionSelector::VisitWord32PairShr(Node* node) {
  VisitPairShift(this, kPPC_ShiftRightPair, node);
}

void InstructionSelector::VisitWord32PairSar(Node* node) {
  VisitPairShift(this, kPPC_ShiftRightAlgPair, node);
}
#endif

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Sar(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
      m.right().Is(32)) {
    // Just load and sign-extend the interesting 4 bytes instead. This happens,
    // for example, when we're loading and untagging SMIs.
    BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
                                                AddressOption::kAllowAll);
    if (mleft.matches() && mleft.index() == nullptr) {
      int64_t offset = 0;
      Node* displacement = mleft.displacement();
      if (displacement != nullptr) {
        Int64Matcher mdisplacement(displacement);
        DCHECK(mdisplacement.HasValue());
        offset = mdisplacement.Value();
      }
      offset = SmiWordOffset(offset);
      if (g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)) {
        Emit(kPPC_LoadWordS32 | AddressingModeField::encode(kMode_MRI),
             g.DefineAsRegister(node), g.UseRegister(mleft.base()),
             g.TempImmediate(offset));
        return;
      }
    }
  }
  VisitRRO(this, kPPC_ShiftRightAlg64, node, kShift64Imm);
}
#endif


// TODO(mbrandy): Absorb logical-and into rlwinm?
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kPPC_RotRight32, node, kShift32Imm);
}


#if V8_TARGET_ARCH_PPC64
// TODO(mbrandy): Absorb logical-and into rldic?
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitRRO(this, kPPC_RotRight64, node, kShift64Imm);
}
#endif


void InstructionSelector::VisitWord32Clz(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Cntlz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Clz(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Cntlz64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}
#endif


void InstructionSelector::VisitWord32Popcnt(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Popcnt32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Popcnt(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Popcnt64, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
#endif


void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
#endif


void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
#endif

void InstructionSelector::VisitWord64ReverseBytes(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitWord32ReverseBytes(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitInt32Add(Node* node) {
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_Add32, kInt16Imm);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Add(Node* node) {
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm);
}
#endif

void InstructionSelector::VisitInt32Sub(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().Is(0)) {
    Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
  } else {
    VisitBinop<Int32BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
  }
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Sub(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().Is(0)) {
    Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
  } else {
    VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
  }
}
#endif

namespace {

void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont);
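// Detects overflow of a 32-bit multiply: compute the 64-bit product as a
// (low, high) pair, then compare the high word against the sign extension of
// the low word (low >> 31, arithmetic); any difference means the product does
// not fit in 32 bits.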
void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node,
                              FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand result_operand = g.DefineAsRegister(node);
  InstructionOperand high32_operand = g.TempRegister();
  InstructionOperand temp_operand = g.TempRegister();
  {
    InstructionOperand outputs[] = {result_operand, high32_operand};
    InstructionOperand inputs[] = {g.UseRegister(m.left().node()),
                                   g.UseRegister(m.right().node())};
    selector->Emit(kPPC_Mul32WithHigh32, 2, outputs, 2, inputs);
  }
  {
    InstructionOperand shift_31 = g.UseImmediate(31);
    InstructionOperand outputs[] = {temp_operand};
    InstructionOperand inputs[] = {result_operand, shift_31};
    selector->Emit(kPPC_ShiftRightAlg32, 1, outputs, 2, inputs);
  }

  VisitCompare(selector, kPPC_Cmp32, high32_operand, temp_operand, cont);
}

}  // namespace


void InstructionSelector::VisitInt32Mul(Node* node) {
  VisitRRR(this, kPPC_Mul32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Mul(Node* node) {
  VisitRRR(this, kPPC_Mul64, node);
}
#endif


void InstructionSelector::VisitInt32MulHigh(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_MulHigh32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}


void InstructionSelector::VisitUint32MulHigh(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_MulHighU32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}


void InstructionSelector::VisitInt32Div(Node* node) {
  VisitRRR(this, kPPC_Div32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Div(Node* node) {
  VisitRRR(this, kPPC_Div64, node);
}
#endif


void InstructionSelector::VisitUint32Div(Node* node) {
  VisitRRR(this, kPPC_DivU32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitUint64Div(Node* node) {
  VisitRRR(this, kPPC_DivU64, node);
}
#endif


void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitRRR(this, kPPC_Mod32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitRRR(this, kPPC_Mod64, node);
}
#endif


void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitRRR(this, kPPC_ModU32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitRRR(this, kPPC_ModU64, node);
}
#endif


void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Float32ToDouble, node);
}


void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  VisitRR(this, kPPC_Int32ToFloat32, node);
}


void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  VisitRR(this, kPPC_Uint32ToFloat32, node);
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Int32ToDouble, node);
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Uint32ToDouble, node);
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}

void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}

#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
}


void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
}


void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
}


void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
}


void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_ExtendSignWord32, node);
}


void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_Uint32ToUint64, node);
}
#endif


void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRR(this, kPPC_DoubleToFloat32, node);
}

void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, kArchTruncateDoubleToI, node);
}

void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}


void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}


void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_Int64ToInt32, node);
}


void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  VisitRR(this, kPPC_Int64ToFloat32, node);
}


void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  VisitRR(this, kPPC_Int64ToDouble, node);
}


void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  VisitRR(this, kPPC_Uint64ToFloat32, node);
}


void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  VisitRR(this, kPPC_Uint64ToDouble, node);
}
#endif


void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kPPC_BitcastFloat32ToInt32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  VisitRR(this, kPPC_BitcastDoubleToInt64, node);
}
#endif


void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  VisitRR(this, kPPC_BitcastInt32ToFloat32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  VisitRR(this, kPPC_BitcastInt64ToDouble, node);
}
#endif


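// For the shared *Double opcodes below, MiscField::encode(1) marks the
// single-precision (float32) variant for the code generator.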
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitRRR(this, kPPC_AddDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64Add(Node* node) {
  // TODO(mbrandy): detect multiply-add
  VisitRRR(this, kPPC_AddDouble, node);
}


void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitRRR(this, kPPC_SubDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Sub(Node* node) {
  // TODO(mbrandy): detect multiply-subtract
  VisitRRR(this, kPPC_SubDouble, node);
}

void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kPPC_MulDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64Mul(Node* node) {
  // TODO(mbrandy): detect negate
  VisitRRR(this, kPPC_MulDouble, node);
}


void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kPPC_DivDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kPPC_DivDouble, node);
}


void InstructionSelector::VisitFloat64Mod(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_ModDouble, g.DefineAsFixed(node, d1),
       g.UseFixed(node->InputAt(0), d1),
       g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
}

void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRR(this, kPPC_MaxDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRR(this, kPPC_MaxDouble, node);
}


void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
  VisitRR(this, kPPC_Float64SilenceNaN, node);
}

void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRR(this, kPPC_MinDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRR(this, kPPC_MinDouble, node);
}


void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kPPC_AbsDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kPPC_AbsDouble, node);
}

void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kPPC_SqrtDouble | MiscField::encode(1), node);
}

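// The ieee754 unops/binops are lowered to calls into V8's ieee754 runtime
// routines; operands are fixed to d1/d2 to match the call's double-register
// convention, and the instruction is marked as a call.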
   1404 void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
   1405                                                   InstructionCode opcode) {
   1406   PPCOperandGenerator g(this);
   1407   Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1))
   1408        ->MarkAsCall();
   1409 }
   1410 
   1411 void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
   1412                                                   InstructionCode opcode) {
   1413   PPCOperandGenerator g(this);
   1414   Emit(opcode, g.DefineAsFixed(node, d1),
   1415        g.UseFixed(node->InputAt(0), d1),
   1416        g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
   1417 }
   1418 
   1419 void InstructionSelector::VisitFloat64Sqrt(Node* node) {
   1420   VisitRR(this, kPPC_SqrtDouble, node);
   1421 }
   1422 
   1423 
   1424 void InstructionSelector::VisitFloat32RoundDown(Node* node) {
   1425   VisitRR(this, kPPC_FloorDouble | MiscField::encode(1), node);
   1426 }
   1427 
   1428 
   1429 void InstructionSelector::VisitFloat64RoundDown(Node* node) {
   1430   VisitRR(this, kPPC_FloorDouble, node);
   1431 }
   1432 
   1433 
   1434 void InstructionSelector::VisitFloat32RoundUp(Node* node) {
   1435   VisitRR(this, kPPC_CeilDouble | MiscField::encode(1), node);
   1436 }
   1437 
   1438 
   1439 void InstructionSelector::VisitFloat64RoundUp(Node* node) {
   1440   VisitRR(this, kPPC_CeilDouble, node);
   1441 }
   1442 
   1443 
   1444 void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
   1445   VisitRR(this, kPPC_TruncateDouble | MiscField::encode(1), node);
   1446 }
   1447 
   1448 
   1449 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
   1450   VisitRR(this, kPPC_TruncateDouble, node);
   1451 }
   1452 
   1453 
   1454 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
   1455   VisitRR(this, kPPC_RoundDouble, node);
   1456 }
   1457 
   1458 
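        // Round-ties-even is not advertised in SupportedMachineOperatorFlags()
        // below, so these nodes should never reach the instruction selector.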
   1459 void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
   1460   UNREACHABLE();
   1461 }
   1462 
   1463 
   1464 void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
   1465   UNREACHABLE();
   1466 }
   1467 
   1468 void InstructionSelector::VisitFloat32Neg(Node* node) {
   1469   VisitRR(this, kPPC_NegDouble, node);
   1470 }
   1471 
   1472 void InstructionSelector::VisitFloat64Neg(Node* node) {
   1473   VisitRR(this, kPPC_NegDouble, node);
   1474 }
   1475 
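        // For the *WithOverflow operations: if the overflow projection has
        // uses, emit the binop with a continuation that materializes the
        // kOverflow condition; otherwise use an empty continuation and only
        // produce the value result.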
   1476 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
   1477   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
   1478     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
   1479     return VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32,
   1480                                          kInt16Imm, &cont);
   1481   }
   1482   FlagsContinuation cont;
   1483   VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32, kInt16Imm,
   1484                                 &cont);
   1485 }
   1486 
   1487 
   1488 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
   1489   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
   1490     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
   1491     return VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
   1492                                          kInt16Imm_Negate, &cont);
   1493   }
   1494   FlagsContinuation cont;
   1495   VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
   1496                                 kInt16Imm_Negate, &cont);
   1497 }
   1498 
   1499 
   1500 #if V8_TARGET_ARCH_PPC64
   1501 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
   1502   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
   1503     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
   1504     return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm,
   1505                                          &cont);
   1506   }
   1507   FlagsContinuation cont;
   1508   VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add64, kInt16Imm, &cont);
   1509 }
   1510 
   1511 
   1512 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
   1513   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
   1514     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
   1515     return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate,
   1516                                          &cont);
   1517   }
   1518   FlagsContinuation cont;
   1519   VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, &cont);
   1520 }
   1521 #endif
   1522 
   1523 
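        // Returns true for unsigned conditions, in which case the comparison
        // must be emitted as a logical (unsigned) compare and any immediate
        // operand treated as an unsigned 16-bit value.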
   1524 static bool CompareLogical(FlagsContinuation* cont) {
   1525   switch (cont->condition()) {
   1526     case kUnsignedLessThan:
   1527     case kUnsignedGreaterThanOrEqual:
   1528     case kUnsignedLessThanOrEqual:
   1529     case kUnsignedGreaterThan:
   1530       return true;
   1531     default:
   1532       return false;
   1533   }
   1536 }
   1537 
   1538 
   1539 namespace {
   1540 
   1541 // Shared routine for multiple compare operations.
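        // The flags continuation determines how the condition is consumed:
        // as a branch, a deoptimization check, a materialized boolean, or a
        // trap.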
   1542 void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
   1543                   InstructionOperand left, InstructionOperand right,
   1544                   FlagsContinuation* cont) {
   1545   PPCOperandGenerator g(selector);
   1546   opcode = cont->Encode(opcode);
   1547   if (cont->IsBranch()) {
   1548     selector->Emit(opcode, g.NoOutput(), left, right,
   1549                    g.Label(cont->true_block()), g.Label(cont->false_block()));
   1550   } else if (cont->IsDeoptimize()) {
   1551     selector->EmitDeoptimize(opcode, g.NoOutput(), left, right, cont->kind(),
   1552                              cont->reason(), cont->frame_state());
   1553   } else if (cont->IsSet()) {
   1554     selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
   1555   } else {
   1556     DCHECK(cont->IsTrap());
   1557     selector->Emit(opcode, g.NoOutput(), left, right,
   1558                    g.UseImmediate(cont->trap_id()));
   1559   }
   1560 }
   1561 
   1562 
   1563 // Shared routine for multiple word compare operations.
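        // An immediate operand is matched on either side; when the immediate
        // is on the left of a non-commutative compare, the operands are
        // swapped and the continuation's condition is commuted to compensate.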
   1564 void VisitWordCompare(InstructionSelector* selector, Node* node,
   1565                       InstructionCode opcode, FlagsContinuation* cont,
   1566                       bool commutative, ImmediateMode immediate_mode) {
   1567   PPCOperandGenerator g(selector);
   1568   Node* left = node->InputAt(0);
   1569   Node* right = node->InputAt(1);
   1570 
   1571   // Match immediates on left or right side of comparison.
   1572   if (g.CanBeImmediate(right, immediate_mode)) {
   1573     VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
   1574                  cont);
   1575   } else if (g.CanBeImmediate(left, immediate_mode)) {
   1576     if (!commutative) cont->Commute();
   1577     VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
   1578                  cont);
   1579   } else {
   1580     VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
   1581                  cont);
   1582   }
   1583 }
   1584 
   1585 
   1586 void VisitWord32Compare(InstructionSelector* selector, Node* node,
   1587                         FlagsContinuation* cont) {
   1588   ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
   1589   VisitWordCompare(selector, node, kPPC_Cmp32, cont, false, mode);
   1590 }
   1591 
   1592 
   1593 #if V8_TARGET_ARCH_PPC64
   1594 void VisitWord64Compare(InstructionSelector* selector, Node* node,
   1595                         FlagsContinuation* cont) {
   1596   ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
   1597   VisitWordCompare(selector, node, kPPC_Cmp64, cont, false, mode);
   1598 }
   1599 #endif
   1600 
   1601 
   1602 // Shared routine for multiple float32 compare operations.
   1603 void VisitFloat32Compare(InstructionSelector* selector, Node* node,
   1604                          FlagsContinuation* cont) {
   1605   PPCOperandGenerator g(selector);
   1606   Node* left = node->InputAt(0);
   1607   Node* right = node->InputAt(1);
   1608   VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
   1609                g.UseRegister(right), cont);
   1610 }
   1611 
   1612 
   1613 // Shared routine for multiple float64 compare operations.
   1614 void VisitFloat64Compare(InstructionSelector* selector, Node* node,
   1615                          FlagsContinuation* cont) {
   1616   PPCOperandGenerator g(selector);
   1617   Node* left = node->InputAt(0);
   1618   Node* right = node->InputAt(1);
   1619   VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
   1620                g.UseRegister(right), cont);
   1621 }
   1622 
   1623 
   1624 // Shared routine for word comparisons against zero.
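        // First strips Word32Equal-with-zero wrappers by negating the
        // continuation, then tries to fuse a covered comparison (or overflow
        // projection) into the branch; if nothing matches, falls back to an
        // explicit compare of the value against zero.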
   1625 void VisitWordCompareZero(InstructionSelector* selector, Node* user,
   1626                           Node* value, InstructionCode opcode,
   1627                           FlagsContinuation* cont) {
   1628   // Try to combine with comparisons against 0 by simply inverting the branch.
   1629   while (value->opcode() == IrOpcode::kWord32Equal &&
   1630          selector->CanCover(user, value)) {
   1631     Int32BinopMatcher m(value);
   1632     if (!m.right().Is(0)) break;
   1633 
   1634     user = value;
   1635     value = m.left().node();
   1636     cont->Negate();
   1637   }
   1638 
   1639   if (selector->CanCover(user, value)) {
   1640     switch (value->opcode()) {
   1641       case IrOpcode::kWord32Equal:
   1642         cont->OverwriteAndNegateIfEqual(kEqual);
   1643         return VisitWord32Compare(selector, value, cont);
   1644       case IrOpcode::kInt32LessThan:
   1645         cont->OverwriteAndNegateIfEqual(kSignedLessThan);
   1646         return VisitWord32Compare(selector, value, cont);
   1647       case IrOpcode::kInt32LessThanOrEqual:
   1648         cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
   1649         return VisitWord32Compare(selector, value, cont);
   1650       case IrOpcode::kUint32LessThan:
   1651         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
   1652         return VisitWord32Compare(selector, value, cont);
   1653       case IrOpcode::kUint32LessThanOrEqual:
   1654         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
   1655         return VisitWord32Compare(selector, value, cont);
   1656 #if V8_TARGET_ARCH_PPC64
   1657       case IrOpcode::kWord64Equal:
   1658         cont->OverwriteAndNegateIfEqual(kEqual);
   1659         return VisitWord64Compare(selector, value, cont);
   1660       case IrOpcode::kInt64LessThan:
   1661         cont->OverwriteAndNegateIfEqual(kSignedLessThan);
   1662         return VisitWord64Compare(selector, value, cont);
   1663       case IrOpcode::kInt64LessThanOrEqual:
   1664         cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
   1665         return VisitWord64Compare(selector, value, cont);
   1666       case IrOpcode::kUint64LessThan:
   1667         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
   1668         return VisitWord64Compare(selector, value, cont);
   1669       case IrOpcode::kUint64LessThanOrEqual:
   1670         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
   1671         return VisitWord64Compare(selector, value, cont);
   1672 #endif
   1673       case IrOpcode::kFloat32Equal:
   1674         cont->OverwriteAndNegateIfEqual(kEqual);
   1675         return VisitFloat32Compare(selector, value, cont);
   1676       case IrOpcode::kFloat32LessThan:
   1677         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
   1678         return VisitFloat32Compare(selector, value, cont);
   1679       case IrOpcode::kFloat32LessThanOrEqual:
   1680         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
   1681         return VisitFloat32Compare(selector, value, cont);
   1682       case IrOpcode::kFloat64Equal:
   1683         cont->OverwriteAndNegateIfEqual(kEqual);
   1684         return VisitFloat64Compare(selector, value, cont);
   1685       case IrOpcode::kFloat64LessThan:
   1686         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
   1687         return VisitFloat64Compare(selector, value, cont);
   1688       case IrOpcode::kFloat64LessThanOrEqual:
   1689         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
   1690         return VisitFloat64Compare(selector, value, cont);
   1691       case IrOpcode::kProjection:
   1692         // Check if this is the overflow output projection of an
   1693         // <Operation>WithOverflow node.
   1694         if (ProjectionIndexOf(value->op()) == 1u) {
   1695           // We cannot combine the <Operation>WithOverflow with this branch
   1696           // unless the 0th projection (the use of the actual value of the
   1697           // <Operation>) is either nullptr, which means there's no use of
   1698           // the actual value, or was already defined, which means it is
   1699           // scheduled *AFTER* this branch.
   1700           Node* const node = value->InputAt(0);
   1701           Node* const result = NodeProperties::FindProjection(node, 0);
   1702           if (result == nullptr || selector->IsDefined(result)) {
   1703             switch (node->opcode()) {
   1704               case IrOpcode::kInt32AddWithOverflow:
   1705                 cont->OverwriteAndNegateIfEqual(kOverflow);
   1706                 return VisitBinop<Int32BinopMatcher>(
   1707                     selector, node, kPPC_AddWithOverflow32, kInt16Imm, cont);
   1708               case IrOpcode::kInt32SubWithOverflow:
   1709                 cont->OverwriteAndNegateIfEqual(kOverflow);
   1710                 return VisitBinop<Int32BinopMatcher>(selector, node,
   1711                                                      kPPC_SubWithOverflow32,
   1712                                                      kInt16Imm_Negate, cont);
   1713               case IrOpcode::kInt32MulWithOverflow:
   1714                 cont->OverwriteAndNegateIfEqual(kNotEqual);
   1715                 return EmitInt32MulWithOverflow(selector, node, cont);
   1716 #if V8_TARGET_ARCH_PPC64
   1717               case IrOpcode::kInt64AddWithOverflow:
   1718                 cont->OverwriteAndNegateIfEqual(kOverflow);
   1719                 return VisitBinop<Int64BinopMatcher>(selector, node, kPPC_Add64,
   1720                                                      kInt16Imm, cont);
   1721               case IrOpcode::kInt64SubWithOverflow:
   1722                 cont->OverwriteAndNegateIfEqual(kOverflow);
   1723                 return VisitBinop<Int64BinopMatcher>(selector, node, kPPC_Sub,
   1724                                                      kInt16Imm_Negate, cont);
   1725 #endif
   1726               default:
   1727                 break;
   1728             }
   1729           }
   1730         }
   1731         break;
   1732       case IrOpcode::kInt32Sub:
   1733         return VisitWord32Compare(selector, value, cont);
   1734       case IrOpcode::kWord32And:
   1735         // TODO(mbrandy): opportunity for rlwinm?
   1736         return VisitWordCompare(selector, value, kPPC_Tst32, cont, true,
   1737                                 kInt16Imm_Unsigned);
   1738 // TODO(mbrandy): Handle?
   1739 // case IrOpcode::kInt32Add:
   1740 // case IrOpcode::kWord32Or:
   1741 // case IrOpcode::kWord32Xor:
   1742 // case IrOpcode::kWord32Sar:
   1743 // case IrOpcode::kWord32Shl:
   1744 // case IrOpcode::kWord32Shr:
   1745 // case IrOpcode::kWord32Ror:
   1746 #if V8_TARGET_ARCH_PPC64
   1747       case IrOpcode::kInt64Sub:
   1748         return VisitWord64Compare(selector, value, cont);
   1749       case IrOpcode::kWord64And:
   1750         // TODO(mbrandy): opportunity for rldic?
   1751         return VisitWordCompare(selector, value, kPPC_Tst64, cont, true,
   1752                                 kInt16Imm_Unsigned);
   1753 // TODO(mbrandy): Handle?
   1754 // case IrOpcode::kInt64Add:
   1755 // case IrOpcode::kWord64Or:
   1756 // case IrOpcode::kWord64Xor:
   1757 // case IrOpcode::kWord64Sar:
   1758 // case IrOpcode::kWord64Shl:
   1759 // case IrOpcode::kWord64Shr:
   1760 // case IrOpcode::kWord64Ror:
   1761 #endif
   1762       default:
   1763         break;
   1764     }
   1765   }
   1766 
   1767   // Branch could not be combined with a compare, emit compare against 0.
   1768   PPCOperandGenerator g(selector);
   1769   VisitCompare(selector, opcode, g.UseRegister(value), g.TempImmediate(0),
   1770                cont);
   1771 }
   1772 
   1773 
   1774 void VisitWord32CompareZero(InstructionSelector* selector, Node* user,
   1775                             Node* value, FlagsContinuation* cont) {
   1776   VisitWordCompareZero(selector, user, value, kPPC_Cmp32, cont);
   1777 }
   1778 
   1779 
   1780 #if V8_TARGET_ARCH_PPC64
   1781 void VisitWord64CompareZero(InstructionSelector* selector, Node* user,
   1782                             Node* value, FlagsContinuation* cont) {
   1783   VisitWordCompareZero(selector, user, value, kPPC_Cmp64, cont);
   1784 }
   1785 #endif
   1786 
   1787 }  // namespace
   1788 
   1789 
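        // Branches, deoptimize-if/unless and trap-if/unless all test their
        // condition input against zero via VisitWord32CompareZero; they
        // differ only in the flags continuation they build.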
   1790 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
   1791                                       BasicBlock* fbranch) {
   1792   FlagsContinuation cont(kNotEqual, tbranch, fbranch);
   1793   VisitWord32CompareZero(this, branch, branch->InputAt(0), &cont);
   1794 }
   1795 
   1796 void InstructionSelector::VisitDeoptimizeIf(Node* node) {
   1797   DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
   1798   FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
   1799       kNotEqual, p.kind(), p.reason(), node->InputAt(1));
   1800   VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
   1801 }
   1802 
   1803 void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
   1804   DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
   1805   FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
   1806       kEqual, p.kind(), p.reason(), node->InputAt(1));
   1807   VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
   1808 }
   1809 
   1810 void InstructionSelector::VisitTrapIf(Node* node, Runtime::FunctionId func_id) {
   1811   FlagsContinuation cont =
   1812       FlagsContinuation::ForTrap(kNotEqual, func_id, node->InputAt(1));
   1813   VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
   1814 }
   1815 
   1816 void InstructionSelector::VisitTrapUnless(Node* node,
   1817                                           Runtime::FunctionId func_id) {
   1818   FlagsContinuation cont =
   1819       FlagsContinuation::ForTrap(kEqual, func_id, node->InputAt(1));
   1820   VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
   1821 }
   1822 
   1823 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
   1824   PPCOperandGenerator g(this);
   1825   InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
   1826 
   1827   // Emit either ArchTableSwitch or ArchLookupSwitch.
   1828   size_t table_space_cost = 4 + sw.value_range;
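          // The constants below are rough size/time estimates. A jump table is
          // used only when there is at least one case, its 3x-time-weighted
          // cost is no worse than a chain of conditional jumps, and the value
          // can be biased by min_value without overflowing.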
   1829   size_t table_time_cost = 3;
   1830   size_t lookup_space_cost = 3 + 2 * sw.case_count;
   1831   size_t lookup_time_cost = sw.case_count;
   1832   if (sw.case_count > 0 &&
   1833       table_space_cost + 3 * table_time_cost <=
   1834           lookup_space_cost + 3 * lookup_time_cost &&
   1835       sw.min_value > std::numeric_limits<int32_t>::min()) {
   1836     InstructionOperand index_operand = value_operand;
   1837     if (sw.min_value) {
   1838       index_operand = g.TempRegister();
   1839       Emit(kPPC_Sub, index_operand, value_operand,
   1840            g.TempImmediate(sw.min_value));
   1841     }
   1842     // Generate a table lookup.
   1843     return EmitTableSwitch(sw, index_operand);
   1844   }
   1845 
   1846   // Generate a sequence of conditional jumps.
   1847   return EmitLookupSwitch(sw, value_operand);
   1848 }
   1849 
   1850 
   1851 void InstructionSelector::VisitWord32Equal(Node* const node) {
   1852   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   1853   Int32BinopMatcher m(node);
   1854   if (m.right().Is(0)) {
   1855     return VisitWord32CompareZero(this, m.node(), m.left().node(), &cont);
   1856   }
   1857   VisitWord32Compare(this, node, &cont);
   1858 }
   1859 
   1860 
   1861 void InstructionSelector::VisitInt32LessThan(Node* node) {
   1862   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
   1863   VisitWord32Compare(this, node, &cont);
   1864 }
   1865 
   1866 
   1867 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
   1868   FlagsContinuation cont =
   1869       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
   1870   VisitWord32Compare(this, node, &cont);
   1871 }
   1872 
   1873 
   1874 void InstructionSelector::VisitUint32LessThan(Node* node) {
   1875   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
   1876   VisitWord32Compare(this, node, &cont);
   1877 }
   1878 
   1879 
   1880 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
   1881   FlagsContinuation cont =
   1882       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
   1883   VisitWord32Compare(this, node, &cont);
   1884 }
   1885 
   1886 
   1887 #if V8_TARGET_ARCH_PPC64
   1888 void InstructionSelector::VisitWord64Equal(Node* const node) {
   1889   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   1890   Int64BinopMatcher m(node);
   1891   if (m.right().Is(0)) {
   1892     return VisitWord64CompareZero(this, m.node(), m.left().node(), &cont);
   1893   }
   1894   VisitWord64Compare(this, node, &cont);
   1895 }
   1896 
   1897 
   1898 void InstructionSelector::VisitInt64LessThan(Node* node) {
   1899   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
   1900   VisitWord64Compare(this, node, &cont);
   1901 }
   1902 
   1903 
   1904 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
   1905   FlagsContinuation cont =
   1906       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
   1907   VisitWord64Compare(this, node, &cont);
   1908 }
   1909 
   1910 
   1911 void InstructionSelector::VisitUint64LessThan(Node* node) {
   1912   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
   1913   VisitWord64Compare(this, node, &cont);
   1914 }
   1915 
   1916 
   1917 void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
   1918   FlagsContinuation cont =
   1919       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
   1920   VisitWord64Compare(this, node, &cont);
   1921 }
   1922 #endif
   1923 
   1924 void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
   1925   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
   1926     FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf);
   1927     return EmitInt32MulWithOverflow(this, node, &cont);
   1928   }
   1929   FlagsContinuation cont;
   1930   EmitInt32MulWithOverflow(this, node, &cont);
   1931 }
   1932 
   1933 
   1934 void InstructionSelector::VisitFloat32Equal(Node* node) {
   1935   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   1936   VisitFloat32Compare(this, node, &cont);
   1937 }
   1938 
   1939 
   1940 void InstructionSelector::VisitFloat32LessThan(Node* node) {
   1941   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
   1942   VisitFloat32Compare(this, node, &cont);
   1943 }
   1944 
   1945 
   1946 void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
   1947   FlagsContinuation cont =
   1948       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
   1949   VisitFloat32Compare(this, node, &cont);
   1950 }
   1951 
   1952 
   1953 void InstructionSelector::VisitFloat64Equal(Node* node) {
   1954   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
   1955   VisitFloat64Compare(this, node, &cont);
   1956 }
   1957 
   1958 
   1959 void InstructionSelector::VisitFloat64LessThan(Node* node) {
   1960   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
   1961   VisitFloat64Compare(this, node, &cont);
   1962 }
   1963 
   1964 
   1965 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
   1966   FlagsContinuation cont =
   1967       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
   1968   VisitFloat64Compare(this, node, &cont);
   1969 }
   1970 
   1971 
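        // For C calls, kArchPrepareCallCFunction sets up the outgoing area and
        // each argument is stored to its stack slot starting at
        // kStackFrameExtraParamSlot. For other calls, the first pushed
        // parameter allocates the whole frame via kPPC_PushFrame and the rest
        // are stored into their slots, skipping alignment holes.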
   1972 void InstructionSelector::EmitPrepareArguments(
   1973     ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
   1974     Node* node) {
   1975   PPCOperandGenerator g(this);
   1976 
   1977   // Prepare for C function call.
   1978   if (descriptor->IsCFunctionCall()) {
   1979     Emit(kArchPrepareCallCFunction |
   1980              MiscField::encode(static_cast<int>(descriptor->ParameterCount())),
   1981          0, nullptr, 0, nullptr);
   1982 
   1983     // Poke any stack arguments.
   1984     int slot = kStackFrameExtraParamSlot;
   1985     for (PushParameter input : (*arguments)) {
   1986       Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
   1987            g.TempImmediate(slot));
   1988       ++slot;
   1989     }
   1990   } else {
   1991     // Push any stack arguments.
   1992     int num_slots = static_cast<int>(descriptor->StackParameterCount());
   1993     int slot = 0;
   1994     for (PushParameter input : (*arguments)) {
   1995       if (slot == 0) {
   1996         DCHECK(input.node());
   1997         Emit(kPPC_PushFrame, g.NoOutput(), g.UseRegister(input.node()),
   1998              g.TempImmediate(num_slots));
   1999       } else {
   2000         // Skip any alignment holes in pushed nodes.
   2001         if (input.node()) {
   2002           Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
   2003                g.TempImmediate(slot));
   2004         }
   2005       }
   2006       ++slot;
   2007     }
   2008   }
   2009 }
   2010 
   2011 
   2012 bool InstructionSelector::IsTailCallAddressImmediate() { return false; }
   2013 
   2014 int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
   2015 
   2016 void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
   2017   PPCOperandGenerator g(this);
   2018   Emit(kPPC_DoubleExtractLowWord32, g.DefineAsRegister(node),
   2019        g.UseRegister(node->InputAt(0)));
   2020 }
   2021 
   2022 
   2023 void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
   2024   PPCOperandGenerator g(this);
   2025   Emit(kPPC_DoubleExtractHighWord32, g.DefineAsRegister(node),
   2026        g.UseRegister(node->InputAt(0)));
   2027 }
   2028 
   2029 
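        // An InsertLowWord32/InsertHighWord32 pair in which one covers the
        // other is fused into a single kPPC_DoubleConstruct that assembles the
        // double from its two 32-bit halves.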
   2030 void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
   2031   PPCOperandGenerator g(this);
   2032   Node* left = node->InputAt(0);
   2033   Node* right = node->InputAt(1);
   2034   if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
   2035       CanCover(node, left)) {
   2036     left = left->InputAt(1);
   2037     Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
   2038          g.UseRegister(right));
   2039     return;
   2040   }
   2041   Emit(kPPC_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
   2042        g.UseRegister(left), g.UseRegister(right));
   2043 }
   2044 
   2045 
   2046 void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
   2047   PPCOperandGenerator g(this);
   2048   Node* left = node->InputAt(0);
   2049   Node* right = node->InputAt(1);
   2050   if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
   2051       CanCover(node, left)) {
   2052     left = left->InputAt(1);
   2053     Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
   2054          g.UseRegister(left));
   2055     return;
   2056   }
   2057   Emit(kPPC_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
   2058        g.UseRegister(left), g.UseRegister(right));
   2059 }
   2060 
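        // Atomic loads and stores select a width-specific opcode from the
        // machine representation and address memory as base + index (MRR);
        // unsupported widths hit UNREACHABLE().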
   2061 void InstructionSelector::VisitAtomicLoad(Node* node) {
   2062   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
   2063   PPCOperandGenerator g(this);
   2064   Node* base = node->InputAt(0);
   2065   Node* index = node->InputAt(1);
   2066   ArchOpcode opcode = kArchNop;
   2067   switch (load_rep.representation()) {
   2068     case MachineRepresentation::kWord8:
   2069       opcode = load_rep.IsSigned() ? kAtomicLoadInt8 : kAtomicLoadUint8;
   2070       break;
   2071     case MachineRepresentation::kWord16:
   2072       opcode = load_rep.IsSigned() ? kAtomicLoadInt16 : kAtomicLoadUint16;
   2073       break;
   2074     case MachineRepresentation::kWord32:
   2075       opcode = kAtomicLoadWord32;
   2076       break;
   2077     default:
   2078       UNREACHABLE();
   2079       return;
   2080   }
   2081   Emit(opcode | AddressingModeField::encode(kMode_MRR),
   2082        g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
   2083 }
   2084 
   2085 void InstructionSelector::VisitAtomicStore(Node* node) {
   2086   MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
   2087   PPCOperandGenerator g(this);
   2088   Node* base = node->InputAt(0);
   2089   Node* index = node->InputAt(1);
   2090   Node* value = node->InputAt(2);
   2091   ArchOpcode opcode = kArchNop;
   2092   switch (rep) {
   2093     case MachineRepresentation::kWord8:
   2094       opcode = kAtomicStoreWord8;
   2095       break;
   2096     case MachineRepresentation::kWord16:
   2097       opcode = kAtomicStoreWord16;
   2098       break;
   2099     case MachineRepresentation::kWord32:
   2100       opcode = kAtomicStoreWord32;
   2101       break;
   2102     default:
   2103       UNREACHABLE();
   2104       return;
   2105   }
   2106 
   2107   InstructionOperand inputs[4];
   2108   size_t input_count = 0;
   2109   inputs[input_count++] = g.UseUniqueRegister(base);
   2110   inputs[input_count++] = g.UseUniqueRegister(index);
   2111   inputs[input_count++] = g.UseUniqueRegister(value);
   2112   Emit(opcode | AddressingModeField::encode(kMode_MRR),
   2113        0, nullptr, input_count, inputs);
   2114 }
   2115 
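        // These flags advertise which optional machine operators this backend
        // selects natively; operators not listed here (e.g. round-ties-even)
        // are not generated for this target.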
   2116 // static
   2117 MachineOperatorBuilder::Flags
   2118 InstructionSelector::SupportedMachineOperatorFlags() {
   2119   return MachineOperatorBuilder::kFloat32RoundDown |
   2120          MachineOperatorBuilder::kFloat64RoundDown |
   2121          MachineOperatorBuilder::kFloat32RoundUp |
   2122          MachineOperatorBuilder::kFloat64RoundUp |
   2123          MachineOperatorBuilder::kFloat32RoundTruncate |
   2124          MachineOperatorBuilder::kFloat64RoundTruncate |
   2125          MachineOperatorBuilder::kFloat64RoundTiesAway |
   2126          MachineOperatorBuilder::kWord32Popcnt |
   2127          MachineOperatorBuilder::kWord64Popcnt;
   2128   // We omit kWord32ShiftIsSafe as s[rl]w use 0x3f as a mask rather than 0x1f.
   2129 }
   2130 
   2131 // static
   2132 MachineOperatorBuilder::AlignmentRequirements
   2133 InstructionSelector::AlignmentRequirements() {
   2134   return MachineOperatorBuilder::AlignmentRequirements::
   2135       FullUnalignedAccessSupport();
   2136 }
   2137 
   2138 }  // namespace compiler
   2139 }  // namespace internal
   2140 }  // namespace v8
   2141