      1 // Copyright 2014 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/compiler/code-generator.h"
      6 
      7 #include "src/ast/scopes.h"
      8 #include "src/compiler/code-generator-impl.h"
      9 #include "src/compiler/gap-resolver.h"
     10 #include "src/compiler/node-matchers.h"
     11 #include "src/compiler/osr.h"
     12 #include "src/ppc/macro-assembler-ppc.h"
     13 
     14 namespace v8 {
     15 namespace internal {
     16 namespace compiler {
     17 
     18 #define __ masm()->
     19 
     20 
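        // General-purpose scratch register used by the code-generation macros below.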
     21 #define kScratchReg r11
     22 
     23 
     24 // Adds PPC-specific methods to convert InstructionOperands.
     25 class PPCOperandConverter final : public InstructionOperandConverter {
     26  public:
     27   PPCOperandConverter(CodeGenerator* gen, Instruction* instr)
     28       : InstructionOperandConverter(gen, instr) {}
     29 
     30   size_t OutputCount() { return instr_->OutputCount(); }
     31 
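          // The RC (record) bit makes an instruction also update condition register
          // field CR0. It is requested whenever the instruction's flags are consumed
          // (branch, deoptimization or a materialized boolean).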
     32   RCBit OutputRCBit() const {
     33     switch (instr_->flags_mode()) {
     34       case kFlags_branch:
     35       case kFlags_deoptimize:
     36       case kFlags_set:
     37         return SetRC;
     38       case kFlags_none:
     39         return LeaveRC;
     40     }
     41     UNREACHABLE();
     42     return LeaveRC;
     43   }
     44 
     45   bool CompareLogical() const {
     46     switch (instr_->flags_condition()) {
     47       case kUnsignedLessThan:
     48       case kUnsignedGreaterThanOrEqual:
     49       case kUnsignedLessThanOrEqual:
     50       case kUnsignedGreaterThan:
     51         return true;
     52       default:
     53         return false;
     54     }
     55     UNREACHABLE();
     56     return false;
     57   }
     58 
     59   Operand InputImmediate(size_t index) {
     60     Constant constant = ToConstant(instr_->InputAt(index));
     61     switch (constant.type()) {
     62       case Constant::kInt32:
     63         return Operand(constant.ToInt32());
     64       case Constant::kFloat32:
     65         return Operand(
     66             isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
     67       case Constant::kFloat64:
     68         return Operand(
     69             isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
     70       case Constant::kInt64:
     71 #if V8_TARGET_ARCH_PPC64
     72         return Operand(constant.ToInt64());
     73 #endif
     74       case Constant::kExternalReference:
     75       case Constant::kHeapObject:
     76       case Constant::kRpoNumber:
     77         break;
     78     }
     79     UNREACHABLE();
     80     return Operand::Zero();
     81   }
     82 
     83   MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) {
     84     const size_t index = *first_index;
     85     *mode = AddressingModeField::decode(instr_->opcode());
     86     switch (*mode) {
     87       case kMode_None:
     88         break;
     89       case kMode_MRI:
     90         *first_index += 2;
     91         return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
     92       case kMode_MRR:
     93         *first_index += 2;
     94         return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
     95     }
     96     UNREACHABLE();
     97     return MemOperand(r0);
     98   }
     99 
    100   MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) {
    101     return MemoryOperand(mode, &first_index);
    102   }
    103 
    104   MemOperand ToMemOperand(InstructionOperand* op) const {
    105     DCHECK_NOT_NULL(op);
    106     DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    107     return SlotToMemOperand(AllocatedOperand::cast(op)->index());
    108   }
    109 
    110   MemOperand SlotToMemOperand(int slot) const {
    111     FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    112     return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
    113   }
    114 };
    115 
    116 
    117 static inline bool HasRegisterInput(Instruction* instr, size_t index) {
    118   return instr->InputAt(index)->IsRegister();
    119 }
    120 
    121 
    122 namespace {
    123 
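        // Out-of-line paths used by the checked load macros below; they materialize a
        // quiet NaN (float loads) or zero (integer loads) when the index is out of
        // bounds.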
    124 class OutOfLineLoadNAN32 final : public OutOfLineCode {
    125  public:
    126   OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result)
    127       : OutOfLineCode(gen), result_(result) {}
    128 
    129   void Generate() final {
    130     __ LoadDoubleLiteral(result_, std::numeric_limits<float>::quiet_NaN(),
    131                          kScratchReg);
    132   }
    133 
    134  private:
    135   DoubleRegister const result_;
    136 };
    137 
    138 
    139 class OutOfLineLoadNAN64 final : public OutOfLineCode {
    140  public:
    141   OutOfLineLoadNAN64(CodeGenerator* gen, DoubleRegister result)
    142       : OutOfLineCode(gen), result_(result) {}
    143 
    144   void Generate() final {
    145     __ LoadDoubleLiteral(result_, std::numeric_limits<double>::quiet_NaN(),
    146                          kScratchReg);
    147   }
    148 
    149  private:
    150   DoubleRegister const result_;
    151 };
    152 
    153 
    154 class OutOfLineLoadZero final : public OutOfLineCode {
    155  public:
    156   OutOfLineLoadZero(CodeGenerator* gen, Register result)
    157       : OutOfLineCode(gen), result_(result) {}
    158 
    159   void Generate() final { __ li(result_, Operand::Zero()); }
    160 
    161  private:
    162   Register const result_;
    163 };
    164 
    165 
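        // Out-of-line slow path of the write barrier emitted for
        // kArchStoreWithWriteBarrier: it skips Smi values when permitted, filters on
        // the value's page flags, and calls the RecordWriteStub, saving and restoring
        // LR when no frame was constructed.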
    166 class OutOfLineRecordWrite final : public OutOfLineCode {
    167  public:
    168   OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset,
    169                        Register value, Register scratch0, Register scratch1,
    170                        RecordWriteMode mode)
    171       : OutOfLineCode(gen),
    172         object_(object),
    173         offset_(offset),
    174         offset_immediate_(0),
    175         value_(value),
    176         scratch0_(scratch0),
    177         scratch1_(scratch1),
    178         mode_(mode),
    179         must_save_lr_(!gen->frame_access_state()->has_frame()) {}
    180 
    181   OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset,
    182                        Register value, Register scratch0, Register scratch1,
    183                        RecordWriteMode mode)
    184       : OutOfLineCode(gen),
    185         object_(object),
    186         offset_(no_reg),
    187         offset_immediate_(offset),
    188         value_(value),
    189         scratch0_(scratch0),
    190         scratch1_(scratch1),
    191         mode_(mode),
    192         must_save_lr_(!gen->frame_access_state()->has_frame()) {}
    193 
    194   void Generate() final {
    195     if (mode_ > RecordWriteMode::kValueIsPointer) {
    196       __ JumpIfSmi(value_, exit());
    197     }
    198     __ CheckPageFlag(value_, scratch0_,
    199                      MemoryChunk::kPointersToHereAreInterestingMask, eq,
    200                      exit());
    201     RememberedSetAction const remembered_set_action =
    202         mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
    203                                              : OMIT_REMEMBERED_SET;
    204     SaveFPRegsMode const save_fp_mode =
    205         frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    206     if (must_save_lr_) {
    207       // We need to save and restore lr if the frame was elided.
    208       __ mflr(scratch1_);
    209       __ Push(scratch1_);
    210     }
    211     RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
    212                          remembered_set_action, save_fp_mode);
    213     if (offset_.is(no_reg)) {
    214       __ addi(scratch1_, object_, Operand(offset_immediate_));
    215     } else {
    216       DCHECK_EQ(0, offset_immediate_);
    217       __ add(scratch1_, object_, offset_);
    218     }
    219     if (must_save_lr_ && FLAG_enable_embedded_constant_pool) {
    220       ConstantPoolUnavailableScope constant_pool_unavailable(masm());
    221       __ CallStub(&stub);
    222     } else {
    223       __ CallStub(&stub);
    224     }
    225     if (must_save_lr_) {
    226       // We need to save and restore lr if the frame was elided.
    227       __ Pop(scratch1_);
    228       __ mtlr(scratch1_);
    229     }
    230   }
    231 
    232  private:
    233   Register const object_;
    234   Register const offset_;
    235   int32_t const offset_immediate_;  // Valid if offset_.is(no_reg).
    236   Register const value_;
    237   Register const scratch0_;
    238   Register const scratch1_;
    239   RecordWriteMode const mode_;
    240   bool must_save_lr_;
    241 };
    242 
    243 
    244 Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) {
    245   switch (condition) {
    246     case kEqual:
    247       return eq;
    248     case kNotEqual:
    249       return ne;
    250     case kSignedLessThan:
    251     case kUnsignedLessThan:
    252       return lt;
    253     case kSignedGreaterThanOrEqual:
    254     case kUnsignedGreaterThanOrEqual:
    255       return ge;
    256     case kSignedLessThanOrEqual:
    257     case kUnsignedLessThanOrEqual:
    258       return le;
    259     case kSignedGreaterThan:
    260     case kUnsignedGreaterThan:
    261       return gt;
    262     case kOverflow:
    263       // Overflow checked for add/sub only.
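              // The overflow macros leave an indicator in kScratchReg that is negative
              // exactly when signed overflow occurred, and CR0 is set from its sign
              // (see ASSEMBLE_ADD_WITH_OVERFLOW32), so overflow reads as lt and
              // no-overflow as ge.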
    264       switch (op) {
    265 #if V8_TARGET_ARCH_PPC64
    266         case kPPC_Add:
    267         case kPPC_Sub:
    268 #endif
    269         case kPPC_AddWithOverflow32:
    270         case kPPC_SubWithOverflow32:
    271           return lt;
    272         default:
    273           break;
    274       }
    275       break;
    276     case kNotOverflow:
    277       switch (op) {
    278 #if V8_TARGET_ARCH_PPC64
    279         case kPPC_Add:
    280         case kPPC_Sub:
    281 #endif
    282         case kPPC_AddWithOverflow32:
    283         case kPPC_SubWithOverflow32:
    284           return ge;
    285         default:
    286           break;
    287       }
    288       break;
    289     default:
    290       break;
    291   }
    292   UNREACHABLE();
    293   return kNoCondition;
    294 }
    295 
    296 }  // namespace
    297 
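        // The ASSEMBLE_* macros below expand inside AssembleArchInstruction, where
        // `instr` (the current Instruction) and `i` (a PPCOperandConverter wrapping
        // it) are in scope, e.g. ASSEMBLE_BINOP_RC(slw, slwi) for kPPC_ShiftLeft32.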
    298 #define ASSEMBLE_FLOAT_UNOP_RC(asm_instr, round)                     \
    299   do {                                                               \
    300     __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
    301                  i.OutputRCBit());                                   \
    302     if (round) {                                                     \
    303       __ frsp(i.OutputDoubleRegister(), i.OutputDoubleRegister());   \
    304     }                                                                \
    305   } while (0)
    306 
    307 #define ASSEMBLE_FLOAT_BINOP_RC(asm_instr, round)                    \
    308   do {                                                               \
    309     __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
    310                  i.InputDoubleRegister(1), i.OutputRCBit());         \
    311     if (round) {                                                     \
    312       __ frsp(i.OutputDoubleRegister(), i.OutputDoubleRegister());   \
    313     }                                                                \
    314   } while (0)
    315 
    316 #define ASSEMBLE_BINOP(asm_instr_reg, asm_instr_imm)           \
    317   do {                                                         \
    318     if (HasRegisterInput(instr, 1)) {                          \
    319       __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
    320                        i.InputRegister(1));                    \
    321     } else {                                                   \
    322       __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
    323                        i.InputImmediate(1));                   \
    324     }                                                          \
    325   } while (0)
    326 
    327 
    328 #define ASSEMBLE_BINOP_RC(asm_instr_reg, asm_instr_imm)        \
    329   do {                                                         \
    330     if (HasRegisterInput(instr, 1)) {                          \
    331       __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
    332                        i.InputRegister(1), i.OutputRCBit());   \
    333     } else {                                                   \
    334       __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
    335                        i.InputImmediate(1), i.OutputRCBit());  \
    336     }                                                          \
    337   } while (0)
    338 
    339 
    340 #define ASSEMBLE_BINOP_INT_RC(asm_instr_reg, asm_instr_imm)    \
    341   do {                                                         \
    342     if (HasRegisterInput(instr, 1)) {                          \
    343       __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
    344                        i.InputRegister(1), i.OutputRCBit());   \
    345     } else {                                                   \
    346       __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
    347                        i.InputInt32(1), i.OutputRCBit());      \
    348     }                                                          \
    349   } while (0)
    350 
    351 
    352 #define ASSEMBLE_ADD_WITH_OVERFLOW()                                    \
    353   do {                                                                  \
    354     if (HasRegisterInput(instr, 1)) {                                   \
    355       __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
    356                                 i.InputRegister(1), kScratchReg, r0);   \
    357     } else {                                                            \
    358       __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
    359                                 i.InputInt32(1), kScratchReg, r0);      \
    360     }                                                                   \
    361   } while (0)
    362 
    363 
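        // Note: for an immediate right-hand side, subtraction with overflow is
        // emitted as addition of the negated constant via AddAndCheckForOverflow.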
    364 #define ASSEMBLE_SUB_WITH_OVERFLOW()                                    \
    365   do {                                                                  \
    366     if (HasRegisterInput(instr, 1)) {                                   \
    367       __ SubAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
    368                                 i.InputRegister(1), kScratchReg, r0);   \
    369     } else {                                                            \
    370       __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
    371                                 -i.InputInt32(1), kScratchReg, r0);     \
    372     }                                                                   \
    373   } while (0)
    374 
    375 
    376 #if V8_TARGET_ARCH_PPC64
    377 #define ASSEMBLE_ADD_WITH_OVERFLOW32()         \
    378   do {                                         \
    379     ASSEMBLE_ADD_WITH_OVERFLOW();              \
    380     __ extsw(kScratchReg, kScratchReg, SetRC); \
    381   } while (0)
    382 
    383 #define ASSEMBLE_SUB_WITH_OVERFLOW32()         \
    384   do {                                         \
    385     ASSEMBLE_SUB_WITH_OVERFLOW();              \
    386     __ extsw(kScratchReg, kScratchReg, SetRC); \
    387   } while (0)
    388 #else
    389 #define ASSEMBLE_ADD_WITH_OVERFLOW32 ASSEMBLE_ADD_WITH_OVERFLOW
    390 #define ASSEMBLE_SUB_WITH_OVERFLOW32 ASSEMBLE_SUB_WITH_OVERFLOW
    391 #endif
    392 
    393 
    394 #define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr)                        \
    395   do {                                                                 \
    396     const CRegister cr = cr0;                                          \
    397     if (HasRegisterInput(instr, 1)) {                                  \
    398       if (i.CompareLogical()) {                                        \
    399         __ cmpl_instr(i.InputRegister(0), i.InputRegister(1), cr);     \
    400       } else {                                                         \
    401         __ cmp_instr(i.InputRegister(0), i.InputRegister(1), cr);      \
    402       }                                                                \
    403     } else {                                                           \
    404       if (i.CompareLogical()) {                                        \
    405         __ cmpl_instr##i(i.InputRegister(0), i.InputImmediate(1), cr); \
    406       } else {                                                         \
    407         __ cmp_instr##i(i.InputRegister(0), i.InputImmediate(1), cr);  \
    408       }                                                                \
    409     }                                                                  \
    410     DCHECK_EQ(SetRC, i.OutputRCBit());                                 \
    411   } while (0)
    412 
    413 
    414 #define ASSEMBLE_FLOAT_COMPARE(cmp_instr)                                 \
    415   do {                                                                    \
    416     const CRegister cr = cr0;                                             \
    417     __ cmp_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1), cr); \
    418     DCHECK_EQ(SetRC, i.OutputRCBit());                                    \
    419   } while (0)
    420 
    421 
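        // Integer modulo is synthesized as dividend - (dividend / divisor) * divisor.
        // For example, a 32-bit instantiation ASSEMBLE_MODULO(divw, mullw) emits
        // roughly:
        //   divw   scratch, dividend, divisor
        //   mullw  scratch, scratch, divisor
        //   sub    result, dividend, scratch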
    422 #define ASSEMBLE_MODULO(div_instr, mul_instr)                        \
    423   do {                                                               \
    424     const Register scratch = kScratchReg;                            \
    425     __ div_instr(scratch, i.InputRegister(0), i.InputRegister(1));   \
    426     __ mul_instr(scratch, scratch, i.InputRegister(1));              \
    427     __ sub(i.OutputRegister(), i.InputRegister(0), scratch, LeaveOE, \
    428            i.OutputRCBit());                                         \
    429   } while (0)
    430 
    431 
    432 #define ASSEMBLE_FLOAT_MODULO()                                               \
    433   do {                                                                        \
    434     FrameScope scope(masm(), StackFrame::MANUAL);                             \
    435     __ PrepareCallCFunction(0, 2, kScratchReg);                               \
    436     __ MovToFloatParameters(i.InputDoubleRegister(0),                         \
    437                             i.InputDoubleRegister(1));                        \
    438     __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()), \
    439                      0, 2);                                                   \
    440     __ MovFromFloatResult(i.OutputDoubleRegister());                          \
    441     DCHECK_EQ(LeaveRC, i.OutputRCBit());                                      \
    442   } while (0)
    443 
    444 #define ASSEMBLE_IEEE754_UNOP(name)                                            \
    445   do {                                                                         \
    446     /* TODO(bmeurer): We should really get rid of this special instruction, */ \
    447     /* and generate a CallAddress instruction instead. */                      \
    448     FrameScope scope(masm(), StackFrame::MANUAL);                              \
    449     __ PrepareCallCFunction(0, 1, kScratchReg);                                \
    450     __ MovToFloatParameter(i.InputDoubleRegister(0));                          \
    451     __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()),  \
    452                      0, 1);                                                    \
    453     /* Move the result to the double result register. */                       \
    454     __ MovFromFloatResult(i.OutputDoubleRegister());                           \
    455     DCHECK_EQ(LeaveRC, i.OutputRCBit());                                       \
    456   } while (0)
    457 
    458 #define ASSEMBLE_IEEE754_BINOP(name)                                           \
    459   do {                                                                         \
    460     /* TODO(bmeurer): We should really get rid of this special instruction, */ \
    461     /* and generate a CallAddress instruction instead. */                      \
    462     FrameScope scope(masm(), StackFrame::MANUAL);                              \
    463     __ PrepareCallCFunction(0, 2, kScratchReg);                                \
    464     __ MovToFloatParameters(i.InputDoubleRegister(0),                          \
    465                             i.InputDoubleRegister(1));                         \
    466     __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()),  \
    467                      0, 2);                                                    \
    468     /* Move the result to the double result register. */                       \
    469     __ MovFromFloatResult(i.OutputDoubleRegister());                           \
    470     DCHECK_EQ(LeaveRC, i.OutputRCBit());                                       \
    471   } while (0)
    472 
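        // fsel(d, c, x, y) computes d = (c >= 0.0) ? x : y, so selecting on the
        // difference a - b yields max(a, b) here and min(a, b) in the macro below.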
    473 #define ASSEMBLE_FLOAT_MAX(scratch_reg)                                       \
    474   do {                                                                        \
    475     __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    476     __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(0),  \
    477             i.InputDoubleRegister(1));                                        \
    478   } while (0)
    479 
    480 
    481 #define ASSEMBLE_FLOAT_MIN(scratch_reg)                                       \
    482   do {                                                                        \
    483     __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    484     __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(1),  \
    485             i.InputDoubleRegister(0));                                        \
    486   } while (0)
    487 
    488 
    489 #define ASSEMBLE_LOAD_FLOAT(asm_instr, asm_instrx)    \
    490   do {                                                \
    491     DoubleRegister result = i.OutputDoubleRegister(); \
    492     AddressingMode mode = kMode_None;                 \
    493     MemOperand operand = i.MemoryOperand(&mode);      \
    494     if (mode == kMode_MRI) {                          \
    495       __ asm_instr(result, operand);                  \
    496     } else {                                          \
    497       __ asm_instrx(result, operand);                 \
    498     }                                                 \
    499     DCHECK_EQ(LeaveRC, i.OutputRCBit());              \
    500   } while (0)
    501 
    502 
    503 #define ASSEMBLE_LOAD_INTEGER(asm_instr, asm_instrx) \
    504   do {                                               \
    505     Register result = i.OutputRegister();            \
    506     AddressingMode mode = kMode_None;                \
    507     MemOperand operand = i.MemoryOperand(&mode);     \
    508     if (mode == kMode_MRI) {                         \
    509       __ asm_instr(result, operand);                 \
    510     } else {                                         \
    511       __ asm_instrx(result, operand);                \
    512     }                                                \
    513     DCHECK_EQ(LeaveRC, i.OutputRCBit());             \
    514   } while (0)
    515 
    516 
    517 #define ASSEMBLE_STORE_FLOAT32()                         \
    518   do {                                                   \
    519     size_t index = 0;                                    \
    520     AddressingMode mode = kMode_None;                    \
    521     MemOperand operand = i.MemoryOperand(&mode, &index); \
    522     DoubleRegister value = i.InputDoubleRegister(index); \
    523     __ frsp(kScratchDoubleReg, value);                   \
    524     if (mode == kMode_MRI) {                             \
    525       __ stfs(kScratchDoubleReg, operand);               \
    526     } else {                                             \
    527       __ stfsx(kScratchDoubleReg, operand);              \
    528     }                                                    \
    529     DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
    530   } while (0)
    531 
    532 
    533 #define ASSEMBLE_STORE_DOUBLE()                          \
    534   do {                                                   \
    535     size_t index = 0;                                    \
    536     AddressingMode mode = kMode_None;                    \
    537     MemOperand operand = i.MemoryOperand(&mode, &index); \
    538     DoubleRegister value = i.InputDoubleRegister(index); \
    539     if (mode == kMode_MRI) {                             \
    540       __ stfd(value, operand);                           \
    541     } else {                                             \
    542       __ stfdx(value, operand);                          \
    543     }                                                    \
    544     DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
    545   } while (0)
    546 
    547 
    548 #define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx)    \
    549   do {                                                   \
    550     size_t index = 0;                                    \
    551     AddressingMode mode = kMode_None;                    \
    552     MemOperand operand = i.MemoryOperand(&mode, &index); \
    553     Register value = i.InputRegister(index);             \
    554     if (mode == kMode_MRI) {                             \
    555       __ asm_instr(value, operand);                      \
    556     } else {                                             \
    557       __ asm_instrx(value, operand);                     \
    558     }                                                    \
    559     DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
    560   } while (0)
    561 
    562 #if V8_TARGET_ARCH_PPC64
    563 // TODO(mbrandy): fix paths that produce garbage in offset's upper 32 bits.
    564 #define CleanUInt32(x) __ ClearLeftImm(x, x, Operand(32))
    565 #else
    566 #define CleanUInt32(x)
    567 #endif
    568 
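        // The checked memory-access macros compare the offset register against the
        // length operand (input 2) and, when out of bounds, branch to an out-of-line
        // stub (quiet NaN for float loads, zero for integer loads) or skip the store.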
    569 #define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, asm_instrx, width)  \
    570   do {                                                             \
    571     DoubleRegister result = i.OutputDoubleRegister();              \
    572     size_t index = 0;                                              \
    573     AddressingMode mode = kMode_None;                              \
    574     MemOperand operand = i.MemoryOperand(&mode, index);            \
    575     DCHECK_EQ(kMode_MRR, mode);                                    \
    576     Register offset = operand.rb();                                \
    577     if (HasRegisterInput(instr, 2)) {                              \
    578       __ cmplw(offset, i.InputRegister(2));                        \
    579     } else {                                                       \
    580       __ cmplwi(offset, i.InputImmediate(2));                      \
    581     }                                                              \
    582     auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \
    583     __ bge(ool->entry());                                          \
    584     if (mode == kMode_MRI) {                                       \
    585       __ asm_instr(result, operand);                               \
    586     } else {                                                       \
    587       CleanUInt32(offset);                                         \
    588       __ asm_instrx(result, operand);                              \
    589     }                                                              \
    590     __ bind(ool->exit());                                          \
    591     DCHECK_EQ(LeaveRC, i.OutputRCBit());                           \
    592   } while (0)
    593 
    594 #define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr, asm_instrx) \
    595   do {                                                       \
    596     Register result = i.OutputRegister();                    \
    597     size_t index = 0;                                        \
    598     AddressingMode mode = kMode_None;                        \
    599     MemOperand operand = i.MemoryOperand(&mode, index);      \
    600     DCHECK_EQ(kMode_MRR, mode);                              \
    601     Register offset = operand.rb();                          \
    602     if (HasRegisterInput(instr, 2)) {                        \
    603       __ cmplw(offset, i.InputRegister(2));                  \
    604     } else {                                                 \
    605       __ cmplwi(offset, i.InputImmediate(2));                \
    606     }                                                        \
    607     auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    608     __ bge(ool->entry());                                    \
    609     if (mode == kMode_MRI) {                                 \
    610       __ asm_instr(result, operand);                         \
    611     } else {                                                 \
    612       CleanUInt32(offset);                                   \
    613       __ asm_instrx(result, operand);                        \
    614     }                                                        \
    615     __ bind(ool->exit());                                    \
    616     DCHECK_EQ(LeaveRC, i.OutputRCBit());                     \
    617   } while (0)
    618 
    619 #define ASSEMBLE_CHECKED_STORE_FLOAT32()                \
    620   do {                                                  \
    621     Label done;                                         \
    622     size_t index = 0;                                   \
    623     AddressingMode mode = kMode_None;                   \
    624     MemOperand operand = i.MemoryOperand(&mode, index); \
    625     DCHECK_EQ(kMode_MRR, mode);                         \
    626     Register offset = operand.rb();                     \
    627     if (HasRegisterInput(instr, 2)) {                   \
    628       __ cmplw(offset, i.InputRegister(2));             \
    629     } else {                                            \
    630       __ cmplwi(offset, i.InputImmediate(2));           \
    631     }                                                   \
    632     __ bge(&done);                                      \
    633     DoubleRegister value = i.InputDoubleRegister(3);    \
    634     __ frsp(kScratchDoubleReg, value);                  \
    635     if (mode == kMode_MRI) {                            \
    636       __ stfs(kScratchDoubleReg, operand);              \
    637     } else {                                            \
    638       CleanUInt32(offset);                              \
    639       __ stfsx(kScratchDoubleReg, operand);             \
    640     }                                                   \
    641     __ bind(&done);                                     \
    642     DCHECK_EQ(LeaveRC, i.OutputRCBit());                \
    643   } while (0)
    644 
    645 #define ASSEMBLE_CHECKED_STORE_DOUBLE()                 \
    646   do {                                                  \
    647     Label done;                                         \
    648     size_t index = 0;                                   \
    649     AddressingMode mode = kMode_None;                   \
    650     MemOperand operand = i.MemoryOperand(&mode, index); \
    651     DCHECK_EQ(kMode_MRR, mode);                         \
    652     Register offset = operand.rb();                     \
    653     if (HasRegisterInput(instr, 2)) {                   \
    654       __ cmplw(offset, i.InputRegister(2));             \
    655     } else {                                            \
    656       __ cmplwi(offset, i.InputImmediate(2));           \
    657     }                                                   \
    658     __ bge(&done);                                      \
    659     DoubleRegister value = i.InputDoubleRegister(3);    \
    660     if (mode == kMode_MRI) {                            \
    661       __ stfd(value, operand);                          \
    662     } else {                                            \
    663       CleanUInt32(offset);                              \
    664       __ stfdx(value, operand);                         \
    665     }                                                   \
    666     __ bind(&done);                                     \
    667     DCHECK_EQ(LeaveRC, i.OutputRCBit());                \
    668   } while (0)
    669 
    670 #define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr, asm_instrx) \
    671   do {                                                        \
    672     Label done;                                               \
    673     size_t index = 0;                                         \
    674     AddressingMode mode = kMode_None;                         \
    675     MemOperand operand = i.MemoryOperand(&mode, index);       \
    676     DCHECK_EQ(kMode_MRR, mode);                               \
    677     Register offset = operand.rb();                           \
    678     if (HasRegisterInput(instr, 2)) {                         \
    679       __ cmplw(offset, i.InputRegister(2));                   \
    680     } else {                                                  \
    681       __ cmplwi(offset, i.InputImmediate(2));                 \
    682     }                                                         \
    683     __ bge(&done);                                            \
    684     Register value = i.InputRegister(3);                      \
    685     if (mode == kMode_MRI) {                                  \
    686       __ asm_instr(value, operand);                           \
    687     } else {                                                  \
    688       CleanUInt32(offset);                                    \
    689       __ asm_instrx(value, operand);                          \
    690     }                                                         \
    691     __ bind(&done);                                           \
    692     DCHECK_EQ(LeaveRC, i.OutputRCBit());                      \
    693   } while (0)
    694 
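        // Atomic loads gain acquire semantics from the sync before the load plus the
        // cmp/bne/isync sequence after it (a never-taken branch that depends on the
        // loaded value, followed by isync); atomic stores issue a full sync first.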
    695 #define ASSEMBLE_ATOMIC_LOAD_INTEGER(asm_instr, asm_instrx)   \
    696   do {                                                        \
    697     Label done;                                               \
    698     Register result = i.OutputRegister();                     \
    699     AddressingMode mode = kMode_None;                         \
    700     MemOperand operand = i.MemoryOperand(&mode);              \
    701     __ sync();                                                \
    702     if (mode == kMode_MRI) {                                  \
    703       __ asm_instr(result, operand);                          \
    704     } else {                                                  \
    705       __ asm_instrx(result, operand);                         \
    706     }                                                         \
    707     __ bind(&done);                                           \
    708     __ cmp(result, result);                                   \
    709     __ bne(&done);                                            \
    710     __ isync();                                               \
    711   } while (0)
    712 #define ASSEMBLE_ATOMIC_STORE_INTEGER(asm_instr, asm_instrx)  \
    713   do {                                                        \
    714     size_t index = 0;                                         \
    715     AddressingMode mode = kMode_None;                         \
    716     MemOperand operand = i.MemoryOperand(&mode, &index);      \
    717     Register value = i.InputRegister(index);                  \
    718     __ sync();                                                \
    719     if (mode == kMode_MRI) {                                  \
    720       __ asm_instr(value, operand);                           \
    721     } else {                                                  \
    722       __ asm_instrx(value, operand);                          \
    723     }                                                         \
    724     DCHECK_EQ(LeaveRC, i.OutputRCBit());                      \
    725   } while (0)
    726 
    727 void CodeGenerator::AssembleDeconstructFrame() {
    728   __ LeaveFrame(StackFrame::MANUAL);
    729 }
    730 
    731 void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
    732   int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
    733   if (sp_slot_delta > 0) {
    734     __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
    735   }
    736   frame_access_state()->SetFrameAccessToDefault();
    737 }
    738 
    739 
    740 void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
    741   int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
    742   if (sp_slot_delta < 0) {
    743     __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
    744     frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
    745   }
    746   if (frame_access_state()->has_frame()) {
    747     __ RestoreFrameStateForTailCall();
    748   }
    749   frame_access_state()->SetFrameAccessToSP();
    750 }
    751 
    752 void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
    753                                                      Register scratch1,
    754                                                      Register scratch2,
    755                                                      Register scratch3) {
    756   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
    757   Label done;
    758 
    759   // Check if current frame is an arguments adaptor frame.
    760   __ LoadP(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
    761   __ CmpSmiLiteral(scratch1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
    762   __ bne(&done);
    763 
    764   // Load the arguments count from the current arguments adaptor frame
    765   // (note that it does not include the receiver).
    766   Register caller_args_count_reg = scratch1;
    767   __ LoadP(caller_args_count_reg,
    768            MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
    769   __ SmiUntag(caller_args_count_reg);
    770 
    771   ParameterCount callee_args_count(args_reg);
    772   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
    773                         scratch3);
    774   __ bind(&done);
    775 }
    776 
    777 // Assembles an instruction after register allocation, producing machine code.
    778 CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
    779     Instruction* instr) {
    780   PPCOperandConverter i(this, instr);
    781   ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode());
    782 
    783   switch (opcode) {
    784     case kArchCallCodeObject: {
    785       v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
    786           masm());
    787       EnsureSpaceForLazyDeopt();
    788       if (HasRegisterInput(instr, 0)) {
    789         __ addi(ip, i.InputRegister(0),
    790                 Operand(Code::kHeaderSize - kHeapObjectTag));
    791         __ Call(ip);
    792       } else {
    793         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
    794                 RelocInfo::CODE_TARGET);
    795       }
    796       RecordCallPosition(instr);
    797       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    798       frame_access_state()->ClearSPDelta();
    799       break;
    800     }
    801     case kArchTailCallCodeObjectFromJSFunction:
    802     case kArchTailCallCodeObject: {
    803       int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
    804       AssembleDeconstructActivationRecord(stack_param_delta);
    805       if (opcode == kArchTailCallCodeObjectFromJSFunction) {
    806         AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
    807                                          i.TempRegister(0), i.TempRegister(1),
    808                                          i.TempRegister(2));
    809       }
    810       if (HasRegisterInput(instr, 0)) {
    811         __ addi(ip, i.InputRegister(0),
    812                 Operand(Code::kHeaderSize - kHeapObjectTag));
    813         __ Jump(ip);
    814       } else {
    815         // We cannot use the constant pool to load the target since
    816         // we've already restored the caller's frame.
    817         ConstantPoolUnavailableScope constant_pool_unavailable(masm());
    818         __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
    819                 RelocInfo::CODE_TARGET);
    820       }
    821       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    822       frame_access_state()->ClearSPDelta();
    823       break;
    824     }
    825     case kArchTailCallAddress: {
    826       int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
    827       AssembleDeconstructActivationRecord(stack_param_delta);
    828       CHECK(!instr->InputAt(0)->IsImmediate());
    829       __ Jump(i.InputRegister(0));
    830       frame_access_state()->ClearSPDelta();
    831       break;
    832     }
    833     case kArchCallJSFunction: {
    834       v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
    835           masm());
    836       EnsureSpaceForLazyDeopt();
    837       Register func = i.InputRegister(0);
    838       if (FLAG_debug_code) {
    839         // Check the function's context matches the context argument.
    840         __ LoadP(kScratchReg,
    841                  FieldMemOperand(func, JSFunction::kContextOffset));
    842         __ cmp(cp, kScratchReg);
    843         __ Assert(eq, kWrongFunctionContext);
    844       }
    845       __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
    846       __ Call(ip);
    847       RecordCallPosition(instr);
    848       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    849       frame_access_state()->ClearSPDelta();
    850       break;
    851     }
    852     case kArchTailCallJSFunctionFromJSFunction:
    853     case kArchTailCallJSFunction: {
    854       Register func = i.InputRegister(0);
    855       if (FLAG_debug_code) {
    856         // Check the function's context matches the context argument.
    857         __ LoadP(kScratchReg,
    858                  FieldMemOperand(func, JSFunction::kContextOffset));
    859         __ cmp(cp, kScratchReg);
    860         __ Assert(eq, kWrongFunctionContext);
    861       }
    862       int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
    863       AssembleDeconstructActivationRecord(stack_param_delta);
    864       if (opcode == kArchTailCallJSFunctionFromJSFunction) {
    865         AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
    866                                          i.TempRegister(0), i.TempRegister(1),
    867                                          i.TempRegister(2));
    868       }
    869       __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
    870       __ Jump(ip);
    871       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    872       frame_access_state()->ClearSPDelta();
    873       break;
    874     }
    875     case kArchPrepareCallCFunction: {
    876       int const num_parameters = MiscField::decode(instr->opcode());
    877       __ PrepareCallCFunction(num_parameters, kScratchReg);
    878       // Frame alignment requires using FP-relative frame addressing.
    879       frame_access_state()->SetFrameAccessToFP();
    880       break;
    881     }
    882     case kArchPrepareTailCall:
    883       AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
    884       break;
    885     case kArchCallCFunction: {
    886       int const num_parameters = MiscField::decode(instr->opcode());
    887       if (instr->InputAt(0)->IsImmediate()) {
    888         ExternalReference ref = i.InputExternalReference(0);
    889         __ CallCFunction(ref, num_parameters);
    890       } else {
    891         Register func = i.InputRegister(0);
    892         __ CallCFunction(func, num_parameters);
    893       }
    894       frame_access_state()->SetFrameAccessToDefault();
    895       frame_access_state()->ClearSPDelta();
    896       break;
    897     }
    898     case kArchJmp:
    899       AssembleArchJump(i.InputRpo(0));
    900       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    901       break;
    902     case kArchLookupSwitch:
    903       AssembleArchLookupSwitch(instr);
    904       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    905       break;
    906     case kArchTableSwitch:
    907       AssembleArchTableSwitch(instr);
    908       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    909       break;
    910     case kArchDebugBreak:
    911       __ stop("kArchDebugBreak");
    912       break;
    913     case kArchNop:
    914     case kArchThrowTerminator:
    915       // Don't emit code for nops.
    916       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    917       break;
    918     case kArchDeoptimize: {
    919       int deopt_state_id =
    920           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
    921       Deoptimizer::BailoutType bailout_type =
    922           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
    923       CodeGenResult result =
    924           AssembleDeoptimizerCall(deopt_state_id, bailout_type);
    925       if (result != kSuccess) return result;
    926       break;
    927     }
    928     case kArchRet:
    929       AssembleReturn();
    930       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    931       break;
    932     case kArchStackPointer:
    933       __ mr(i.OutputRegister(), sp);
    934       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    935       break;
    936     case kArchFramePointer:
    937       __ mr(i.OutputRegister(), fp);
    938       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    939       break;
    940     case kArchParentFramePointer:
    941       if (frame_access_state()->has_frame()) {
    942         __ LoadP(i.OutputRegister(), MemOperand(fp, 0));
    943       } else {
    944         __ mr(i.OutputRegister(), fp);
    945       }
    946       break;
    947     case kArchTruncateDoubleToI:
    948       // TODO(mbrandy): move slow call to stub out of line.
    949       __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
    950       DCHECK_EQ(LeaveRC, i.OutputRCBit());
    951       break;
    952     case kArchStoreWithWriteBarrier: {
    953       RecordWriteMode mode =
    954           static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
    955       Register object = i.InputRegister(0);
    956       Register value = i.InputRegister(2);
    957       Register scratch0 = i.TempRegister(0);
    958       Register scratch1 = i.TempRegister(1);
    959       OutOfLineRecordWrite* ool;
    960 
    961       AddressingMode addressing_mode =
    962           AddressingModeField::decode(instr->opcode());
    963       if (addressing_mode == kMode_MRI) {
    964         int32_t offset = i.InputInt32(1);
    965         ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
    966                                                 scratch0, scratch1, mode);
    967         __ StoreP(value, MemOperand(object, offset));
    968       } else {
    969         DCHECK_EQ(kMode_MRR, addressing_mode);
    970         Register offset(i.InputRegister(1));
    971         ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
    972                                                 scratch0, scratch1, mode);
    973         __ StorePX(value, MemOperand(object, offset));
    974       }
    975       __ CheckPageFlag(object, scratch0,
    976                        MemoryChunk::kPointersFromHereAreInterestingMask, ne,
    977                        ool->entry());
    978       __ bind(ool->exit());
    979       break;
    980     }
    981     case kArchStackSlot: {
    982       FrameOffset offset =
    983           frame_access_state()->GetFrameOffset(i.InputInt32(0));
    984       __ addi(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
    985               Operand(offset.offset()));
    986       break;
    987     }
    988     case kPPC_And:
    989       if (HasRegisterInput(instr, 1)) {
    990         __ and_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
    991                 i.OutputRCBit());
    992       } else {
    993         __ andi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
    994       }
    995       break;
    996     case kPPC_AndComplement:
    997       __ andc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
    998               i.OutputRCBit());
    999       break;
   1000     case kPPC_Or:
   1001       if (HasRegisterInput(instr, 1)) {
   1002         __ orx(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1003                i.OutputRCBit());
   1004       } else {
   1005         __ ori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
   1006         DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1007       }
   1008       break;
   1009     case kPPC_OrComplement:
   1010       __ orc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1011              i.OutputRCBit());
   1012       break;
   1013     case kPPC_Xor:
   1014       if (HasRegisterInput(instr, 1)) {
   1015         __ xor_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1016                 i.OutputRCBit());
   1017       } else {
   1018         __ xori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
   1019         DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1020       }
   1021       break;
   1022     case kPPC_ShiftLeft32:
   1023       ASSEMBLE_BINOP_RC(slw, slwi);
   1024       break;
   1025 #if V8_TARGET_ARCH_PPC64
   1026     case kPPC_ShiftLeft64:
   1027       ASSEMBLE_BINOP_RC(sld, sldi);
   1028       break;
   1029 #endif
   1030     case kPPC_ShiftRight32:
   1031       ASSEMBLE_BINOP_RC(srw, srwi);
   1032       break;
   1033 #if V8_TARGET_ARCH_PPC64
   1034     case kPPC_ShiftRight64:
   1035       ASSEMBLE_BINOP_RC(srd, srdi);
   1036       break;
   1037 #endif
   1038     case kPPC_ShiftRightAlg32:
   1039       ASSEMBLE_BINOP_INT_RC(sraw, srawi);
   1040       break;
   1041 #if V8_TARGET_ARCH_PPC64
   1042     case kPPC_ShiftRightAlg64:
   1043       ASSEMBLE_BINOP_INT_RC(srad, sradi);
   1044       break;
   1045 #endif
   1046 #if !V8_TARGET_ARCH_PPC64
   1047     case kPPC_AddPair:
   1048       // i.InputRegister(0) ... left low word.
   1049       // i.InputRegister(1) ... left high word.
   1050       // i.InputRegister(2) ... right low word.
   1051       // i.InputRegister(3) ... right high word.
   1052       __ addc(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
   1053       __ adde(i.OutputRegister(1), i.InputRegister(1), i.InputRegister(3));
   1054       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1055       break;
   1056     case kPPC_SubPair:
   1057       // i.InputRegister(0) ... left low word.
   1058       // i.InputRegister(1) ... left high word.
   1059       // i.InputRegister(2) ... right low word.
   1060       // i.InputRegister(3) ... right high word.
   1061       __ subc(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
   1062       __ sube(i.OutputRegister(1), i.InputRegister(1), i.InputRegister(3));
   1063       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1064       break;
   1065     case kPPC_MulPair:
   1066       // i.InputRegister(0) ... left low word.
   1067       // i.InputRegister(1) ... left high word.
   1068       // i.InputRegister(2) ... right low word.
   1069       // i.InputRegister(3) ... right high word.
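              // Result low  = low word of (left low * right low);
              // result high = high word of (left low * right low)
              //               + left low * right high + left high * right low.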
   1070       __ mullw(i.TempRegister(0), i.InputRegister(0), i.InputRegister(3));
   1071       __ mullw(i.TempRegister(1), i.InputRegister(2), i.InputRegister(1));
   1072       __ add(i.TempRegister(0), i.TempRegister(0), i.TempRegister(1));
   1073       __ mullw(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
   1074       __ mulhwu(i.OutputRegister(1), i.InputRegister(0), i.InputRegister(2));
   1075       __ add(i.OutputRegister(1), i.OutputRegister(1), i.TempRegister(0));
   1076       break;
   1077     case kPPC_ShiftLeftPair:
   1078       if (instr->InputAt(2)->IsImmediate()) {
   1079         __ ShiftLeftPair(i.OutputRegister(0), i.OutputRegister(1),
   1080                          i.InputRegister(0), i.InputRegister(1),
   1081                          i.InputInt32(2));
   1082       } else {
   1083         __ ShiftLeftPair(i.OutputRegister(0), i.OutputRegister(1),
   1084                          i.InputRegister(0), i.InputRegister(1), kScratchReg,
   1085                          i.InputRegister(2));
   1086       }
   1087       break;
   1088     case kPPC_ShiftRightPair:
   1089       if (instr->InputAt(2)->IsImmediate()) {
   1090         __ ShiftRightPair(i.OutputRegister(0), i.OutputRegister(1),
   1091                           i.InputRegister(0), i.InputRegister(1),
   1092                           i.InputInt32(2));
   1093       } else {
   1094         __ ShiftRightPair(i.OutputRegister(0), i.OutputRegister(1),
   1095                           i.InputRegister(0), i.InputRegister(1), kScratchReg,
   1096                           i.InputRegister(2));
   1097       }
   1098       break;
   1099     case kPPC_ShiftRightAlgPair:
   1100       if (instr->InputAt(2)->IsImmediate()) {
   1101         __ ShiftRightAlgPair(i.OutputRegister(0), i.OutputRegister(1),
   1102                              i.InputRegister(0), i.InputRegister(1),
   1103                              i.InputInt32(2));
   1104       } else {
   1105         __ ShiftRightAlgPair(i.OutputRegister(0), i.OutputRegister(1),
   1106                              i.InputRegister(0), i.InputRegister(1),
   1107                              kScratchReg, i.InputRegister(2));
   1108       }
   1109       break;
   1110 #endif
   1111     case kPPC_RotRight32:
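              // PPC rotates left only; a rotate right by n is emitted as a rotate
              // left by (width - n).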
   1112       if (HasRegisterInput(instr, 1)) {
   1113         __ subfic(kScratchReg, i.InputRegister(1), Operand(32));
   1114         __ rotlw(i.OutputRegister(), i.InputRegister(0), kScratchReg,
   1115                  i.OutputRCBit());
   1116       } else {
   1117         int sh = i.InputInt32(1);
   1118         __ rotrwi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
   1119       }
   1120       break;
   1121 #if V8_TARGET_ARCH_PPC64
   1122     case kPPC_RotRight64:
   1123       if (HasRegisterInput(instr, 1)) {
   1124         __ subfic(kScratchReg, i.InputRegister(1), Operand(64));
   1125         __ rotld(i.OutputRegister(), i.InputRegister(0), kScratchReg,
   1126                  i.OutputRCBit());
   1127       } else {
   1128         int sh = i.InputInt32(1);
   1129         __ rotrdi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
   1130       }
   1131       break;
   1132 #endif
   1133     case kPPC_Not:
   1134       __ notx(i.OutputRegister(), i.InputRegister(0), i.OutputRCBit());
   1135       break;
   1136     case kPPC_RotLeftAndMask32:
   1137       __ rlwinm(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
   1138                 31 - i.InputInt32(2), 31 - i.InputInt32(3), i.OutputRCBit());
   1139       break;
   1140 #if V8_TARGET_ARCH_PPC64
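             // The rldic/rldicl/rldicr masks below likewise use IBM (MSB = 0)
             // bit numbering, hence the 63 - n conversions.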
   1141     case kPPC_RotLeftAndClear64:
   1142       __ rldic(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
   1143                63 - i.InputInt32(2), i.OutputRCBit());
   1144       break;
   1145     case kPPC_RotLeftAndClearLeft64:
   1146       __ rldicl(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
   1147                 63 - i.InputInt32(2), i.OutputRCBit());
   1148       break;
   1149     case kPPC_RotLeftAndClearRight64:
   1150       __ rldicr(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
   1151                 63 - i.InputInt32(2), i.OutputRCBit());
   1152       break;
   1153 #endif
   1154     case kPPC_Add:
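               // On 64-bit targets, an add carrying a flags (overflow)
               // continuation uses the overflow sequence; otherwise emit a
               // plain add/addi.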
   1155 #if V8_TARGET_ARCH_PPC64
   1156       if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
   1157         ASSEMBLE_ADD_WITH_OVERFLOW();
   1158       } else {
   1159 #endif
   1160         if (HasRegisterInput(instr, 1)) {
   1161           __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1162                  LeaveOE, i.OutputRCBit());
   1163         } else {
   1164           __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
   1165           DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1166         }
   1167 #if V8_TARGET_ARCH_PPC64
   1168       }
   1169 #endif
   1170       break;
   1171     case kPPC_AddWithOverflow32:
   1172       ASSEMBLE_ADD_WITH_OVERFLOW32();
   1173       break;
   1174     case kPPC_AddDouble:
   1175       ASSEMBLE_FLOAT_BINOP_RC(fadd, MiscField::decode(instr->opcode()));
   1176       break;
   1177     case kPPC_Sub:
   1178 #if V8_TARGET_ARCH_PPC64
   1179       if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
   1180         ASSEMBLE_SUB_WITH_OVERFLOW();
   1181       } else {
   1182 #endif
   1183         if (HasRegisterInput(instr, 1)) {
   1184           __ sub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1185                  LeaveOE, i.OutputRCBit());
   1186         } else {
   1187           __ subi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
   1188           DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1189         }
   1190 #if V8_TARGET_ARCH_PPC64
   1191       }
   1192 #endif
   1193       break;
   1194     case kPPC_SubWithOverflow32:
   1195       ASSEMBLE_SUB_WITH_OVERFLOW32();
   1196       break;
   1197     case kPPC_SubDouble:
   1198       ASSEMBLE_FLOAT_BINOP_RC(fsub, MiscField::decode(instr->opcode()));
   1199       break;
   1200     case kPPC_Mul32:
   1201       __ mullw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1202                LeaveOE, i.OutputRCBit());
   1203       break;
   1204 #if V8_TARGET_ARCH_PPC64
   1205     case kPPC_Mul64:
   1206       __ mulld(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1207                LeaveOE, i.OutputRCBit());
   1208       break;
   1209 #endif
   1210     case kPPC_MulHigh32:
   1211       __ mulhw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1212                i.OutputRCBit());
   1213       break;
   1214     case kPPC_MulHighU32:
   1215       __ mulhwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
   1216                 i.OutputRCBit());
   1217       break;
   1218     case kPPC_MulDouble:
   1219       ASSEMBLE_FLOAT_BINOP_RC(fmul, MiscField::decode(instr->opcode()));
   1220       break;
   1221     case kPPC_Div32:
   1222       __ divw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
   1223       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1224       break;
   1225 #if V8_TARGET_ARCH_PPC64
   1226     case kPPC_Div64:
   1227       __ divd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
   1228       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1229       break;
   1230 #endif
   1231     case kPPC_DivU32:
   1232       __ divwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
   1233       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1234       break;
   1235 #if V8_TARGET_ARCH_PPC64
   1236     case kPPC_DivU64:
   1237       __ divdu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
   1238       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1239       break;
   1240 #endif
   1241     case kPPC_DivDouble:
   1242       ASSEMBLE_FLOAT_BINOP_RC(fdiv, MiscField::decode(instr->opcode()));
   1243       break;
   1244     case kPPC_Mod32:
   1245       ASSEMBLE_MODULO(divw, mullw);
   1246       break;
   1247 #if V8_TARGET_ARCH_PPC64
   1248     case kPPC_Mod64:
   1249       ASSEMBLE_MODULO(divd, mulld);
   1250       break;
   1251 #endif
   1252     case kPPC_ModU32:
   1253       ASSEMBLE_MODULO(divwu, mullw);
   1254       break;
   1255 #if V8_TARGET_ARCH_PPC64
   1256     case kPPC_ModU64:
   1257       ASSEMBLE_MODULO(divdu, mulld);
   1258       break;
   1259 #endif
   1260     case kPPC_ModDouble:
   1261       // TODO(bmeurer): We should really get rid of this special instruction,
   1262       // and generate a CallAddress instruction instead.
   1263       ASSEMBLE_FLOAT_MODULO();
   1264       break;
   1265     case kIeee754Float64Atan:
   1266       ASSEMBLE_IEEE754_UNOP(atan);
   1267       break;
   1268     case kIeee754Float64Atan2:
   1269       ASSEMBLE_IEEE754_BINOP(atan2);
   1270       break;
   1271     case kIeee754Float64Tan:
   1272       ASSEMBLE_IEEE754_UNOP(tan);
   1273       break;
   1274     case kIeee754Float64Cbrt:
   1275       ASSEMBLE_IEEE754_UNOP(cbrt);
   1276       break;
   1277     case kIeee754Float64Sin:
   1278       ASSEMBLE_IEEE754_UNOP(sin);
   1279       break;
   1280     case kIeee754Float64Cos:
   1281       ASSEMBLE_IEEE754_UNOP(cos);
   1282       break;
   1283     case kIeee754Float64Exp:
   1284       ASSEMBLE_IEEE754_UNOP(exp);
   1285       break;
   1286     case kIeee754Float64Expm1:
   1287       ASSEMBLE_IEEE754_UNOP(expm1);
   1288       break;
   1289     case kIeee754Float64Atanh:
   1290       ASSEMBLE_IEEE754_UNOP(atanh);
   1291       break;
   1292     case kIeee754Float64Log:
   1293       ASSEMBLE_IEEE754_UNOP(log);
   1294       break;
   1295     case kIeee754Float64Log1p:
   1296       ASSEMBLE_IEEE754_UNOP(log1p);
   1297       break;
   1298     case kIeee754Float64Log2:
   1299       ASSEMBLE_IEEE754_UNOP(log2);
   1300       break;
   1301     case kIeee754Float64Log10:
   1302       ASSEMBLE_IEEE754_UNOP(log10);
   1303       break;
   1304     case kPPC_Neg:
   1305       __ neg(i.OutputRegister(), i.InputRegister(0), LeaveOE, i.OutputRCBit());
   1306       break;
   1307     case kPPC_MaxDouble:
   1308       ASSEMBLE_FLOAT_MAX(kScratchDoubleReg);
   1309       break;
   1310     case kPPC_MinDouble:
   1311       ASSEMBLE_FLOAT_MIN(kScratchDoubleReg);
   1312       break;
   1313     case kPPC_AbsDouble:
   1314       ASSEMBLE_FLOAT_UNOP_RC(fabs, 0);
   1315       break;
   1316     case kPPC_SqrtDouble:
   1317       ASSEMBLE_FLOAT_UNOP_RC(fsqrt, MiscField::decode(instr->opcode()));
   1318       break;
   1319     case kPPC_FloorDouble:
   1320       ASSEMBLE_FLOAT_UNOP_RC(frim, MiscField::decode(instr->opcode()));
   1321       break;
   1322     case kPPC_CeilDouble:
   1323       ASSEMBLE_FLOAT_UNOP_RC(frip, MiscField::decode(instr->opcode()));
   1324       break;
   1325     case kPPC_TruncateDouble:
   1326       ASSEMBLE_FLOAT_UNOP_RC(friz, MiscField::decode(instr->opcode()));
   1327       break;
   1328     case kPPC_RoundDouble:
   1329       ASSEMBLE_FLOAT_UNOP_RC(frin, MiscField::decode(instr->opcode()));
   1330       break;
   1331     case kPPC_NegDouble:
   1332       ASSEMBLE_FLOAT_UNOP_RC(fneg, 0);
   1333       break;
   1334     case kPPC_Cntlz32:
   1335       __ cntlzw_(i.OutputRegister(), i.InputRegister(0));
   1336       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1337       break;
   1338 #if V8_TARGET_ARCH_PPC64
   1339     case kPPC_Cntlz64:
   1340       __ cntlzd_(i.OutputRegister(), i.InputRegister(0));
   1341       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1342       break;
   1343 #endif
   1344     case kPPC_Popcnt32:
   1345       __ popcntw(i.OutputRegister(), i.InputRegister(0));
   1346       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1347       break;
   1348 #if V8_TARGET_ARCH_PPC64
   1349     case kPPC_Popcnt64:
   1350       __ popcntd(i.OutputRegister(), i.InputRegister(0));
   1351       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1352       break;
   1353 #endif
   1354     case kPPC_Cmp32:
   1355       ASSEMBLE_COMPARE(cmpw, cmplw);
   1356       break;
   1357 #if V8_TARGET_ARCH_PPC64
   1358     case kPPC_Cmp64:
   1359       ASSEMBLE_COMPARE(cmp, cmpl);
   1360       break;
   1361 #endif
   1362     case kPPC_CmpDouble:
   1363       ASSEMBLE_FLOAT_COMPARE(fcmpu);
   1364       break;
   1365     case kPPC_Tst32:
   1366       if (HasRegisterInput(instr, 1)) {
   1367         __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
   1368       } else {
   1369         __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
   1370       }
   1371 #if V8_TARGET_ARCH_PPC64
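               // and_/andi. set CR0 from the 64-bit result; sign-extend and
               // set CR0 again so the flags reflect 32-bit semantics.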
   1372       __ extsw(r0, r0, i.OutputRCBit());
   1373 #endif
   1374       DCHECK_EQ(SetRC, i.OutputRCBit());
   1375       break;
   1376 #if V8_TARGET_ARCH_PPC64
   1377     case kPPC_Tst64:
   1378       if (HasRegisterInput(instr, 1)) {
   1379         __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
   1380       } else {
   1381         __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
   1382       }
   1383       DCHECK_EQ(SetRC, i.OutputRCBit());
   1384       break;
   1385 #endif
   1386     case kPPC_Float64SilenceNaN: {
   1387       DoubleRegister value = i.InputDoubleRegister(0);
   1388       DoubleRegister result = i.OutputDoubleRegister();
   1389       __ CanonicalizeNaN(result, value);
   1390       break;
   1391     }
   1392     case kPPC_Push:
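               // Both paths pre-decrement sp; record the pushed slots so that
               // later frame offsets remain correct.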
   1393       if (instr->InputAt(0)->IsFPRegister()) {
   1394         __ stfdu(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
   1395         frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
   1396       } else {
   1397         __ Push(i.InputRegister(0));
   1398         frame_access_state()->IncreaseSPDelta(1);
   1399       }
   1400       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1401       break;
   1402     case kPPC_PushFrame: {
   1403       int num_slots = i.InputInt32(1);
   1404       if (instr->InputAt(0)->IsFPRegister()) {
   1405         LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
   1406         if (op->representation() == MachineRepresentation::kFloat64) {
   1407           __ StoreDoubleU(i.InputDoubleRegister(0),
    1408                           MemOperand(sp, -num_slots * kPointerSize), r0);
   1409         } else {
   1410           DCHECK(op->representation() == MachineRepresentation::kFloat32);
   1411           __ StoreSingleU(i.InputDoubleRegister(0),
    1412                           MemOperand(sp, -num_slots * kPointerSize), r0);
   1413         }
   1414       } else {
   1415         __ StorePU(i.InputRegister(0),
   1416                    MemOperand(sp, -num_slots * kPointerSize), r0);
   1417       }
   1418       break;
   1419     }
   1420     case kPPC_StoreToStackSlot: {
   1421       int slot = i.InputInt32(1);
   1422       if (instr->InputAt(0)->IsFPRegister()) {
   1423         LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
   1424         if (op->representation() == MachineRepresentation::kFloat64) {
   1425           __ StoreDouble(i.InputDoubleRegister(0),
    1426                          MemOperand(sp, slot * kPointerSize), r0);
   1427         } else {
   1428           DCHECK(op->representation() == MachineRepresentation::kFloat32);
   1429           __ StoreSingle(i.InputDoubleRegister(0),
    1430                          MemOperand(sp, slot * kPointerSize), r0);
   1431         }
   1432       } else {
   1433         __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize), r0);
   1434       }
   1435       break;
   1436     }
   1437     case kPPC_ExtendSignWord8:
   1438       __ extsb(i.OutputRegister(), i.InputRegister(0));
   1439       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1440       break;
   1441     case kPPC_ExtendSignWord16:
   1442       __ extsh(i.OutputRegister(), i.InputRegister(0));
   1443       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1444       break;
   1445 #if V8_TARGET_ARCH_PPC64
   1446     case kPPC_ExtendSignWord32:
   1447       __ extsw(i.OutputRegister(), i.InputRegister(0));
   1448       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1449       break;
   1450     case kPPC_Uint32ToUint64:
    1451       // Zero-extend to 64 bits.
   1452       __ clrldi(i.OutputRegister(), i.InputRegister(0), Operand(32));
   1453       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1454       break;
   1455     case kPPC_Int64ToInt32:
   1456       __ extsw(i.OutputRegister(), i.InputRegister(0));
   1457       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1458       break;
   1459     case kPPC_Int64ToFloat32:
   1460       __ ConvertInt64ToFloat(i.InputRegister(0), i.OutputDoubleRegister());
   1461       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1462       break;
   1463     case kPPC_Int64ToDouble:
   1464       __ ConvertInt64ToDouble(i.InputRegister(0), i.OutputDoubleRegister());
   1465       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1466       break;
   1467     case kPPC_Uint64ToFloat32:
   1468       __ ConvertUnsignedInt64ToFloat(i.InputRegister(0),
   1469                                      i.OutputDoubleRegister());
   1470       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1471       break;
   1472     case kPPC_Uint64ToDouble:
   1473       __ ConvertUnsignedInt64ToDouble(i.InputRegister(0),
   1474                                       i.OutputDoubleRegister());
   1475       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1476       break;
   1477 #endif
   1478     case kPPC_Int32ToFloat32:
   1479       __ ConvertIntToFloat(i.InputRegister(0), i.OutputDoubleRegister());
   1480       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1481       break;
   1482     case kPPC_Int32ToDouble:
   1483       __ ConvertIntToDouble(i.InputRegister(0), i.OutputDoubleRegister());
   1484       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1485       break;
   1486     case kPPC_Uint32ToFloat32:
   1487       __ ConvertUnsignedIntToFloat(i.InputRegister(0),
   1488                                    i.OutputDoubleRegister());
   1489       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1490       break;
   1491     case kPPC_Uint32ToDouble:
   1492       __ ConvertUnsignedIntToDouble(i.InputRegister(0),
   1493                                     i.OutputDoubleRegister());
   1494       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1495       break;
   1496     case kPPC_DoubleToInt32:
   1497     case kPPC_DoubleToUint32:
   1498     case kPPC_DoubleToInt64: {
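               // When a second output is present (64-bit conversions), clear
               // FPSCR:VXCVI first and check it afterwards so output 1 can
               // report whether the conversion succeeded.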
   1499 #if V8_TARGET_ARCH_PPC64
   1500       bool check_conversion =
   1501           (opcode == kPPC_DoubleToInt64 && i.OutputCount() > 1);
   1502       if (check_conversion) {
   1503         __ mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
   1504       }
   1505 #endif
   1506       __ ConvertDoubleToInt64(i.InputDoubleRegister(0),
   1507 #if !V8_TARGET_ARCH_PPC64
   1508                               kScratchReg,
   1509 #endif
   1510                               i.OutputRegister(0), kScratchDoubleReg);
   1511 #if V8_TARGET_ARCH_PPC64
   1512       if (check_conversion) {
   1513         // Set 2nd output to zero if conversion fails.
   1514         CRegister cr = cr7;
   1515         int crbit = v8::internal::Assembler::encode_crbit(
   1516             cr, static_cast<CRBit>(VXCVI % CRWIDTH));
   1517         __ mcrfs(cr, VXCVI);  // extract FPSCR field containing VXCVI into cr7
   1518         if (CpuFeatures::IsSupported(ISELECT)) {
   1519           __ li(i.OutputRegister(1), Operand(1));
   1520           __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
   1521         } else {
   1522           __ li(i.OutputRegister(1), Operand::Zero());
   1523           __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit);
   1524           __ li(i.OutputRegister(1), Operand(1));
   1525         }
   1526       }
   1527 #endif
   1528       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1529       break;
   1530     }
   1531 #if V8_TARGET_ARCH_PPC64
   1532     case kPPC_DoubleToUint64: {
   1533       bool check_conversion = (i.OutputCount() > 1);
   1534       if (check_conversion) {
   1535         __ mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
   1536       }
   1537       __ ConvertDoubleToUnsignedInt64(i.InputDoubleRegister(0),
   1538                                       i.OutputRegister(0), kScratchDoubleReg);
   1539       if (check_conversion) {
   1540         // Set 2nd output to zero if conversion fails.
   1541         CRegister cr = cr7;
   1542         int crbit = v8::internal::Assembler::encode_crbit(
   1543             cr, static_cast<CRBit>(VXCVI % CRWIDTH));
   1544         __ mcrfs(cr, VXCVI);  // extract FPSCR field containing VXCVI into cr7
   1545         if (CpuFeatures::IsSupported(ISELECT)) {
   1546           __ li(i.OutputRegister(1), Operand(1));
   1547           __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
   1548         } else {
   1549           __ li(i.OutputRegister(1), Operand::Zero());
   1550           __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit);
   1551           __ li(i.OutputRegister(1), Operand(1));
   1552         }
   1553       }
   1554       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1555       break;
   1556     }
   1557 #endif
   1558     case kPPC_DoubleToFloat32:
   1559       ASSEMBLE_FLOAT_UNOP_RC(frsp, 0);
   1560       break;
   1561     case kPPC_Float32ToDouble:
    1562       // Nothing to do; float32 is already held in double format.
   1563       __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
   1564       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1565       break;
   1566     case kPPC_DoubleExtractLowWord32:
   1567       __ MovDoubleLowToInt(i.OutputRegister(), i.InputDoubleRegister(0));
   1568       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1569       break;
   1570     case kPPC_DoubleExtractHighWord32:
   1571       __ MovDoubleHighToInt(i.OutputRegister(), i.InputDoubleRegister(0));
   1572       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1573       break;
   1574     case kPPC_DoubleInsertLowWord32:
   1575       __ InsertDoubleLow(i.OutputDoubleRegister(), i.InputRegister(1), r0);
   1576       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1577       break;
   1578     case kPPC_DoubleInsertHighWord32:
   1579       __ InsertDoubleHigh(i.OutputDoubleRegister(), i.InputRegister(1), r0);
   1580       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1581       break;
   1582     case kPPC_DoubleConstruct:
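               // Assemble a double from its two 32-bit halves.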
   1583 #if V8_TARGET_ARCH_PPC64
   1584       __ MovInt64ComponentsToDouble(i.OutputDoubleRegister(),
   1585                                     i.InputRegister(0), i.InputRegister(1), r0);
   1586 #else
   1587       __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0),
   1588                           i.InputRegister(1));
   1589 #endif
   1590       DCHECK_EQ(LeaveRC, i.OutputRCBit());
   1591       break;
   1592     case kPPC_BitcastFloat32ToInt32:
   1593       __ MovFloatToInt(i.OutputRegister(), i.InputDoubleRegister(0));
   1594       break;
   1595     case kPPC_BitcastInt32ToFloat32:
   1596       __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
   1597       break;
   1598 #if V8_TARGET_ARCH_PPC64
   1599     case kPPC_BitcastDoubleToInt64:
   1600       __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0));
   1601       break;
   1602     case kPPC_BitcastInt64ToDouble:
   1603       __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
   1604       break;
   1605 #endif
   1606     case kPPC_LoadWordU8:
   1607       ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
   1608       break;
   1609     case kPPC_LoadWordS8:
   1610       ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
   1611       __ extsb(i.OutputRegister(), i.OutputRegister());
   1612       break;
   1613     case kPPC_LoadWordU16:
   1614       ASSEMBLE_LOAD_INTEGER(lhz, lhzx);
   1615       break;
   1616     case kPPC_LoadWordS16:
   1617       ASSEMBLE_LOAD_INTEGER(lha, lhax);
   1618       break;
   1619     case kPPC_LoadWordU32:
   1620       ASSEMBLE_LOAD_INTEGER(lwz, lwzx);
   1621       break;
   1622     case kPPC_LoadWordS32:
   1623       ASSEMBLE_LOAD_INTEGER(lwa, lwax);
   1624       break;
   1625 #if V8_TARGET_ARCH_PPC64
   1626     case kPPC_LoadWord64:
   1627       ASSEMBLE_LOAD_INTEGER(ld, ldx);
   1628       break;
   1629 #endif
   1630     case kPPC_LoadFloat32:
   1631       ASSEMBLE_LOAD_FLOAT(lfs, lfsx);
   1632       break;
   1633     case kPPC_LoadDouble:
   1634       ASSEMBLE_LOAD_FLOAT(lfd, lfdx);
   1635       break;
   1636     case kPPC_StoreWord8:
   1637       ASSEMBLE_STORE_INTEGER(stb, stbx);
   1638       break;
   1639     case kPPC_StoreWord16:
   1640       ASSEMBLE_STORE_INTEGER(sth, sthx);
   1641       break;
   1642     case kPPC_StoreWord32:
   1643       ASSEMBLE_STORE_INTEGER(stw, stwx);
   1644       break;
   1645 #if V8_TARGET_ARCH_PPC64
   1646     case kPPC_StoreWord64:
   1647       ASSEMBLE_STORE_INTEGER(std, stdx);
   1648       break;
   1649 #endif
   1650     case kPPC_StoreFloat32:
   1651       ASSEMBLE_STORE_FLOAT32();
   1652       break;
   1653     case kPPC_StoreDouble:
   1654       ASSEMBLE_STORE_DOUBLE();
   1655       break;
   1656     case kCheckedLoadInt8:
   1657       ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx);
   1658       __ extsb(i.OutputRegister(), i.OutputRegister());
   1659       break;
   1660     case kCheckedLoadUint8:
   1661       ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx);
   1662       break;
   1663     case kCheckedLoadInt16:
   1664       ASSEMBLE_CHECKED_LOAD_INTEGER(lha, lhax);
   1665       break;
   1666     case kCheckedLoadUint16:
   1667       ASSEMBLE_CHECKED_LOAD_INTEGER(lhz, lhzx);
   1668       break;
   1669     case kCheckedLoadWord32:
   1670       ASSEMBLE_CHECKED_LOAD_INTEGER(lwz, lwzx);
   1671       break;
   1672     case kCheckedLoadWord64:
   1673 #if V8_TARGET_ARCH_PPC64
   1674       ASSEMBLE_CHECKED_LOAD_INTEGER(ld, ldx);
   1675 #else
   1676       UNREACHABLE();
   1677 #endif
   1678       break;
   1679     case kCheckedLoadFloat32:
   1680       ASSEMBLE_CHECKED_LOAD_FLOAT(lfs, lfsx, 32);
   1681       break;
   1682     case kCheckedLoadFloat64:
   1683       ASSEMBLE_CHECKED_LOAD_FLOAT(lfd, lfdx, 64);
   1684       break;
   1685     case kCheckedStoreWord8:
   1686       ASSEMBLE_CHECKED_STORE_INTEGER(stb, stbx);
   1687       break;
   1688     case kCheckedStoreWord16:
   1689       ASSEMBLE_CHECKED_STORE_INTEGER(sth, sthx);
   1690       break;
   1691     case kCheckedStoreWord32:
   1692       ASSEMBLE_CHECKED_STORE_INTEGER(stw, stwx);
   1693       break;
   1694     case kCheckedStoreWord64:
   1695 #if V8_TARGET_ARCH_PPC64
   1696       ASSEMBLE_CHECKED_STORE_INTEGER(std, stdx);
   1697 #else
   1698       UNREACHABLE();
   1699 #endif
   1700       break;
   1701     case kCheckedStoreFloat32:
   1702       ASSEMBLE_CHECKED_STORE_FLOAT32();
   1703       break;
   1704     case kCheckedStoreFloat64:
   1705       ASSEMBLE_CHECKED_STORE_DOUBLE();
   1706       break;
   1707 
   1708     case kAtomicLoadInt8:
   1709       ASSEMBLE_ATOMIC_LOAD_INTEGER(lbz, lbzx);
   1710       __ extsb(i.OutputRegister(), i.OutputRegister());
   1711       break;
   1712     case kAtomicLoadUint8:
   1713       ASSEMBLE_ATOMIC_LOAD_INTEGER(lbz, lbzx);
   1714       break;
   1715     case kAtomicLoadInt16:
   1716       ASSEMBLE_ATOMIC_LOAD_INTEGER(lha, lhax);
   1717       break;
   1718     case kAtomicLoadUint16:
   1719       ASSEMBLE_ATOMIC_LOAD_INTEGER(lhz, lhzx);
   1720       break;
   1721     case kAtomicLoadWord32:
   1722       ASSEMBLE_ATOMIC_LOAD_INTEGER(lwz, lwzx);
   1723       break;
   1724 
   1725     case kAtomicStoreWord8:
   1726       ASSEMBLE_ATOMIC_STORE_INTEGER(stb, stbx);
   1727       break;
   1728     case kAtomicStoreWord16:
   1729       ASSEMBLE_ATOMIC_STORE_INTEGER(sth, sthx);
   1730       break;
   1731     case kAtomicStoreWord32:
   1732       ASSEMBLE_ATOMIC_STORE_INTEGER(stw, stwx);
   1733       break;
   1734     default:
   1735       UNREACHABLE();
   1736       break;
   1737   }
   1738   return kSuccess;
   1739 }  // NOLINT(readability/fn_size)
   1740 
   1741 
   1742 // Assembles branches after an instruction.
   1743 void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   1744   PPCOperandConverter i(this, instr);
   1745   Label* tlabel = branch->true_label;
   1746   Label* flabel = branch->false_label;
   1747   ArchOpcode op = instr->arch_opcode();
   1748   FlagsCondition condition = branch->condition;
   1749   CRegister cr = cr0;
   1750 
   1751   Condition cond = FlagsConditionToCondition(condition, op);
   1752   if (op == kPPC_CmpDouble) {
   1753     // check for unordered if necessary
   1754     if (cond == le) {
   1755       __ bunordered(flabel, cr);
   1756       // Unnecessary for eq/lt since only FU bit will be set.
   1757     } else if (cond == gt) {
   1758       __ bunordered(tlabel, cr);
   1759       // Unnecessary for ne/ge since only FU bit will be set.
   1760     }
   1761   }
   1762   __ b(cond, tlabel, cr);
   1763   if (!branch->fallthru) __ b(flabel);  // no fallthru to flabel.
   1764 }
   1765 
   1766 
   1767 void CodeGenerator::AssembleArchJump(RpoNumber target) {
   1768   if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
   1769 }
   1770 
   1771 
   1772 // Assembles boolean materializations after an instruction.
   1773 void CodeGenerator::AssembleArchBoolean(Instruction* instr,
   1774                                         FlagsCondition condition) {
   1775   PPCOperandConverter i(this, instr);
   1776   Label done;
   1777   ArchOpcode op = instr->arch_opcode();
   1778   CRegister cr = cr0;
   1779   int reg_value = -1;
   1780 
   1781   // Materialize a full 32-bit 1 or 0 value. The result register is always the
   1782   // last output of the instruction.
   1783   DCHECK_NE(0u, instr->OutputCount());
   1784   Register reg = i.OutputRegister(instr->OutputCount() - 1);
   1785 
   1786   Condition cond = FlagsConditionToCondition(condition, op);
   1787   if (op == kPPC_CmpDouble) {
   1788     // check for unordered if necessary
   1789     if (cond == le) {
   1790       reg_value = 0;
   1791       __ li(reg, Operand::Zero());
   1792       __ bunordered(&done, cr);
   1793     } else if (cond == gt) {
   1794       reg_value = 1;
   1795       __ li(reg, Operand(1));
   1796       __ bunordered(&done, cr);
   1797     }
   1798     // Unnecessary for eq/lt & ne/ge since only FU bit will be set.
   1799   }
   1800 
   1801   if (CpuFeatures::IsSupported(ISELECT)) {
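             // Materialize the boolean without a branch: isel picks between 1
             // (already in reg) and 0. For ne/ge/le the condition is negated
             // and r0, which reads as zero in isel, supplies the 0.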
   1802     switch (cond) {
   1803       case eq:
   1804       case lt:
   1805       case gt:
   1806         if (reg_value != 1) __ li(reg, Operand(1));
   1807         __ li(kScratchReg, Operand::Zero());
   1808         __ isel(cond, reg, reg, kScratchReg, cr);
   1809         break;
   1810       case ne:
   1811       case ge:
   1812       case le:
   1813         if (reg_value != 1) __ li(reg, Operand(1));
   1814         // r0 implies logical zero in this form
   1815         __ isel(NegateCondition(cond), reg, r0, reg, cr);
   1816         break;
   1817     default:
   1818       UNREACHABLE();
   1819       break;
   1820     }
   1821   } else {
   1822     if (reg_value != 0) __ li(reg, Operand::Zero());
   1823     __ b(NegateCondition(cond), &done, cr);
   1824     __ li(reg, Operand(1));
   1825   }
   1826   __ bind(&done);
   1827 }
   1828 
   1829 
   1830 void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
   1831   PPCOperandConverter i(this, instr);
   1832   Register input = i.InputRegister(0);
   1833   for (size_t index = 2; index < instr->InputCount(); index += 2) {
   1834     __ Cmpwi(input, Operand(i.InputInt32(index + 0)), r0);
   1835     __ beq(GetLabel(i.InputRpo(index + 1)));
   1836   }
   1837   AssembleArchJump(i.InputRpo(1));
   1838 }
   1839 
   1840 
   1841 void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
   1842   PPCOperandConverter i(this, instr);
   1843   Register input = i.InputRegister(0);
   1844   int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
   1845   Label** cases = zone()->NewArray<Label*>(case_count);
   1846   for (int32_t index = 0; index < case_count; ++index) {
   1847     cases[index] = GetLabel(i.InputRpo(index + 2));
   1848   }
   1849   Label* const table = AddJumpTable(cases, case_count);
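           // Bounds-check the input against the case count, then load the
           // target address from the jump table (see AssembleJumpTable below)
           // and jump to it.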
   1850   __ Cmpli(input, Operand(case_count), r0);
   1851   __ bge(GetLabel(i.InputRpo(1)));
   1852   __ mov_label_addr(kScratchReg, table);
   1853   __ ShiftLeftImm(r0, input, Operand(kPointerSizeLog2));
   1854   __ LoadPX(kScratchReg, MemOperand(kScratchReg, r0));
   1855   __ Jump(kScratchReg);
   1856 }
   1857 
   1858 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
   1859     int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
   1860   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
   1861       isolate(), deoptimization_id, bailout_type);
   1862   // TODO(turbofan): We should be able to generate better code by sharing the
   1863   // actual final call site and just bl'ing to it here, similar to what we do
   1864   // in the lithium backend.
   1865   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   1866   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   1867   return kSuccess;
   1868 }
   1869 
   1870 void CodeGenerator::FinishFrame(Frame* frame) {
   1871   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
   1872   const RegList double_saves = descriptor->CalleeSavedFPRegisters();
   1873 
   1874   // Save callee-saved Double registers.
   1875   if (double_saves != 0) {
   1876     frame->AlignSavedCalleeRegisterSlots();
   1877     DCHECK(kNumCalleeSavedDoubles ==
   1878            base::bits::CountPopulation32(double_saves));
   1879     frame->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles *
   1880                                              (kDoubleSize / kPointerSize));
   1881   }
   1882   // Save callee-saved registers.
   1883   const RegList saves =
   1884       FLAG_enable_embedded_constant_pool
   1885           ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
   1886           : descriptor->CalleeSavedRegisters();
   1887   if (saves != 0) {
    1888     // The register save area excludes fp and the constant pool pointer.
   1889     const int num_saves =
   1890         kNumCalleeSaved - 1 - (FLAG_enable_embedded_constant_pool ? 1 : 0);
   1891     DCHECK(num_saves == base::bits::CountPopulation32(saves));
   1892     frame->AllocateSavedCalleeRegisterSlots(num_saves);
   1893   }
   1894 }
   1895 
   1896 void CodeGenerator::AssembleConstructFrame() {
   1897   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
   1898   if (frame_access_state()->has_frame()) {
   1899     if (descriptor->IsCFunctionCall()) {
   1900       __ function_descriptor();
   1901       __ mflr(r0);
   1902       if (FLAG_enable_embedded_constant_pool) {
   1903         __ Push(r0, fp, kConstantPoolRegister);
   1904         // Adjust FP to point to saved FP.
   1905         __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
   1906       } else {
   1907         __ Push(r0, fp);
   1908         __ mr(fp, sp);
   1909       }
   1910     } else if (descriptor->IsJSFunctionCall()) {
   1911       __ Prologue(this->info()->GeneratePreagedPrologue(), ip);
   1912     } else {
   1913       StackFrame::Type type = info()->GetOutputStackFrameType();
   1914       // TODO(mbrandy): Detect cases where ip is the entrypoint (for
    1915       // efficient initialization of the constant pool pointer register).
   1916       __ StubPrologue(type);
   1917     }
   1918   }
   1919 
   1920   int shrink_slots = frame()->GetSpillSlotCount();
   1921   if (info()->is_osr()) {
   1922     // TurboFan OSR-compiled functions cannot be entered directly.
   1923     __ Abort(kShouldNotDirectlyEnterOsrFunction);
   1924 
   1925     // Unoptimized code jumps directly to this entrypoint while the unoptimized
   1926     // frame is still on the stack. Optimized code uses OSR values directly from
   1927     // the unoptimized frame. Thus, all that needs to be done is to allocate the
   1928     // remaining stack slots.
   1929     if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
   1930     osr_pc_offset_ = __ pc_offset();
   1931     shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
   1932   }
   1933 
   1934   const RegList double_saves = descriptor->CalleeSavedFPRegisters();
   1935   if (shrink_slots > 0) {
   1936     __ Add(sp, sp, -shrink_slots * kPointerSize, r0);
   1937   }
   1938 
   1939   // Save callee-saved Double registers.
   1940   if (double_saves != 0) {
   1941     __ MultiPushDoubles(double_saves);
   1942     DCHECK(kNumCalleeSavedDoubles ==
   1943            base::bits::CountPopulation32(double_saves));
   1944   }
   1945 
   1946   // Save callee-saved registers.
   1947   const RegList saves =
   1948       FLAG_enable_embedded_constant_pool
   1949           ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
   1950           : descriptor->CalleeSavedRegisters();
   1951   if (saves != 0) {
   1952     __ MultiPush(saves);
    1953     // The register save area excludes fp and the constant pool pointer.
   1954   }
   1955 }
   1956 
   1957 
   1958 void CodeGenerator::AssembleReturn() {
   1959   CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
   1960   int pop_count = static_cast<int>(descriptor->StackParameterCount());
   1961 
   1962   // Restore registers.
   1963   const RegList saves =
   1964       FLAG_enable_embedded_constant_pool
   1965           ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
   1966           : descriptor->CalleeSavedRegisters();
   1967   if (saves != 0) {
   1968     __ MultiPop(saves);
   1969   }
   1970 
   1971   // Restore double registers.
   1972   const RegList double_saves = descriptor->CalleeSavedFPRegisters();
   1973   if (double_saves != 0) {
   1974     __ MultiPopDoubles(double_saves);
   1975   }
   1976 
   1977   if (descriptor->IsCFunctionCall()) {
   1978     AssembleDeconstructFrame();
   1979   } else if (frame_access_state()->has_frame()) {
   1980     // Canonicalize JSFunction return sites for now.
   1981     if (return_label_.is_bound()) {
   1982       __ b(&return_label_);
   1983       return;
   1984     } else {
   1985       __ bind(&return_label_);
   1986       AssembleDeconstructFrame();
   1987     }
   1988   }
   1989   __ Ret(pop_count);
   1990 }
   1991 
   1992 
   1993 void CodeGenerator::AssembleMove(InstructionOperand* source,
   1994                                  InstructionOperand* destination) {
   1995   PPCOperandConverter g(this, nullptr);
   1996   // Dispatch on the source and destination operand kinds.  Not all
   1997   // combinations are possible.
   1998   if (source->IsRegister()) {
   1999     DCHECK(destination->IsRegister() || destination->IsStackSlot());
   2000     Register src = g.ToRegister(source);
   2001     if (destination->IsRegister()) {
   2002       __ Move(g.ToRegister(destination), src);
   2003     } else {
   2004       __ StoreP(src, g.ToMemOperand(destination), r0);
   2005     }
   2006   } else if (source->IsStackSlot()) {
   2007     DCHECK(destination->IsRegister() || destination->IsStackSlot());
   2008     MemOperand src = g.ToMemOperand(source);
   2009     if (destination->IsRegister()) {
   2010       __ LoadP(g.ToRegister(destination), src, r0);
   2011     } else {
   2012       Register temp = kScratchReg;
   2013       __ LoadP(temp, src, r0);
   2014       __ StoreP(temp, g.ToMemOperand(destination), r0);
   2015     }
   2016   } else if (source->IsConstant()) {
   2017     Constant src = g.ToConstant(source);
   2018     if (destination->IsRegister() || destination->IsStackSlot()) {
   2019       Register dst =
   2020           destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
   2021       switch (src.type()) {
   2022         case Constant::kInt32:
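                   // Only constants carrying wasm relocation info need a
                   // relocatable mov; on PPC64 memory/global references are
                   // 64-bit, so only the memory-size reference applies here.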
   2023 #if V8_TARGET_ARCH_PPC64
   2024           if (src.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
   2025 #else
   2026           if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
   2027               src.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE ||
   2028               src.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
   2029 #endif
   2030             __ mov(dst, Operand(src.ToInt32(), src.rmode()));
   2031           } else {
   2032             __ mov(dst, Operand(src.ToInt32()));
   2033           }
   2034           break;
   2035         case Constant::kInt64:
   2036 #if V8_TARGET_ARCH_PPC64
   2037           if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
   2038               src.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE) {
   2039             __ mov(dst, Operand(src.ToInt64(), src.rmode()));
   2040           } else {
   2041             DCHECK(src.rmode() != RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
   2042 #endif
   2043             __ mov(dst, Operand(src.ToInt64()));
   2044 #if V8_TARGET_ARCH_PPC64
   2045           }
   2046 #endif
   2047           break;
   2048         case Constant::kFloat32:
   2049           __ Move(dst,
   2050                   isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
   2051           break;
   2052         case Constant::kFloat64:
   2053           __ Move(dst,
   2054                   isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
   2055           break;
   2056         case Constant::kExternalReference:
   2057           __ mov(dst, Operand(src.ToExternalReference()));
   2058           break;
   2059         case Constant::kHeapObject: {
   2060           Handle<HeapObject> src_object = src.ToHeapObject();
   2061           Heap::RootListIndex index;
   2062           int slot;
   2063           if (IsMaterializableFromFrame(src_object, &slot)) {
   2064             __ LoadP(dst, g.SlotToMemOperand(slot));
   2065           } else if (IsMaterializableFromRoot(src_object, &index)) {
   2066             __ LoadRoot(dst, index);
   2067           } else {
   2068             __ Move(dst, src_object);
   2069           }
   2070           break;
   2071         }
   2072         case Constant::kRpoNumber:
   2073           UNREACHABLE();  // TODO(dcarney): loading RPO constants on PPC.
   2074           break;
   2075       }
   2076       if (destination->IsStackSlot()) {
   2077         __ StoreP(dst, g.ToMemOperand(destination), r0);
   2078       }
   2079     } else {
   2080       DoubleRegister dst = destination->IsFPRegister()
   2081                                ? g.ToDoubleRegister(destination)
   2082                                : kScratchDoubleReg;
   2083       double value = (src.type() == Constant::kFloat32) ? src.ToFloat32()
   2084                                                         : src.ToFloat64();
   2085       __ LoadDoubleLiteral(dst, value, kScratchReg);
   2086       if (destination->IsFPStackSlot()) {
   2087         __ StoreDouble(dst, g.ToMemOperand(destination), r0);
   2088       }
   2089     }
   2090   } else if (source->IsFPRegister()) {
   2091     DoubleRegister src = g.ToDoubleRegister(source);
   2092     if (destination->IsFPRegister()) {
   2093       DoubleRegister dst = g.ToDoubleRegister(destination);
   2094       __ Move(dst, src);
   2095     } else {
   2096       DCHECK(destination->IsFPStackSlot());
   2097       LocationOperand* op = LocationOperand::cast(source);
   2098       if (op->representation() == MachineRepresentation::kFloat64) {
   2099         __ StoreDouble(src, g.ToMemOperand(destination), r0);
   2100       } else {
   2101         __ StoreSingle(src, g.ToMemOperand(destination), r0);
   2102       }
   2103     }
   2104   } else if (source->IsFPStackSlot()) {
   2105     DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
   2106     MemOperand src = g.ToMemOperand(source);
   2107     if (destination->IsFPRegister()) {
   2108       LocationOperand* op = LocationOperand::cast(source);
   2109       if (op->representation() == MachineRepresentation::kFloat64) {
   2110         __ LoadDouble(g.ToDoubleRegister(destination), src, r0);
   2111       } else {
   2112         __ LoadSingle(g.ToDoubleRegister(destination), src, r0);
   2113       }
   2114     } else {
   2115       LocationOperand* op = LocationOperand::cast(source);
   2116       DoubleRegister temp = kScratchDoubleReg;
   2117       if (op->representation() == MachineRepresentation::kFloat64) {
   2118         __ LoadDouble(temp, src, r0);
   2119         __ StoreDouble(temp, g.ToMemOperand(destination), r0);
   2120       } else {
   2121         __ LoadSingle(temp, src, r0);
   2122         __ StoreSingle(temp, g.ToMemOperand(destination), r0);
   2123       }
   2124     }
   2125   } else {
   2126     UNREACHABLE();
   2127   }
   2128 }
   2129 
   2130 
   2131 void CodeGenerator::AssembleSwap(InstructionOperand* source,
   2132                                  InstructionOperand* destination) {
   2133   PPCOperandConverter g(this, nullptr);
   2134   // Dispatch on the source and destination operand kinds.  Not all
   2135   // combinations are possible.
   2136   if (source->IsRegister()) {
   2137     // Register-register.
   2138     Register temp = kScratchReg;
   2139     Register src = g.ToRegister(source);
   2140     if (destination->IsRegister()) {
   2141       Register dst = g.ToRegister(destination);
   2142       __ mr(temp, src);
   2143       __ mr(src, dst);
   2144       __ mr(dst, temp);
   2145     } else {
   2146       DCHECK(destination->IsStackSlot());
   2147       MemOperand dst = g.ToMemOperand(destination);
   2148       __ mr(temp, src);
   2149       __ LoadP(src, dst);
   2150       __ StoreP(temp, dst);
   2151     }
   2152 #if V8_TARGET_ARCH_PPC64
   2153   } else if (source->IsStackSlot() || source->IsFPStackSlot()) {
   2154 #else
   2155   } else if (source->IsStackSlot()) {
   2156     DCHECK(destination->IsStackSlot());
   2157 #endif
   2158     Register temp_0 = kScratchReg;
   2159     Register temp_1 = r0;
   2160     MemOperand src = g.ToMemOperand(source);
   2161     MemOperand dst = g.ToMemOperand(destination);
   2162     __ LoadP(temp_0, src);
   2163     __ LoadP(temp_1, dst);
   2164     __ StoreP(temp_0, dst);
   2165     __ StoreP(temp_1, src);
   2166   } else if (source->IsFPRegister()) {
   2167     DoubleRegister temp = kScratchDoubleReg;
   2168     DoubleRegister src = g.ToDoubleRegister(source);
   2169     if (destination->IsFPRegister()) {
   2170       DoubleRegister dst = g.ToDoubleRegister(destination);
   2171       __ fmr(temp, src);
   2172       __ fmr(src, dst);
   2173       __ fmr(dst, temp);
   2174     } else {
   2175       DCHECK(destination->IsFPStackSlot());
   2176       MemOperand dst = g.ToMemOperand(destination);
   2177       __ fmr(temp, src);
   2178       __ lfd(src, dst);
   2179       __ stfd(temp, dst);
   2180     }
   2181 #if !V8_TARGET_ARCH_PPC64
   2182   } else if (source->IsFPStackSlot()) {
   2183     DCHECK(destination->IsFPStackSlot());
   2184     DoubleRegister temp_0 = kScratchDoubleReg;
   2185     DoubleRegister temp_1 = d0;
   2186     MemOperand src = g.ToMemOperand(source);
   2187     MemOperand dst = g.ToMemOperand(destination);
   2188     __ lfd(temp_0, src);
   2189     __ lfd(temp_1, dst);
   2190     __ stfd(temp_0, dst);
   2191     __ stfd(temp_1, src);
   2192 #endif
   2193   } else {
   2194     // No other combinations are possible.
   2195     UNREACHABLE();
   2196   }
   2197 }
   2198 
   2199 
   2200 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
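           // Emit one absolute label address per jump-table entry; the table
           // switch code indexes into this block.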
   2201   for (size_t index = 0; index < target_count; ++index) {
   2202     __ emit_label_addr(targets[index]);
   2203   }
   2204 }
   2205 
   2206 
   2207 void CodeGenerator::EnsureSpaceForLazyDeopt() {
   2208   if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
   2209     return;
   2210   }
   2211 
   2212   int space_needed = Deoptimizer::patch_size();
   2213   // Ensure that we have enough space after the previous lazy-bailout
   2214   // instruction for patching the code here.
   2215   int current_pc = masm()->pc_offset();
   2216   if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    2217     // Block trampoline pool emission for the duration of the padding.
   2218     v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
   2219         masm());
   2220     int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
   2221     DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
   2222     while (padding_size > 0) {
   2223       __ nop();
   2224       padding_size -= v8::internal::Assembler::kInstrSize;
   2225     }
   2226   }
   2227 }
   2228 
   2229 #undef __
   2230 
   2231 }  // namespace compiler
   2232 }  // namespace internal
   2233 }  // namespace v8
   2234