// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/move-optimizer.h"
#include "src/compiler/pipeline.h"
#include "test/unittests/compiler/instruction-sequence-unittest.h"

namespace v8 {
namespace internal {
namespace compiler {

class MoveOptimizerTest : public InstructionSequenceTest {
 public:
  Instruction* LastInstruction() { return sequence()->instructions().back(); }

  // Adds a move from |from| to |to| to the parallel move at gap position
  // |pos| of |instr|.
  void AddMove(Instruction* instr, TestOperand from, TestOperand to,
               Instruction::GapPosition pos = Instruction::START) {
    auto parallel_move = instr->GetOrCreateParallelMove(pos, zone());
    parallel_move->AddMove(ConvertMoveArg(from), ConvertMoveArg(to));
  }

  // Counts the moves in |moves| that are not redundant.
  int NonRedundantSize(ParallelMove* moves) {
    int i = 0;
    for (auto move : *moves) {
      if (move->IsRedundant()) continue;
      i++;
    }
    return i;
  }

  // Returns true if |moves| contains a non-redundant move from |from_op| to
  // |to_op|.
  bool Contains(ParallelMove* moves, TestOperand from_op, TestOperand to_op) {
    auto from = ConvertMoveArg(from_op);
    auto to = ConvertMoveArg(to_op);
    for (auto move : *moves) {
      if (move->IsRedundant()) continue;
      if (move->source().Equals(from) && move->destination().Equals(to)) {
        return true;
      }
    }
    return false;
  }

  // TODO(dcarney): add a verifier.
  // Wires up the blocks and runs the move optimizer over the sequence,
  // optionally tracing the instruction sequence before and after.
  void Optimize() {
    WireBlocks();
    if (FLAG_trace_turbo) {
      OFStream os(stdout);
      PrintableInstructionSequence printable = {config(), sequence()};
      os << "----- Instruction sequence before move optimization -----\n"
         << printable;
    }
    MoveOptimizer move_optimizer(zone(), sequence());
    move_optimizer.Run();
    if (FLAG_trace_turbo) {
      OFStream os(stdout);
      PrintableInstructionSequence printable = {config(), sequence()};
      os << "----- Instruction sequence after move optimization -----\n"
         << printable;
    }
  }

 private:
  // Translates a TestOperand into the corresponding InstructionOperand.
  InstructionOperand ConvertMoveArg(TestOperand op) {
    CHECK_EQ(kNoValue, op.vreg_.value_);
    CHECK_NE(kNoValue, op.value_);
    switch (op.type_) {
      case kConstant:
        return ConstantOperand(op.value_);
      case kFixedSlot:
        return AllocatedOperand(LocationOperand::STACK_SLOT,
                                MachineRepresentation::kWord32, op.value_);
      case kFixedRegister:
        CHECK(0 <= op.value_ && op.value_ < num_general_registers());
        return AllocatedOperand(LocationOperand::REGISTER,
                                MachineRepresentation::kWord32, op.value_);
      case kExplicit:
        CHECK(0 <= op.value_ && op.value_ < num_general_registers());
        return ExplicitOperand(LocationOperand::REGISTER,
                               MachineRepresentation::kWord32, op.value_);
      default:
        break;
    }
    CHECK(false);
    return InstructionOperand();
  }
};

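// Back-to-back moves Reg(0) -> Reg(1) and Reg(1) -> Reg(0) in consecutive
// gaps should collapse into a single move at the last gap.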
TEST_F(MoveOptimizerTest, RemovesRedundant) {
  StartBlock();
  auto first_instr = EmitNop();
  AddMove(first_instr, Reg(0), Reg(1));
  auto last_instr = EmitNop();
  AddMove(last_instr, Reg(1), Reg(0));
  EndBlock(Last());

  Optimize();

  CHECK_EQ(0, NonRedundantSize(first_instr->parallel_moves()[0]));
  auto move = last_instr->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Reg(1)));
}

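// Same as RemovesRedundant, but with an explicit register operand as the
// destination of the first move.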
TEST_F(MoveOptimizerTest, RemovesRedundantExplicit) {
  int first_reg_index =
      RegisterConfiguration::Turbofan()->GetAllocatableGeneralCode(0);
  int second_reg_index =
      RegisterConfiguration::Turbofan()->GetAllocatableGeneralCode(1);

  StartBlock();
  auto first_instr = EmitNop();
  AddMove(first_instr, Reg(first_reg_index), ExplicitReg(second_reg_index));
  auto last_instr = EmitNop();
  AddMove(last_instr, Reg(second_reg_index), Reg(first_reg_index));
  EndBlock(Last());

  Optimize();

  CHECK_EQ(0, NonRedundantSize(first_instr->parallel_moves()[0]));
  auto move = last_instr->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Reg(first_reg_index), ExplicitReg(second_reg_index)));
}

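// Several moves of the same constant should be split into one
// constant-to-register move followed by register-to-slot moves.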
TEST_F(MoveOptimizerTest, SplitsConstants) {
  StartBlock();
  EndBlock(Last());

  auto gap = LastInstruction();
  AddMove(gap, Const(1), Slot(0));
  AddMove(gap, Const(1), Slot(1));
  AddMove(gap, Const(1), Reg(0));
  AddMove(gap, Const(1), Slot(2));

  Optimize();

  auto move = gap->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Const(1), Reg(0)));

  move = gap->parallel_moves()[1];
  CHECK_EQ(3, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Slot(0)));
  CHECK(Contains(move, Reg(0), Slot(1)));
  CHECK(Contains(move, Reg(0), Slot(2)));
}

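// An identical move at the end of both predecessors should be merged into
// the common successor block.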
TEST_F(MoveOptimizerTest, SimpleMerge) {
  StartBlock();
  EndBlock(Branch(Imm(), 1, 2));

  StartBlock();
  EndBlock(Jump(2));
  AddMove(LastInstruction(), Reg(0), Reg(1));

  StartBlock();
  EndBlock(Jump(1));
  AddMove(LastInstruction(), Reg(0), Reg(1));

  StartBlock();
  EndBlock(Last());

  auto last = LastInstruction();

  Optimize();

  auto move = last->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Reg(1)));
}

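// A register swap cycle common to both predecessors should be merged into
// the successor, leaving the predecessor gaps redundant.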
TEST_F(MoveOptimizerTest, SimpleMergeCycle) {
  StartBlock();
  EndBlock(Branch(Imm(), 1, 2));

  StartBlock();
  EndBlock(Jump(2));
  auto gap_0 = LastInstruction();
  AddMove(gap_0, Reg(0), Reg(1));
  AddMove(gap_0, Reg(1), Reg(0));

  StartBlock();
  EndBlock(Jump(1));
  auto gap_1 = LastInstruction();
  AddMove(gap_1, Reg(0), Reg(1));
  AddMove(gap_1, Reg(1), Reg(0));

  StartBlock();
  EndBlock(Last());

  auto last = LastInstruction();

  Optimize();

  CHECK(gap_0->AreMovesRedundant());
  CHECK(gap_1->AreMovesRedundant());
  auto move = last->parallel_moves()[0];
  CHECK_EQ(2, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Reg(1)));
  CHECK(Contains(move, Reg(1), Reg(0)));
}

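// Gap moves can be pushed over a non-conflicting instruction into the next
// gap; only a single constant-to-register assignment should survive.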
TEST_F(MoveOptimizerTest, GapsCanMoveOverInstruction) {
  StartBlock();
  int const_index = 1;
  DefineConstant(const_index);
  Instruction* ctant_def = LastInstruction();
  AddMove(ctant_def, Reg(1), Reg(0));

  Instruction* last = EmitNop();
  AddMove(last, Const(const_index), Reg(0));
  AddMove(last, Reg(0), Reg(1));
  EndBlock(Last());
  Optimize();

  ParallelMove* inst1_start =
      ctant_def->GetParallelMove(Instruction::GapPosition::START);
  ParallelMove* inst1_end =
      ctant_def->GetParallelMove(Instruction::GapPosition::END);
  ParallelMove* last_start =
      last->GetParallelMove(Instruction::GapPosition::START);
  CHECK(inst1_start == nullptr || NonRedundantSize(inst1_start) == 0);
  CHECK(inst1_end == nullptr || NonRedundantSize(inst1_end) == 0);
  CHECK(last_start->size() == 2);
  int redundants = 0;
  int assignment = 0;
  for (MoveOperands* move : *last_start) {
    if (move->IsRedundant()) {
      ++redundants;
    } else {
      ++assignment;
      CHECK(move->destination().IsRegister());
      CHECK(move->source().IsConstant());
    }
  }
  CHECK_EQ(1, redundants);
  CHECK_EQ(1, assignment);
}

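// Only the move common to both predecessors is merged into the successor;
// the per-predecessor moves stay in their own blocks.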
TEST_F(MoveOptimizerTest, SubsetMovesMerge) {
  StartBlock();
  EndBlock(Branch(Imm(), 1, 2));

  StartBlock();
  EndBlock(Jump(2));
  Instruction* last_move_b1 = LastInstruction();
  AddMove(last_move_b1, Reg(0), Reg(1));
  AddMove(last_move_b1, Reg(2), Reg(3));

  StartBlock();
  EndBlock(Jump(1));
  Instruction* last_move_b2 = LastInstruction();
  AddMove(last_move_b2, Reg(0), Reg(1));
  AddMove(last_move_b2, Reg(4), Reg(5));

  StartBlock();
  EndBlock(Last());

  Instruction* last = LastInstruction();

  Optimize();

  ParallelMove* last_move = last->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(last_move));
  CHECK(Contains(last_move, Reg(0), Reg(1)));

  ParallelMove* b1_move = last_move_b1->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(b1_move));
  CHECK(Contains(b1_move, Reg(2), Reg(3)));

  ParallelMove* b2_move = last_move_b2->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(b2_move));
  CHECK(Contains(b2_move, Reg(4), Reg(5)));
}

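// A common move is not merged when one predecessor also clobbers one of its
// operands (here Reg(0) is written by Reg(2) -> Reg(0) in the first
// predecessor); only the conflict-free move Reg(4) -> Reg(5) merges.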
TEST_F(MoveOptimizerTest, GapConflictSubsetMovesDoNotMerge) {
  StartBlock();
  EndBlock(Branch(Imm(), 1, 2));

  StartBlock();
  EndBlock(Jump(2));
  Instruction* last_move_b1 = LastInstruction();
  AddMove(last_move_b1, Reg(0), Reg(1));
  AddMove(last_move_b1, Reg(2), Reg(0));
  AddMove(last_move_b1, Reg(4), Reg(5));

  StartBlock();
  EndBlock(Jump(1));
  Instruction* last_move_b2 = LastInstruction();
  AddMove(last_move_b2, Reg(0), Reg(1));
  AddMove(last_move_b2, Reg(4), Reg(5));

  StartBlock();
  EndBlock(Last());

  Instruction* last = LastInstruction();

  Optimize();

  ParallelMove* last_move = last->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(last_move));
  CHECK(Contains(last_move, Reg(4), Reg(5)));

  ParallelMove* b1_move = last_move_b1->parallel_moves()[0];
  CHECK_EQ(2, NonRedundantSize(b1_move));
  CHECK(Contains(b1_move, Reg(0), Reg(1)));
  CHECK(Contains(b1_move, Reg(2), Reg(0)));

  ParallelMove* b2_move = last_move_b2->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(b2_move));
  CHECK(Contains(b2_move, Reg(0), Reg(1)));
}

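// A move whose destination is immediately overwritten by an instruction's
// output should be eliminated entirely.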
TEST_F(MoveOptimizerTest, ClobberedDestinationsAreEliminated) {
  StartBlock();
  EmitNop();
  Instruction* first_instr = LastInstruction();
  AddMove(first_instr, Reg(0), Reg(1));
  EmitOI(Reg(1), 0, nullptr);
  Instruction* last_instr = LastInstruction();
  EndBlock();
  Optimize();

  ParallelMove* first_move = first_instr->parallel_moves()[0];
  CHECK_EQ(0, NonRedundantSize(first_move));

  ParallelMove* last_move = last_instr->parallel_moves()[0];
  CHECK_EQ(0, NonRedundantSize(last_move));
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8