// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/bytecode-analysis.h"

#include "src/interpreter/bytecode-array-iterator.h"
#include "src/interpreter/bytecode-array-random-iterator.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

using interpreter::Bytecode;
using interpreter::Bytecodes;
using interpreter::OperandType;

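// Tracks which parameters and locals are assigned somewhere inside a loop.
// The backing bit vector stores the parameters first, followed by the local
// registers, so a local register maps to bit (parameter_count_ + index).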
BytecodeLoopAssignments::BytecodeLoopAssignments(int parameter_count,
                                                 int register_count, Zone* zone)
    : parameter_count_(parameter_count),
      bit_vector_(new (zone)
                      BitVector(parameter_count + register_count, zone)) {}

void BytecodeLoopAssignments::Add(interpreter::Register r) {
  if (r.is_parameter()) {
    bit_vector_->Add(r.ToParameterIndex(parameter_count_));
  } else {
    bit_vector_->Add(parameter_count_ + r.index());
  }
}

void BytecodeLoopAssignments::AddList(interpreter::Register r, uint32_t count) {
  if (r.is_parameter()) {
    for (uint32_t i = 0; i < count; i++) {
      DCHECK(interpreter::Register(r.index() + i).is_parameter());
      bit_vector_->Add(r.ToParameterIndex(parameter_count_) + i);
    }
  } else {
    for (uint32_t i = 0; i < count; i++) {
      DCHECK(!interpreter::Register(r.index() + i).is_parameter());
      bit_vector_->Add(parameter_count_ + r.index() + i);
    }
  }
}

void BytecodeLoopAssignments::Union(const BytecodeLoopAssignments& other) {
  bit_vector_->Union(*other.bit_vector_);
}

bool BytecodeLoopAssignments::ContainsParameter(int index) const {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, parameter_count());
  return bit_vector_->Contains(index);
}

bool BytecodeLoopAssignments::ContainsLocal(int index) const {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, local_count());
  return bit_vector_->Contains(parameter_count_ + index);
}

ResumeJumpTarget::ResumeJumpTarget(int suspend_id, int target_offset,
                                   int final_target_offset)
    : suspend_id_(suspend_id),
      target_offset_(target_offset),
      final_target_offset_(final_target_offset) {}

ResumeJumpTarget ResumeJumpTarget::Leaf(int suspend_id, int target_offset) {
  return ResumeJumpTarget(suspend_id, target_offset, target_offset);
}

ResumeJumpTarget ResumeJumpTarget::AtLoopHeader(int loop_header_offset,
                                                const ResumeJumpTarget& next) {
  return ResumeJumpTarget(next.suspend_id(), loop_header_offset,
                          next.target_offset());
}

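// Typical usage (sketch): construct a BytecodeAnalysis for a bytecode array,
// call Analyze() with the OSR bailout id (or BailoutId::None() when not
// OSR-ing), and then query the results via IsLoopHeader(), GetLoopInfoFor(),
// GetInLivenessFor() and GetOutLivenessFor().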
BytecodeAnalysis::BytecodeAnalysis(Handle<BytecodeArray> bytecode_array,
                                   Zone* zone, bool do_liveness_analysis)
    : bytecode_array_(bytecode_array),
      do_liveness_analysis_(do_liveness_analysis),
      zone_(zone),
      loop_stack_(zone),
      loop_end_index_queue_(zone),
      resume_jump_targets_(zone),
      end_to_header_(zone),
      header_to_info_(zone),
      osr_entry_point_(-1),
      liveness_map_(bytecode_array->length(), zone) {}

namespace {

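// Computes the in-liveness of a bytecode from its out-liveness (the caller
// passes |in_liveness| pre-initialized to the out-liveness): registers and the
// accumulator written by the bytecode are killed first, then everything the
// bytecode reads is marked live.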
void UpdateInLiveness(Bytecode bytecode, BytecodeLivenessState& in_liveness,
                      const interpreter::BytecodeArrayAccessor& accessor) {
  int num_operands = Bytecodes::NumberOfOperands(bytecode);
  const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);

  // Special case Suspend and Resume to just pass through liveness.
  if (bytecode == Bytecode::kSuspendGenerator) {
    // The generator object has to be live.
    in_liveness.MarkRegisterLive(accessor.GetRegisterOperand(0).index());
    // Suspend additionally reads and returns the accumulator
    DCHECK(Bytecodes::ReadsAccumulator(bytecode));
    in_liveness.MarkAccumulatorLive();
    return;
  }
  if (bytecode == Bytecode::kResumeGenerator) {
    // The generator object has to be live.
    in_liveness.MarkRegisterLive(accessor.GetRegisterOperand(0).index());
    return;
  }

  if (Bytecodes::WritesAccumulator(bytecode)) {
    in_liveness.MarkAccumulatorDead();
  }
  for (int i = 0; i < num_operands; ++i) {
    switch (operand_types[i]) {
      case OperandType::kRegOut: {
        interpreter::Register r = accessor.GetRegisterOperand(i);
        if (!r.is_parameter()) {
          in_liveness.MarkRegisterDead(r.index());
        }
        break;
      }
      case OperandType::kRegOutList: {
        interpreter::Register r = accessor.GetRegisterOperand(i++);
        uint32_t reg_count = accessor.GetRegisterCountOperand(i);
        if (!r.is_parameter()) {
          for (uint32_t j = 0; j < reg_count; ++j) {
            DCHECK(!interpreter::Register(r.index() + j).is_parameter());
            in_liveness.MarkRegisterDead(r.index() + j);
          }
        }
        break;
      }
      case OperandType::kRegOutPair: {
        interpreter::Register r = accessor.GetRegisterOperand(i);
        if (!r.is_parameter()) {
          DCHECK(!interpreter::Register(r.index() + 1).is_parameter());
          in_liveness.MarkRegisterDead(r.index());
          in_liveness.MarkRegisterDead(r.index() + 1);
        }
        break;
      }
      case OperandType::kRegOutTriple: {
        interpreter::Register r = accessor.GetRegisterOperand(i);
        if (!r.is_parameter()) {
          DCHECK(!interpreter::Register(r.index() + 1).is_parameter());
          DCHECK(!interpreter::Register(r.index() + 2).is_parameter());
          in_liveness.MarkRegisterDead(r.index());
          in_liveness.MarkRegisterDead(r.index() + 1);
          in_liveness.MarkRegisterDead(r.index() + 2);
        }
        break;
      }
      default:
        DCHECK(!Bytecodes::IsRegisterOutputOperandType(operand_types[i]));
        break;
    }
  }

  if (Bytecodes::ReadsAccumulator(bytecode)) {
    in_liveness.MarkAccumulatorLive();
  }
  for (int i = 0; i < num_operands; ++i) {
    switch (operand_types[i]) {
      case OperandType::kReg: {
        interpreter::Register r = accessor.GetRegisterOperand(i);
        if (!r.is_parameter()) {
          in_liveness.MarkRegisterLive(r.index());
        }
        break;
      }
      case OperandType::kRegPair: {
        interpreter::Register r = accessor.GetRegisterOperand(i);
        if (!r.is_parameter()) {
          DCHECK(!interpreter::Register(r.index() + 1).is_parameter());
          in_liveness.MarkRegisterLive(r.index());
          in_liveness.MarkRegisterLive(r.index() + 1);
        }
        break;
      }
      case OperandType::kRegList: {
        interpreter::Register r = accessor.GetRegisterOperand(i++);
        uint32_t reg_count = accessor.GetRegisterCountOperand(i);
        if (!r.is_parameter()) {
          for (uint32_t j = 0; j < reg_count; ++j) {
            DCHECK(!interpreter::Register(r.index() + j).is_parameter());
            in_liveness.MarkRegisterLive(r.index() + j);
          }
        }
        break;
      }
      default:
        DCHECK(!Bytecodes::IsRegisterInputOperandType(operand_types[i]));
        break;
    }
  }
}

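// Computes the out-liveness of a bytecode as the union of the in-liveness of
// all of its possible successors: the fall-through bytecode, any forward jump
// or switch targets, and, for bytecodes that can throw, the matching exception
// handler entry. Back edges (JumpLoop) are deliberately skipped here and are
// handled by the loop iterations in Analyze().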
void UpdateOutLiveness(Bytecode bytecode, BytecodeLivenessState& out_liveness,
                       BytecodeLivenessState* next_bytecode_in_liveness,
                       const interpreter::BytecodeArrayAccessor& accessor,
                       const BytecodeLivenessMap& liveness_map) {
  int current_offset = accessor.current_offset();
  const Handle<BytecodeArray>& bytecode_array = accessor.bytecode_array();

  // Special case Suspend and Resume to just pass through liveness.
  if (bytecode == Bytecode::kSuspendGenerator ||
      bytecode == Bytecode::kResumeGenerator) {
    out_liveness.Union(*next_bytecode_in_liveness);
    return;
  }

  // Update from the jump target (if any). Skip loops; we update these manually
  // in the liveness iterations.
  if (Bytecodes::IsForwardJump(bytecode)) {
    int target_offset = accessor.GetJumpTargetOffset();
    out_liveness.Union(*liveness_map.GetInLiveness(target_offset));
  } else if (Bytecodes::IsSwitch(bytecode)) {
    for (const auto& entry : accessor.GetJumpTableTargetOffsets()) {
      out_liveness.Union(*liveness_map.GetInLiveness(entry.target_offset));
    }
  }

  // Update from next bytecode (unless there isn't one or this is an
  // unconditional jump).
  if (next_bytecode_in_liveness != nullptr &&
      !Bytecodes::IsUnconditionalJump(bytecode)) {
    out_liveness.Union(*next_bytecode_in_liveness);
  }

  // Update from exception handler (if any).
  if (!interpreter::Bytecodes::IsWithoutExternalSideEffects(bytecode)) {
    int handler_context;
    // TODO(leszeks): We should look up this range only once per entry.
    HandlerTable table(*bytecode_array);
    int handler_offset =
        table.LookupRange(current_offset, &handler_context, nullptr);

    if (handler_offset != -1) {
      bool was_accumulator_live = out_liveness.AccumulatorIsLive();
      out_liveness.Union(*liveness_map.GetInLiveness(handler_offset));
      out_liveness.MarkRegisterLive(handler_context);
      if (!was_accumulator_live) {
        // The accumulator is reset to the exception on entry into a handler,
        // and so shouldn't be considered live coming out of this bytecode just
        // because it's live coming into the handler. So, kill the accumulator
        // if the handler is the only thing that made it live.
        out_liveness.MarkAccumulatorDead();

        // TODO(leszeks): Ideally the accumulator wouldn't be considered live at
        // the start of the handler, but looking up if the current bytecode is
        // the start of a handler is not free, so we should only do it if we
        // decide it's necessary.
      }
    }
  }
}

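// Recomputes both the out- and in-liveness of a bytecode during the backward
// walk, and publishes its in-liveness as the "next bytecode in-liveness" for
// the bytecode that precedes it in the array.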
void UpdateLiveness(Bytecode bytecode, BytecodeLiveness& liveness,
                    BytecodeLivenessState** next_bytecode_in_liveness,
                    const interpreter::BytecodeArrayAccessor& accessor,
                    const BytecodeLivenessMap& liveness_map) {
  UpdateOutLiveness(bytecode, *liveness.out, *next_bytecode_in_liveness,
                    accessor, liveness_map);
  liveness.in->CopyFrom(*liveness.out);
  UpdateInLiveness(bytecode, *liveness.in, accessor);

  *next_bytecode_in_liveness = liveness.in;
}

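// Records all registers written by a bytecode into the assignment set of the
// innermost loop that contains it.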
void UpdateAssignments(Bytecode bytecode, BytecodeLoopAssignments& assignments,
                       const interpreter::BytecodeArrayAccessor& accessor) {
  int num_operands = Bytecodes::NumberOfOperands(bytecode);
  const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);

  for (int i = 0; i < num_operands; ++i) {
    switch (operand_types[i]) {
      case OperandType::kRegOut: {
        assignments.Add(accessor.GetRegisterOperand(i));
        break;
      }
      case OperandType::kRegOutList: {
        interpreter::Register r = accessor.GetRegisterOperand(i++);
        uint32_t reg_count = accessor.GetRegisterCountOperand(i);
        assignments.AddList(r, reg_count);
        break;
      }
      case OperandType::kRegOutPair: {
        assignments.AddList(accessor.GetRegisterOperand(i), 2);
        break;
      }
      case OperandType::kRegOutTriple: {
        assignments.AddList(accessor.GetRegisterOperand(i), 3);
        break;
      }
      default:
        DCHECK(!Bytecodes::IsRegisterOutputOperandType(operand_types[i]));
        break;
    }
  }
}

}  // namespace

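// Performs a single backward pass over the bytecode to discover loops, loop
// assignments and generator resume targets, and (if requested) to compute a
// first approximation of the per-bytecode liveness. Targeted re-passes over
// loop bodies, and over the generator switch if there is one, then bring the
// liveness to a fixed point.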
void BytecodeAnalysis::Analyze(BailoutId osr_bailout_id) {
  loop_stack_.push({-1, nullptr});

  BytecodeLivenessState* next_bytecode_in_liveness = nullptr;

  bool is_osr = !osr_bailout_id.IsNone();
  int osr_loop_end_offset = is_osr ? osr_bailout_id.ToInt() : -1;

  int generator_switch_index = -1;

  interpreter::BytecodeArrayRandomIterator iterator(bytecode_array(), zone());
  for (iterator.GoToEnd(); iterator.IsValid(); --iterator) {
    Bytecode bytecode = iterator.current_bytecode();
    int current_offset = iterator.current_offset();

    if (bytecode == Bytecode::kSwitchOnGeneratorState) {
      DCHECK_EQ(generator_switch_index, -1);
      generator_switch_index = iterator.current_index();
    }

    if (bytecode == Bytecode::kJumpLoop) {
      // Every byte up to and including the last byte within the backwards jump
      // instruction is considered part of the loop; set the loop end
      // accordingly.
      int loop_end = current_offset + iterator.current_bytecode_size();
      int loop_header = iterator.GetJumpTargetOffset();
      PushLoop(loop_header, loop_end);

      if (current_offset == osr_loop_end_offset) {
        osr_entry_point_ = loop_header;
      } else if (current_offset < osr_loop_end_offset) {
        // Check that we've found the osr_entry_point if we've gone past the
        // osr_loop_end_offset. Note that we are iterating the bytecode in
        // reverse, so the less-than comparison above is correct.
        DCHECK_NE(-1, osr_entry_point_);
      }

      // Save the index so that we can do another pass later.
      if (do_liveness_analysis_) {
        loop_end_index_queue_.push_back(iterator.current_index());
      }
    } else if (loop_stack_.size() > 1) {
      LoopStackEntry& current_loop = loop_stack_.top();
      LoopInfo* current_loop_info = current_loop.loop_info;

      // TODO(leszeks): Ideally, we'd only set values that were assigned in
      // the loop *and* are live when the loop exits. However, this requires
      // tracking the out-liveness of *all* loop exits, which is not
      // information we currently have.
      UpdateAssignments(bytecode, current_loop_info->assignments(), iterator);

      // Update suspend counts for this loop, though only if not OSR.
      if (!is_osr && bytecode == Bytecode::kSuspendGenerator) {
        int suspend_id = iterator.GetUnsignedImmediateOperand(3);
        int resume_offset = current_offset + iterator.current_bytecode_size();
        current_loop_info->AddResumeTarget(
            ResumeJumpTarget::Leaf(suspend_id, resume_offset));
      }

      // If we've reached the header of the loop, pop it off the stack.
      if (current_offset == current_loop.header_offset) {
        loop_stack_.pop();
        if (loop_stack_.size() > 1) {
          // If there is still an outer loop, propagate inner loop assignments.
          LoopInfo* parent_loop_info = loop_stack_.top().loop_info;

          parent_loop_info->assignments().Union(
              current_loop_info->assignments());

          // Also, propagate resume targets. Instead of jumping to the target
          // itself, the outer loop will jump to this loop header for any
          // targets that are inside the current loop, so that this loop stays
          // reducible. Hence, a nested loop of the form:
          //
          //                switch (#1 -> suspend1, #2 -> suspend2)
          //                loop {
          //     suspend1:    suspend #1
          //                  loop {
          //     suspend2:      suspend #2
          //                  }
          //                }
          //
          // becomes:
          //
          //                switch (#1 -> loop1, #2 -> loop1)
          //     loop1:     loop {
          //                  switch (#1 -> suspend1, #2 -> loop2)
          //     suspend1:    suspend #1
          //     loop2:       loop {
          //                    switch (#2 -> suspend2)
          //     suspend2:      suspend #2
          //                  }
          //                }
          for (const auto& target : current_loop_info->resume_jump_targets()) {
            parent_loop_info->AddResumeTarget(
                ResumeJumpTarget::AtLoopHeader(current_offset, target));
          }

        } else {
          // Otherwise, just propagate inner loop suspends to top-level.
          for (const auto& target : current_loop_info->resume_jump_targets()) {
            resume_jump_targets_.push_back(
                ResumeJumpTarget::AtLoopHeader(current_offset, target));
          }
        }
      }
    } else if (!is_osr && bytecode == Bytecode::kSuspendGenerator) {
      // If we're not in a loop, we still need to look for suspends.
      // TODO(leszeks): It would be nice to de-duplicate this with the in-loop
      // case
      int suspend_id = iterator.GetUnsignedImmediateOperand(3);
      int resume_offset = current_offset + iterator.current_bytecode_size();
      resume_jump_targets_.push_back(
          ResumeJumpTarget::Leaf(suspend_id, resume_offset));
    }

    if (do_liveness_analysis_) {
      BytecodeLiveness& liveness = liveness_map_.InitializeLiveness(
          current_offset, bytecode_array()->register_count(), zone());
      UpdateLiveness(bytecode, liveness, &next_bytecode_in_liveness, iterator,
                     liveness_map_);
    }
  }

  DCHECK_EQ(loop_stack_.size(), 1u);
  DCHECK_EQ(loop_stack_.top().header_offset, -1);

  DCHECK(ResumeJumpTargetsAreValid());

  if (!do_liveness_analysis_) return;

  // At this point, every bytecode has a valid in and out liveness, except for
  // propagating liveness across back edges (i.e. JumpLoop). Subsequent liveness
  // analysis iterations can only add additional liveness bits that are pulled
  // across these back edges.
  //
  // Furthermore, a loop header's in-liveness can only change based on the
  // bytecodes *after* the loop end -- it cannot change as a result of the
  // JumpLoop liveness being updated, as the only liveness bits that can be
  // added to the loop body are those of the loop header.
  //
  // So, if we know that the liveness of bytecodes after a loop header won't
  // change (e.g. because there are no loops in them, or we have already ensured
  // those loops are valid), we can safely update the loop end and pass over the
  // loop body, and then never have to pass over that loop end again, because we
  // have shown that its target, the loop header, can't change from the entries
  // after the loop, and can't change from any loop body pass.
  //
  // This means that in a pass, we can iterate backwards over the bytecode
  // array, process any loops that we encounter, and on subsequent passes we can
  // skip processing those loops (though we still have to process inner loops).
  //
  // Equivalently, we can queue up loop ends from back to front, and pass over
  // the loops in that order, as this preserves both the bottom-to-top and
  // outer-to-inner requirements.

  for (int loop_end_index : loop_end_index_queue_) {
    iterator.GoToIndex(loop_end_index);

    DCHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);

    int header_offset = iterator.GetJumpTargetOffset();
    int end_offset = iterator.current_offset();

    BytecodeLiveness& header_liveness =
        liveness_map_.GetLiveness(header_offset);
    BytecodeLiveness& end_liveness = liveness_map_.GetLiveness(end_offset);

    if (!end_liveness.out->UnionIsChanged(*header_liveness.in)) {
      // Only update the loop body if the loop end liveness changed.
      continue;
    }
    end_liveness.in->CopyFrom(*end_liveness.out);
    next_bytecode_in_liveness = end_liveness.in;

    // Advance into the loop body.
    --iterator;
    for (; iterator.current_offset() > header_offset; --iterator) {
      Bytecode bytecode = iterator.current_bytecode();
      int current_offset = iterator.current_offset();
      BytecodeLiveness& liveness = liveness_map_.GetLiveness(current_offset);

      UpdateLiveness(bytecode, liveness, &next_bytecode_in_liveness, iterator,
                     liveness_map_);
    }
    // Now we are at the loop header. Since the in-liveness of the header
    // can't change, we need only to update the out-liveness.
    UpdateOutLiveness(iterator.current_bytecode(), *header_liveness.out,
                      next_bytecode_in_liveness, iterator, liveness_map_);
  }

  // Process the generator switch statement separately, once the loops are done.
  // This has to be a separate pass because the generator switch can jump into
  // the middle of loops (and is the only kind of jump that can jump across a
  // loop header).
  if (generator_switch_index != -1) {
    iterator.GoToIndex(generator_switch_index);
    DCHECK_EQ(iterator.current_bytecode(), Bytecode::kSwitchOnGeneratorState);

    int current_offset = iterator.current_offset();
    BytecodeLiveness& switch_liveness =
        liveness_map_.GetLiveness(current_offset);

    bool any_changed = false;
    for (const auto& entry : iterator.GetJumpTableTargetOffsets()) {
      if (switch_liveness.out->UnionIsChanged(
              *liveness_map_.GetInLiveness(entry.target_offset))) {
        any_changed = true;
      }
    }

    // If the switch liveness changed, we have to propagate it up the remaining
    // bytecodes before it.
    if (any_changed) {
      switch_liveness.in->CopyFrom(*switch_liveness.out);
      UpdateInLiveness(Bytecode::kSwitchOnGeneratorState, *switch_liveness.in,
                       iterator);
      next_bytecode_in_liveness = switch_liveness.in;
      for (--iterator; iterator.IsValid(); --iterator) {
        Bytecode bytecode = iterator.current_bytecode();
        int current_offset = iterator.current_offset();
        BytecodeLiveness& liveness = liveness_map_.GetLiveness(current_offset);

        // There shouldn't be any more loops.
        DCHECK_NE(bytecode, Bytecode::kJumpLoop);

        UpdateLiveness(bytecode, liveness, &next_bytecode_in_liveness, iterator,
                       liveness_map_);
      }
    }
  }

  DCHECK(LivenessIsValid());
}

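// Registers a newly discovered loop. Loops are found at their JumpLoop
// bytecode during the backward walk, so a loop is pushed when its end is
// reached and popped again once the walk reaches its header.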
void BytecodeAnalysis::PushLoop(int loop_header, int loop_end) {
  DCHECK(loop_header < loop_end);
  DCHECK(loop_stack_.top().header_offset < loop_header);
  DCHECK(end_to_header_.find(loop_end) == end_to_header_.end());
  DCHECK(header_to_info_.find(loop_header) == header_to_info_.end());

  int parent_offset = loop_stack_.top().header_offset;

  end_to_header_.insert({loop_end, loop_header});
  auto it = header_to_info_.insert(
      {loop_header, LoopInfo(parent_offset, bytecode_array_->parameter_count(),
                             bytecode_array_->register_count(), zone_)});
  // Get the loop info pointer from the output of insert.
  LoopInfo* loop_info = &it.first->second;

  loop_stack_.push({loop_header, loop_info});
}

bool BytecodeAnalysis::IsLoopHeader(int offset) const {
  return header_to_info_.find(offset) != header_to_info_.end();
}

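// Returns the header offset of the innermost loop containing |offset|, or -1
// if the offset is not inside any loop.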
int BytecodeAnalysis::GetLoopOffsetFor(int offset) const {
  auto loop_end_to_header = end_to_header_.upper_bound(offset);
  // If there is no next end => offset is not in a loop.
  if (loop_end_to_header == end_to_header_.end()) {
    return -1;
  }
  // If the header precedes the offset, this is the loop
  //
  //   .> header  <--loop_end_to_header
  //   |
  //   |  <--offset
  //   |
  //   `- end
  if (loop_end_to_header->second <= offset) {
    return loop_end_to_header->second;
  }
  // Otherwise there is a (potentially nested) loop after this offset.
  //
  //    <--offset
  //
  //   .> header
  //   |
  //   | .> header  <--loop_end_to_header
  //   | |
  //   | `- end
  //   |
  //   `- end
  // We just return the parent of the next loop (might be -1).
  DCHECK(header_to_info_.upper_bound(offset) != header_to_info_.end());

  return header_to_info_.upper_bound(offset)->second.parent_offset();
}

const LoopInfo& BytecodeAnalysis::GetLoopInfoFor(int header_offset) const {
  DCHECK(IsLoopHeader(header_offset));

  return header_to_info_.find(header_offset)->second;
}

const BytecodeLivenessState* BytecodeAnalysis::GetInLivenessFor(
    int offset) const {
  if (!do_liveness_analysis_) return nullptr;

  return liveness_map_.GetInLiveness(offset);
}

const BytecodeLivenessState* BytecodeAnalysis::GetOutLivenessFor(
    int offset) const {
  if (!do_liveness_analysis_) return nullptr;

  return liveness_map_.GetOutLiveness(offset);
}

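// Prints one line per bytecode: the in-liveness bits, the out-liveness bits,
// the bytecode offset and the disassembled bytecode ('L' marks a live bit,
// '.' a dead one).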
std::ostream& BytecodeAnalysis::PrintLivenessTo(std::ostream& os) const {
  interpreter::BytecodeArrayIterator iterator(bytecode_array());

  for (; !iterator.done(); iterator.Advance()) {
    int current_offset = iterator.current_offset();

    const BitVector& in_liveness =
        GetInLivenessFor(current_offset)->bit_vector();
    const BitVector& out_liveness =
        GetOutLivenessFor(current_offset)->bit_vector();

    for (int i = 0; i < in_liveness.length(); ++i) {
      os << (in_liveness.Contains(i) ? "L" : ".");
    }
    os << " -> ";

    for (int i = 0; i < out_liveness.length(); ++i) {
      os << (out_liveness.Contains(i) ? "L" : ".");
    }

    os << " | " << current_offset << ": ";
    iterator.PrintTo(os) << std::endl;
  }

  return os;
}

#if DEBUG
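// Debug-only consistency check: every suspend id in the generator switch must
// be reachable through a chain of resume jump targets that ends in a leaf
// pointing at the corresponding ResumeGenerator bytecode, and non-leaf targets
// may only point at direct inner loop headers.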
bool BytecodeAnalysis::ResumeJumpTargetsAreValid() {
  bool valid = true;

  // Find the generator switch.
  interpreter::BytecodeArrayRandomIterator iterator(bytecode_array(), zone());
  for (iterator.GoToStart(); iterator.IsValid(); ++iterator) {
    if (iterator.current_bytecode() == Bytecode::kSwitchOnGeneratorState) {
      break;
    }
  }

  // If the iterator is invalid, we've reached the end without finding the
  // generator switch. Similarly, if we are OSR-ing, we're not resuming, so we
  // need no jump targets. So, ensure there are no jump targets and exit.
  if (!iterator.IsValid() || HasOsrEntryPoint()) {
    // Check top-level.
    if (!resume_jump_targets().empty()) {
      PrintF(stderr,
             "Found %zu top-level resume targets but no resume switch\n",
             resume_jump_targets().size());
      valid = false;
    }
    // Check loops.
    for (const std::pair<int, LoopInfo>& loop_info : header_to_info_) {
      if (!loop_info.second.resume_jump_targets().empty()) {
        PrintF(stderr,
               "Found %zu resume targets at loop at offset %d, but no resume "
               "switch\n",
               loop_info.second.resume_jump_targets().size(), loop_info.first);
        valid = false;
      }
    }

    return valid;
  }

  // Otherwise, we've found the resume switch. Check that the top level jumps
  // only to leaves and loop headers, then check that each loop header handles
  // all the unresolved jumps, also jumping only to leaves and inner loop
  // headers.

  // First collect all required suspend ids.
  std::map<int, int> unresolved_suspend_ids;
  for (const interpreter::JumpTableTargetOffset& offset :
       iterator.GetJumpTableTargetOffsets()) {
    int suspend_id = offset.case_value;
    int resume_offset = offset.target_offset;

    unresolved_suspend_ids[suspend_id] = resume_offset;
  }

  // Check top-level.
  if (!ResumeJumpTargetLeavesResolveSuspendIds(-1, resume_jump_targets(),
                                               &unresolved_suspend_ids)) {
    valid = false;
  }
  // Check loops.
  for (const std::pair<int, LoopInfo>& loop_info : header_to_info_) {
    if (!ResumeJumpTargetLeavesResolveSuspendIds(
            loop_info.first, loop_info.second.resume_jump_targets(),
            &unresolved_suspend_ids)) {
      valid = false;
    }
  }

  // Check that everything is resolved.
  if (!unresolved_suspend_ids.empty()) {
    PrintF(stderr,
           "Found suspend ids that are not resolved by a final leaf resume "
           "jump:\n");

    for (const std::pair<int, int>& target : unresolved_suspend_ids) {
      PrintF(stderr, "  %d -> %d\n", target.first, target.second);
    }
    valid = false;
  }

  return valid;
}

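// Checks the resume jump targets owned by one scope (the top level when
// |parent_offset| is -1, otherwise the loop whose header is at
// |parent_offset|): leaf targets must resolve a pending suspend id by pointing
// at its ResumeGenerator bytecode, and non-leaf targets must point at a direct
// inner loop header, whose own targets are checked separately.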
bool BytecodeAnalysis::ResumeJumpTargetLeavesResolveSuspendIds(
    int parent_offset, const ZoneVector<ResumeJumpTarget>& resume_jump_targets,
    std::map<int, int>* unresolved_suspend_ids) {
  bool valid = true;
  for (const ResumeJumpTarget& target : resume_jump_targets) {
    std::map<int, int>::iterator it =
        unresolved_suspend_ids->find(target.suspend_id());
    if (it == unresolved_suspend_ids->end()) {
      PrintF(
          stderr,
          "No unresolved suspend found for resume target with suspend id %d\n",
          target.suspend_id());
      valid = false;
      continue;
    }
    int expected_target = it->second;

    if (target.is_leaf()) {
      // Leaves should have the expected target as their target.
      if (target.target_offset() != expected_target) {
        PrintF(
            stderr,
            "Expected leaf resume target for id %d to have target offset %d, "
            "but had %d\n",
            target.suspend_id(), expected_target, target.target_offset());
        valid = false;
      } else {
        // Make sure we're resuming to a Resume bytecode.
        interpreter::BytecodeArrayAccessor accessor(bytecode_array(),
                                                    target.target_offset());
        if (accessor.current_bytecode() != Bytecode::kResumeGenerator) {
          PrintF(stderr,
                 "Expected resume target for id %d, offset %d, to be "
                 "ResumeGenerator, but found %s\n",
                 target.suspend_id(), target.target_offset(),
                 Bytecodes::ToString(accessor.current_bytecode()));

          valid = false;
        }
      }
      // We've resolved this suspend id, so erase it to make sure we don't
      // resolve it twice.
      unresolved_suspend_ids->erase(it);
    } else {
      // Non-leaves should have a direct inner loop header as their target.
      if (!IsLoopHeader(target.target_offset())) {
        PrintF(stderr,
               "Expected non-leaf resume target for id %d to have a loop "
               "header at target offset %d\n",
               target.suspend_id(), target.target_offset());
        valid = false;
      } else {
        LoopInfo loop_info = GetLoopInfoFor(target.target_offset());
        if (loop_info.parent_offset() != parent_offset) {
          PrintF(stderr,
                 "Expected non-leaf resume target for id %d to have a direct "
                 "inner loop at target offset %d\n",
                 target.suspend_id(), target.target_offset());
          valid = false;
        }
        // If the target loop is a valid inner loop, we'll check its validity
        // when we analyze its resume targets.
      }
    }
  }
  return valid;
}

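// Debug-only consistency check for the computed liveness: one extra backward
// pass must not change any in- or out-liveness (i.e. we really reached a fixed
// point), and the accumulator must be dead on loop back edges and on jumps out
// of a loop. On failure, the annotated bytecode is dumped to stderr.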
bool BytecodeAnalysis::LivenessIsValid() {
  interpreter::BytecodeArrayRandomIterator iterator(bytecode_array(), zone());

  BytecodeLivenessState previous_liveness(bytecode_array()->register_count(),
                                          zone());

  int invalid_offset = -1;
  int which_invalid = -1;

  BytecodeLivenessState* next_bytecode_in_liveness = nullptr;

  // Ensure that there are no liveness changes if we iterate one more time.
  for (iterator.GoToEnd(); iterator.IsValid(); --iterator) {
    Bytecode bytecode = iterator.current_bytecode();

    int current_offset = iterator.current_offset();

    BytecodeLiveness& liveness = liveness_map_.GetLiveness(current_offset);

    previous_liveness.CopyFrom(*liveness.out);

    UpdateOutLiveness(bytecode, *liveness.out, next_bytecode_in_liveness,
                      iterator, liveness_map_);
    // UpdateOutLiveness skips kJumpLoop, so we update it manually.
    if (bytecode == Bytecode::kJumpLoop) {
      int target_offset = iterator.GetJumpTargetOffset();
      liveness.out->Union(*liveness_map_.GetInLiveness(target_offset));
    }

    if (!liveness.out->Equals(previous_liveness)) {
      // Reset the invalid liveness.
      liveness.out->CopyFrom(previous_liveness);
      invalid_offset = current_offset;
      which_invalid = 1;
      break;
    }

    previous_liveness.CopyFrom(*liveness.in);

    liveness.in->CopyFrom(*liveness.out);
    UpdateInLiveness(bytecode, *liveness.in, iterator);

    if (!liveness.in->Equals(previous_liveness)) {
      // Reset the invalid liveness.
      liveness.in->CopyFrom(previous_liveness);
      invalid_offset = current_offset;
      which_invalid = 0;
      break;
    }

    next_bytecode_in_liveness = liveness.in;
  }

  // Ensure that the accumulator is not live when jumping out of a loop, or on
  // the back-edge of a loop.
  for (iterator.GoToStart(); iterator.IsValid() && invalid_offset == -1;
       ++iterator) {
    Bytecode bytecode = iterator.current_bytecode();
    int current_offset = iterator.current_offset();
    int loop_header = GetLoopOffsetFor(current_offset);

    // We only care if we're inside a loop.
    if (loop_header == -1) continue;

    // We only care about jumps.
    if (!Bytecodes::IsJump(bytecode)) continue;

    int jump_target = iterator.GetJumpTargetOffset();

    // If this is a forward jump to somewhere else in the same loop, ignore it.
    if (Bytecodes::IsForwardJump(bytecode) &&
        GetLoopOffsetFor(jump_target) == loop_header) {
      continue;
    }

    // The accumulator must be dead at the start of the target of the jump.
    if (liveness_map_.GetLiveness(jump_target).in->AccumulatorIsLive()) {
      invalid_offset = jump_target;
      which_invalid = 0;
      break;
    }
  }

  if (invalid_offset != -1) {
    OFStream of(stderr);
    of << "Invalid liveness:" << std::endl;

    // Dump the bytecode, annotated with the liveness and marking loops.

    int loop_indent = 0;

    interpreter::BytecodeArrayIterator forward_iterator(bytecode_array());
    for (; !forward_iterator.done(); forward_iterator.Advance()) {
      int current_offset = forward_iterator.current_offset();
      const BitVector& in_liveness =
          GetInLivenessFor(current_offset)->bit_vector();
      const BitVector& out_liveness =
          GetOutLivenessFor(current_offset)->bit_vector();

      for (int i = 0; i < in_liveness.length(); ++i) {
        of << (in_liveness.Contains(i) ? 'L' : '.');
      }

      of << " | ";

      for (int i = 0; i < out_liveness.length(); ++i) {
        of << (out_liveness.Contains(i) ? 'L' : '.');
      }

      of << " : " << current_offset << " : ";

      // Draw loop back edges by indenting everything between loop headers and
      // jump loop instructions.
      if (forward_iterator.current_bytecode() == Bytecode::kJumpLoop) {
        loop_indent--;
      }
      for (int i = 0; i < loop_indent; ++i) {
        of << "| ";
      }
      if (forward_iterator.current_bytecode() == Bytecode::kJumpLoop) {
        of << "`-";
      } else if (IsLoopHeader(current_offset)) {
        of << ".>";
        loop_indent++;
      }
      forward_iterator.PrintTo(of);
      if (Bytecodes::IsJump(forward_iterator.current_bytecode())) {
        of << " (@" << forward_iterator.GetJumpTargetOffset() << ")";
      }
      of << std::endl;

      if (current_offset == invalid_offset) {
        // Underline the invalid liveness.
        if (which_invalid == 0) {
          for (int i = 0; i < in_liveness.length(); ++i) {
            of << '^';
          }
          for (int i = 0; i < out_liveness.length() + 3; ++i) {
            of << ' ';
          }
        } else {
          for (int i = 0; i < in_liveness.length() + 3; ++i) {
            of << ' ';
          }
          for (int i = 0; i < out_liveness.length(); ++i) {
            of << '^';
          }
        }

        // Make sure to draw the loop indentation marks on this additional line.
        of << " : " << current_offset << " : ";
        for (int i = 0; i < loop_indent; ++i) {
          of << "| ";
        }

        of << std::endl;
      }
    }
  }

  return invalid_offset == -1;
}
#endif

}  // namespace compiler
}  // namespace internal
}  // namespace v8