// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/lithium-allocator.h"

#include "src/crankshaft/hydrogen.h"
#include "src/crankshaft/lithium-allocator-inl.h"
#include "src/crankshaft/lithium-inl.h"
#include "src/objects-inl.h"
#include "src/register-configuration.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {

const auto GetRegConfig = RegisterConfiguration::Crankshaft;

static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}


UsePosition::UsePosition(LifetimePosition pos,
                         LOperand* operand,
                         LOperand* hint)
    : operand_(operand),
      hint_(hint),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    requires_reg_ = unalloc->HasRegisterPolicy() ||
        unalloc->HasDoubleRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  DCHECK(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  return hint_ != NULL && !hint_->IsUnallocated();
}


bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}


void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new(zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}

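// A minimal usage sketch for SplitAt (the zone and positions are
// illustrative): an interval spanning instructions [10, 30) split at
// instruction 20 keeps [10, 20) and links a new interval [20, 30) after it.
//
//   UseInterval* interval =
//       new (zone) UseInterval(LifetimePosition::FromInstructionIndex(10),
//                              LifetimePosition::FromInstructionIndex(30));
//   interval->SplitAt(LifetimePosition::FromInstructionIndex(20), zone);
//   DCHECK(interval->end().Value() == interval->next()->start().Value());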

#ifdef DEBUG


void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    DCHECK(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}


bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}


#endif


LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      kind_(UNALLOCATED_REGISTERS),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new (zone) LOperand()),
      spill_start_index_(kMaxInt) {}


void LiveRange::set_assigned_register(int reg, Zone* zone) {
  DCHECK(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  ConvertOperands(zone);
}


void LiveRange::MakeSpilled(Zone* zone) {
  DCHECK(!IsSpilled());
  DCHECK(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}


bool LiveRange::HasAllocatedSpillOperand() const {
  DCHECK(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}


void LiveRange::SetSpillOperand(LOperand* operand) {
  DCHECK(!operand->IsUnallocated());
  DCHECK(spill_operand_ != NULL);
  DCHECK(spill_operand_->IsIgnored());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}


UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = first_pos();
  UsePosition* prev = NULL;
  while (pos != NULL && pos->pos().Value() < start.Value()) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RequiresRegister()) {
    pos = pos->next();
  }
  return pos;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return
      use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
}

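// A worked example for the check in CanBeSpilled, assuming the usual
// encoding of two lifetime positions per instruction (START = 2 * index,
// END = 2 * index + 1):
//
//   LifetimePosition pos = LifetimePosition::FromInstructionIndex(5);
//   // pos.Value() == 10; the next instruction is 6, whose end position is
//   // 13, so a register use at any position <= 13 blocks spilling here.
//   pos.NextInstruction().InstructionEnd().Value();  // == 13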

LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    DCHECK(!IsSpilled());
    switch (Kind()) {
      case GENERAL_REGISTERS:
        op = LRegister::Create(assigned_register(), zone);
        break;
      case DOUBLE_REGISTERS:
        op = LDoubleRegister::Create(assigned_register(), zone);
        break;
      default:
        UNREACHABLE();
    }
  } else if (IsSpilled()) {
    DCHECK(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}


void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  DCHECK(Start().Value() < position.Value());
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed
    // to the split child, because the split child owns the use interval
    // covering it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to a use that no longer belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->kind_ = result->parent_->kind_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}

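// A usage sketch for LiveRange::SplitAt (ids and positions are illustrative):
// a range covering instructions [8, 40) split at instruction 20 keeps
// [8, 20) together with the uses before the split, while |child| receives
// [20, 40) and the remaining uses, and is linked right after this range.
//
//   LiveRange* child = new (zone) LiveRange(next_id, zone);  // next_id is a
//                                                            // fresh id
//   range->SplitAt(LifetimePosition::FromInstructionIndex(20), child, zone);
//   DCHECK(range->next() == child);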

// This implements an ordering on live ranges by their start positions.
// This is needed for the correctness of the register allocation algorithm.
// If two live ranges start at the same offset, the tie is broken by where
// the value is first used. This part of the ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = first_pos();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}

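// A tie-breaking sketch: of two ranges that both start at the same position,
// the one whose first use comes earlier sorts first, e.g. a range first used
// at instruction 9 is allocated before one first used at instruction 14; a
// range with no uses at all sorts after any range that has one.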

void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  DCHECK(first_interval_ != NULL);
  DCHECK(first_interval_->start().Value() <= start.Value());
  DCHECK(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}


void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}


void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order of instruction processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with the last added interval.
      DCHECK(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}

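// A sketch of how intervals accumulate during the backward scan (P(i) is
// shorthand, in this comment only, for
// LifetimePosition::FromInstructionIndex(i)):
//
//   range->AddUseInterval(P(20), P(30), zone);  // intervals: [20, 30)
//   range->AddUseInterval(P(10), P(20), zone);  // merged:    [10, 30)
//   range->AddUseInterval(P(2), P(6), zone);    // new first: [2, 6) [10, 30)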

void LiveRange::AddUsePosition(LifetimePosition pos,
                               LOperand* operand,
                               LOperand* hint,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
  UsePosition* prev_hint = NULL;
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;
  }
}


void LiveRange::ConvertOperands(Zone* zone) {
  LOperand* op = CreateAssignedOperand(zone);
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    DCHECK(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      DCHECK(op->IsRegister() || op->IsDoubleRegister() ||
             !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start().Value() <= position.Value() &&
         position.Value() < End().Value();
}


bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    DCHECK(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}


LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}

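// An intersection sketch (interval bounds are illustrative): a range with
// intervals [2, 6) and [10, 14) tested against a range with the single
// interval [5, 12) yields position 5, the first point covered by both; the
// walk above advances whichever interval starts earlier until an overlap is
// found or either chain is exhausted.
//
//   LifetimePosition conflict = a->FirstIntersection(b);
//   if (conflict.IsValid()) { /* the two ranges cannot share a register */ }
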
LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()->allocator(), ZONE_NAME),
      chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(UNALLOCATED_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) {}

void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}


BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block. The live_in sets of successors
  // reached through backward edges are not yet available, so their
  // contribution is not included.
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note that the successor's
    // live_in will not have been computed yet for backward edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}

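// In dataflow terms the loop above computes (a sketch; live_in(S) is only
// available once S has been visited, so backward edges contribute just their
// phi inputs on this pass):
//
//   live_out(B) = union over successors S of
//                   live_in(S) + { inputs of S's phis arriving from B }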

void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());
    iterator.Advance();
  }
}


int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kNumRegisters;
}

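// A sketch of the fixed live range id space implied by the formula above
// (FixedLiveRangeID lives in the header and presumably returns -index - 1):
//
//   general register index r -> id -r - 1
//   double register index d  -> id -d - 1 - Register::kNumRegisters
//
// so fixed ranges never collide with the non-negative virtual register ids.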

LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  DCHECK(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    }
  }
  return operand;
}


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  DCHECK(index < Register::kNumRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = GENERAL_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  DCHECK(index < DoubleRegister::kMaxNumRegisters);
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = DOUBLE_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}


LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return GapAt(index);
}


HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = LUnallocated::cast(operand)->virtual_register();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}


void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without a use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}


void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}


void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}


void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained(
          chunk()->zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack; we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to the spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted at a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
                                                         chunk()->zone());
      move->AddMove(first_output, range->GetSpillOperand(),
                    chunk()->zone());
    }
  }

  // Handle fixed input operands of the second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        // The live range of writable input registers always extends until
        // the end of the instruction.
        DCHECK(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for the second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      LUnallocated* input_copy = cur_input->CopyUnconstrained(
          chunk()->zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. That is, the pointer map at
        // the instruction will include the output operand (whose value at
        // the beginning of the instruction is equal to the input operand).
        // If this is not desired, then the pointer map at this instruction
        // needs to be adjusted manually.
      }
    }
  }
}


void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      DCHECK(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->ClobbersRegisters()) {
          for (int i = 0; i < Register::kNumRegisters; ++i) {
            if (GetRegConfig()->IsAllocatableGeneralCode(i)) {
              if (output == NULL || !output->IsRegister() ||
                  output->index() != i) {
                LiveRange* range = FixedLiveRangeFor(i);
                range->AddUseInterval(curr_position,
                                      curr_position.InstructionEnd(), zone());
              }
            }
          }
        }

        if (instr->ClobbersDoubleRegisters(isolate())) {
          for (int i = 0; i < DoubleRegister::kMaxNumRegisters; ++i) {
            if (GetRegConfig()->IsAllocatableDoubleCode(i)) {
              if (output == NULL || !output->IsDoubleRegister() ||
                  output->index() != i) {
                LiveRange* range = FixedDoubleLiveRangeFor(i);
                range->AddUseInterval(curr_position,
                                      curr_position.InstructionEnd(), zone());
              }
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);

          if (temp->IsUnallocated()) {
            LUnallocated* temp_unalloc = LUnallocated::cast(temp);
            if (temp_unalloc->HasDoubleRegisterPolicy()) {
              double_artificial_registers_.Add(
                  temp_unalloc->virtual_register() - first_artificial_register_,
                  zone());
            }
          }
        }
      }
    }

    index = index - 1;
  }
}


void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new (chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        DCHECK(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without the special processing done in
      // AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}


bool LAllocator::Allocate(LChunk* chunk) {
  DCHECK(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
      new (chunk->zone()) BitVector(Register::kNumRegisters, chunk->zone());
  assigned_double_registers_ = new (chunk->zone())
      BitVector(DoubleRegister::kMaxNumRegisters, chunk->zone());
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}


void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}


void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}


void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      DCHECK(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      DCHECK(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  DCHECK(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        DCHECK(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction.
        // Some branch instructions (e.g. loops' back edges)
        // can potentially cause a GC so they have a pointer map.
        // By inserting a move we essentially create a copy of a
        // value which is invisible to PopulatePointerMaps(), because we store
        // it into a location different from the operand of a live range
        // covering a branch instruction.
        // Thus we need to manually record a pointer.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
                                                 chunk()->zone());
    }
  }
}


LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
}

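// A mapping sketch for GetConnectingParallelMove, assuming gaps and
// instructions alternate in the instruction stream (gap 12, instruction 13,
// gap 14, ...):
//
//   position             resolved move slot
//   start of gap 12   -> gap 12, LGap::START
//   end of gap 12     -> gap 12, LGap::END
//   start of instr 13 -> gap 12, LGap::AFTER
//   end of instr 13   -> gap 14, LGap::BEFORE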

HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}


void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
                chunk()->zone());
            LOperand* cur_operand = second_range->CreateAssignedOperand(
                chunk()->zone());
            move->AddMove(prev_operand, cur_operand,
                          chunk()->zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}


bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}

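// Example: a block whose only predecessor is block_id() - 1 is a
// straight-line continuation, so the connecting move inserted by
// ConnectRanges at the boundary gap is sufficient; blocks with multiple
// predecessors (e.g. loop headers) must instead be handled per edge in
// ResolveControlFlow below.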
   1240 
   1241 void LAllocator::ResolveControlFlow() {
   1242   LAllocatorPhase phase("L_Resolve control flow", this);
   1243   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
   1244   for (int block_id = 1; block_id < blocks->length(); ++block_id) {
   1245     HBasicBlock* block = blocks->at(block_id);
   1246     if (CanEagerlyResolveControlFlow(block)) continue;
   1247     BitVector* live = live_in_sets_[block->block_id()];
   1248     BitVector::Iterator iterator(live);
   1249     while (!iterator.Done()) {
   1250       int operand_index = iterator.Current();
   1251       for (int i = 0; i < block->predecessors()->length(); ++i) {
   1252         HBasicBlock* cur = block->predecessors()->at(i);
   1253         LiveRange* cur_range = LiveRangeFor(operand_index);
   1254         ResolveControlFlow(cur_range, block, cur);
   1255       }
   1256       iterator.Advance();
   1257     }
   1258   }
   1259 }
   1260 
   1261 
   1262 void LAllocator::BuildLiveRanges() {
   1263   LAllocatorPhase phase("L_Build live ranges", this);
   1264   InitializeLivenessAnalysis();
   1265   // Process the blocks in reverse order.
   1266   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
   1267   for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
   1268     HBasicBlock* block = blocks->at(block_id);
   1269     BitVector* live = ComputeLiveOut(block);
   1270     // Initially consider all live_out values live for the entire block. We
   1271     // will shorten these intervals if necessary.
   1272     AddInitialIntervals(block, live);
   1273 
   1274     // Process the instructions in reverse order, generating and killing
   1275     // live values.
   1276     ProcessInstructions(block, live);
   1277     // All phi output operands are killed by this block.
   1278     const ZoneList<HPhi*>* phis = block->phis();
   1279     for (int i = 0; i < phis->length(); ++i) {
   1280       // The live range interval already ends at the first instruction of the
   1281       // block.
   1282       HPhi* phi = phis->at(i);
   1283       live->Remove(phi->id());
   1284 
   1285       LOperand* hint = NULL;
   1286       LOperand* phi_operand = NULL;
   1287       LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
   1288       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
   1289                                                          chunk()->zone());
   1290       for (int j = 0; j < move->move_operands()->length(); ++j) {
   1291         LOperand* to = move->move_operands()->at(j).destination();
   1292         if (to->IsUnallocated() &&
   1293             LUnallocated::cast(to)->virtual_register() == phi->id()) {
   1294           hint = move->move_operands()->at(j).source();
   1295           phi_operand = to;
   1296           break;
   1297         }
   1298       }
   1299       DCHECK(hint != NULL);
   1300 
   1301       LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
   1302               block->first_instruction_index());
   1303       Define(block_start, phi_operand, hint);
   1304     }
   1305 
   1306     // Now live is live_in for this block except not including values live
   1307     // out on backward successor edges.
   1308     live_in_sets_[block_id] = live;
   1309 
   1310     // If this block is a loop header go back and patch up the necessary
   1311     // predecessor blocks.
   1312     if (block->IsLoopHeader()) {
   1313       // TODO(kmillikin): Need to be able to get the last block of the loop
   1314       // in the loop information. Add a live range stretching from the first
   1315       // loop instruction to the last for each value live on entry to the
   1316       // header.
   1317       HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
   1318       BitVector::Iterator iterator(live);
   1319       LifetimePosition start = LifetimePosition::FromInstructionIndex(
   1320           block->first_instruction_index());
   1321       LifetimePosition end = LifetimePosition::FromInstructionIndex(
   1322           back_edge->last_instruction_index()).NextInstruction();
   1323       while (!iterator.Done()) {
   1324         int operand_index = iterator.Current();
   1325         LiveRange* range = LiveRangeFor(operand_index);
   1326         range->EnsureInterval(start, end, zone());
   1327         iterator.Advance();
   1328       }
   1329 
   1330       for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
   1331         live_in_sets_[i]->Union(*live);
   1332       }
   1333     }
   1334 
   1335 #ifdef DEBUG
   1336     if (block_id == 0) {
   1337       BitVector::Iterator iterator(live);
   1338       bool found = false;
   1339       while (!iterator.Done()) {
   1340         found = true;
   1341         int operand_index = iterator.Current();
   1342         {
   1343           AllowHandleDereference allow_deref;
   1344           PrintF("Function: %s\n", chunk_->info()->GetDebugName().get());
   1345         }
   1346         PrintF("Value %d used before first definition!\n", operand_index);
   1347         LiveRange* range = LiveRangeFor(operand_index);
   1348         PrintF("First use is at %d\n", range->first_pos()->pos().Value());
   1349         iterator.Advance();
   1350       }
   1351       DCHECK(!found);
   1352     }
   1353 #endif
   1354   }
   1355 
   1356   for (int i = 0; i < live_ranges_.length(); ++i) {
   1357     if (live_ranges_[i] != NULL) {
   1358       live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
   1359     }
   1360   }
   1361 }
   1362 
   1363 
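        // Safe points must be recorded in non-decreasing order of their lithium
        // positions; PopulatePointerMaps relies on this when walking them.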
   1364 bool LAllocator::SafePointsAreInOrder() const {
   1365   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
   1366   int safe_point = 0;
   1367   for (int i = 0; i < pointer_maps->length(); ++i) {
   1368     LPointerMap* map = pointer_maps->at(i);
   1369     if (safe_point > map->lithium_position()) return false;
   1370     safe_point = map->lithium_position();
   1371   }
   1372   return true;
   1373 }
   1374 
   1375 
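        // For every safe point, record a pointer for each live tagged value: the
        // spill slot if the range is spilled at that point, and the assigned
        // register operand if the value is still in a register there.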
   1376 void LAllocator::PopulatePointerMaps() {
   1377   LAllocatorPhase phase("L_Populate pointer maps", this);
   1378   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
   1379 
   1380   DCHECK(SafePointsAreInOrder());
   1381 
   1382   // Iterate over all safe point positions and record a pointer
   1383   // for all spilled live ranges at this point.
   1384   int first_safe_point_index = 0;
   1385   int last_range_start = 0;
   1386   for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
   1387     LiveRange* range = live_ranges()->at(range_idx);
   1388     if (range == NULL) continue;
   1389     // Iterate over the first parts of multi-part live ranges.
   1390     if (range->parent() != NULL) continue;
   1391     // Skip non-pointer values.
   1392     if (!HasTaggedValue(range->id())) continue;
   1393     // Skip empty live ranges.
   1394     if (range->IsEmpty()) continue;
   1395 
   1396     // Find the extent of the range and its children.
   1397     int start = range->Start().InstructionIndex();
   1398     int end = 0;
   1399     for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
   1400       LifetimePosition this_end = cur->End();
   1401       if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
   1402       DCHECK(cur->Start().InstructionIndex() >= start);
   1403     }
   1404 
   1405     // Most of the ranges are in order, but not all.  Watch for ranges that
   1406     // step backwards and reset first_safe_point_index so we don't miss any
   1407     // safe points.
   1408     if (start < last_range_start) {
   1409       first_safe_point_index = 0;
   1410     }
   1411     last_range_start = start;
   1412 
   1413     // Step across all the safe points that are before the start of this range,
   1414     // recording how far we stepped so we do not repeat this work for the next range.
   1415     while (first_safe_point_index < pointer_maps->length()) {
   1416       LPointerMap* map = pointer_maps->at(first_safe_point_index);
   1417       int safe_point = map->lithium_position();
   1418       if (safe_point >= start) break;
   1419       first_safe_point_index++;
   1420     }
   1421 
   1422     // Step through the safe points to see whether they are in the range.
   1423     for (int safe_point_index = first_safe_point_index;
   1424          safe_point_index < pointer_maps->length();
   1425          ++safe_point_index) {
   1426       LPointerMap* map = pointer_maps->at(safe_point_index);
   1427       int safe_point = map->lithium_position();
   1428 
   1429       // The safe points are sorted so we can stop searching here.
   1430       if (safe_point - 1 > end) break;
   1431 
   1432       // Advance to the next active range that covers the current
   1433       // safe point position.
   1434       LifetimePosition safe_point_pos =
   1435           LifetimePosition::FromInstructionIndex(safe_point);
   1436       LiveRange* cur = range;
   1437       while (cur != NULL && !cur->Covers(safe_point_pos)) {
   1438         cur = cur->next();
   1439       }
   1440       if (cur == NULL) continue;
   1441 
   1442       // Check if the live range is spilled and the safe point is after
   1443       // the spill position.
   1444       if (range->HasAllocatedSpillOperand() &&
   1445           safe_point >= range->spill_start_index()) {
   1446         TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
   1447                    range->id(), range->spill_start_index(), safe_point);
   1448         map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
   1449       }
   1450 
   1451       if (!cur->IsSpilled()) {
   1452         TraceAlloc("Pointer in register for range %d (start at %d) "
   1453                    "at safe point %d\n",
   1454                    cur->id(), cur->Start().Value(), safe_point);
   1455         LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
   1456         DCHECK(!operand->IsStackSlot());
   1457         map->RecordPointer(operand, chunk()->zone());
   1458       }
   1459     }
   1460   }
   1461 }
   1462 
   1463 
   1464 void LAllocator::AllocateGeneralRegisters() {
   1465   LAllocatorPhase phase("L_Allocate general registers", this);
   1466   num_registers_ = GetRegConfig()->num_allocatable_general_registers();
   1467   allocatable_register_codes_ = GetRegConfig()->allocatable_general_codes();
   1468   mode_ = GENERAL_REGISTERS;
   1469   AllocateRegisters();
   1470 }
   1471 
   1472 
   1473 void LAllocator::AllocateDoubleRegisters() {
   1474   LAllocatorPhase phase("L_Allocate double registers", this);
   1475   num_registers_ = GetRegConfig()->num_allocatable_double_registers();
   1476   allocatable_register_codes_ = GetRegConfig()->allocatable_double_codes();
   1477   mode_ = DOUBLE_REGISTERS;
   1478   AllocateRegisters();
   1479 }
   1480 
   1481 
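        // Main linear-scan loop: process live ranges of the current kind in order
        // of increasing start position.  For each range, first retire or deactivate
        // ranges that no longer cover the current position, then try to take a
        // register that is free for the whole range, and otherwise evict (split and
        // spill) conflicting ranges via AllocateBlockedReg.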
   1482 void LAllocator::AllocateRegisters() {
   1483   DCHECK(unhandled_live_ranges_.is_empty());
   1484 
   1485   for (int i = 0; i < live_ranges_.length(); ++i) {
   1486     if (live_ranges_[i] != NULL) {
   1487       if (live_ranges_[i]->Kind() == mode_) {
   1488         AddToUnhandledUnsorted(live_ranges_[i]);
   1489       }
   1490     }
   1491   }
   1492   SortUnhandled();
   1493   DCHECK(UnhandledIsSorted());
   1494 
   1495   DCHECK(reusable_slots_.is_empty());
   1496   DCHECK(active_live_ranges_.is_empty());
   1497   DCHECK(inactive_live_ranges_.is_empty());
   1498 
   1499   if (mode_ == DOUBLE_REGISTERS) {
   1500     for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
   1501       LiveRange* current = fixed_double_live_ranges_.at(i);
   1502       if (current != NULL) {
   1503         AddToInactive(current);
   1504       }
   1505     }
   1506   } else {
   1507     DCHECK(mode_ == GENERAL_REGISTERS);
   1508     for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
   1509       LiveRange* current = fixed_live_ranges_.at(i);
   1510       if (current != NULL) {
   1511         AddToInactive(current);
   1512       }
   1513     }
   1514   }
   1515 
   1516   while (!unhandled_live_ranges_.is_empty()) {
   1517     DCHECK(UnhandledIsSorted());
   1518     LiveRange* current = unhandled_live_ranges_.RemoveLast();
   1519     DCHECK(UnhandledIsSorted());
   1520     LifetimePosition position = current->Start();
   1521 #ifdef DEBUG
   1522     allocation_finger_ = position;
   1523 #endif
   1524     TraceAlloc("Processing interval %d start=%d\n",
   1525                current->id(),
   1526                position.Value());
   1527 
   1528     if (current->HasAllocatedSpillOperand()) {
   1529       TraceAlloc("Live range %d already has a spill operand\n", current->id());
   1530       LifetimePosition next_pos = position;
   1531       if (IsGapAt(next_pos.InstructionIndex())) {
   1532         next_pos = next_pos.NextInstruction();
   1533       }
   1534       UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
   1535       // If the range already has a spill operand and it doesn't need a
   1536       // register immediately, split it and spill the first part of the range.
   1537       if (pos == NULL) {
   1538         Spill(current);
   1539         continue;
   1540       } else if (pos->pos().Value() >
   1541                  current->Start().NextInstruction().Value()) {
   1542         // Do not spill the live range eagerly if the use position that can
   1543         // benefit from the register is too close to the start of the live range.
   1544         SpillBetween(current, current->Start(), pos->pos());
   1545         if (!AllocationOk()) return;
   1546         DCHECK(UnhandledIsSorted());
   1547         continue;
   1548       }
   1549     }
   1550 
   1551     for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1552       LiveRange* cur_active = active_live_ranges_.at(i);
   1553       if (cur_active->End().Value() <= position.Value()) {
   1554         ActiveToHandled(cur_active);
   1555         --i;  // The live range was removed from the list of active live ranges.
   1556       } else if (!cur_active->Covers(position)) {
   1557         ActiveToInactive(cur_active);
   1558         --i;  // The live range was removed from the list of active live ranges.
   1559       }
   1560     }
   1561 
   1562     for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1563       LiveRange* cur_inactive = inactive_live_ranges_.at(i);
   1564       if (cur_inactive->End().Value() <= position.Value()) {
   1565         InactiveToHandled(cur_inactive);
   1566         --i;  // Live range was removed from the list of inactive live ranges.
   1567       } else if (cur_inactive->Covers(position)) {
   1568         InactiveToActive(cur_inactive);
   1569         --i;  // Live range was removed from the list of inactive live ranges.
   1570       }
   1571     }
   1572 
   1573     DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled());
   1574 
   1575     bool result = TryAllocateFreeReg(current);
   1576     if (!AllocationOk()) return;
   1577 
   1578     if (!result) AllocateBlockedReg(current);
   1579     if (!AllocationOk()) return;
   1580 
   1581     if (current->HasRegisterAssigned()) {
   1582       AddToActive(current);
   1583     }
   1584   }
   1585 
   1586   reusable_slots_.Rewind(0);
   1587   active_live_ranges_.Rewind(0);
   1588   inactive_live_ranges_.Rewind(0);
   1589 }
   1590 
   1591 
   1592 const char* LAllocator::RegisterName(int allocation_index) {
   1593   if (mode_ == GENERAL_REGISTERS) {
   1594     return GetRegConfig()->GetGeneralRegisterName(allocation_index);
   1595   } else {
   1596     return GetRegConfig()->GetDoubleRegisterName(allocation_index);
   1597   }
   1598 }
   1599 
   1600 
   1601 void LAllocator::TraceAlloc(const char* msg, ...) {
   1602   if (FLAG_trace_alloc) {
   1603     va_list arguments;
   1604     va_start(arguments, msg);
   1605     base::OS::VPrint(msg, arguments);
   1606     va_end(arguments);
   1607   }
   1608 }
   1609 
   1610 
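        // Tagged values (other than known Smis) hold heap pointers and must be
        // recorded in pointer maps at safe points.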
   1611 bool LAllocator::HasTaggedValue(int virtual_register) const {
   1612   HValue* value = graph_->LookupValue(virtual_register);
   1613   if (value == NULL) return false;
   1614   return value->representation().IsTagged() && !value->type().IsSmi();
   1615 }
   1616 
   1617 
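        // A value needs a double register if its HValue has a double
        // representation, or if it is an artificial register recorded as double.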
   1618 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
   1619   if (virtual_register < first_artificial_register_) {
   1620     HValue* value = graph_->LookupValue(virtual_register);
   1621     if (value != NULL && value->representation().IsDouble()) {
   1622       return DOUBLE_REGISTERS;
   1623     }
   1624   } else if (double_artificial_registers_.Contains(
   1625       virtual_register - first_artificial_register_)) {
   1626     return DOUBLE_REGISTERS;
   1627   }
   1628 
   1629   return GENERAL_REGISTERS;
   1630 }
   1631 
   1632 
   1633 void LAllocator::AddToActive(LiveRange* range) {
   1634   TraceAlloc("Add live range %d to active\n", range->id());
   1635   active_live_ranges_.Add(range, zone());
   1636 }
   1637 
   1638 
   1639 void LAllocator::AddToInactive(LiveRange* range) {
   1640   TraceAlloc("Add live range %d to inactive\n", range->id());
   1641   inactive_live_ranges_.Add(range, zone());
   1642 }
   1643 
   1644 
   1645 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
   1646   if (range == NULL || range->IsEmpty()) return;
   1647   DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
   1648   DCHECK(allocation_finger_.Value() <= range->Start().Value());
   1649   for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
   1650     LiveRange* cur_range = unhandled_live_ranges_.at(i);
   1651     if (range->ShouldBeAllocatedBefore(cur_range)) {
   1652       TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
   1653       unhandled_live_ranges_.InsertAt(i + 1, range, zone());
   1654       DCHECK(UnhandledIsSorted());
   1655       return;
   1656     }
   1657   }
   1658   TraceAlloc("Add live range %d to unhandled at start\n", range->id());
   1659   unhandled_live_ranges_.InsertAt(0, range, zone());
   1660   DCHECK(UnhandledIsSorted());
   1661 }
   1662 
   1663 
   1664 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
   1665   if (range == NULL || range->IsEmpty()) return;
   1666   DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
   1667   TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
   1668   unhandled_live_ranges_.Add(range, zone());
   1669 }
   1670 
   1671 
   1672 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
   1673   DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) ||
   1674          !(*b)->ShouldBeAllocatedBefore(*a));
   1675   if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
   1676   if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
   1677   return (*a)->id() - (*b)->id();
   1678 }
   1679 
   1680 
   1681 // Sort the unhandled live ranges so that the ranges to be processed first are
   1682 // at the end of the array list.  This is convenient for the register allocation
   1683 // algorithm because it is efficient to remove elements from the end.
   1684 void LAllocator::SortUnhandled() {
   1685   TraceAlloc("Sort unhandled\n");
   1686   unhandled_live_ranges_.Sort(&UnhandledSortHelper);
   1687 }
   1688 
   1689 
   1690 bool LAllocator::UnhandledIsSorted() {
   1691   int len = unhandled_live_ranges_.length();
   1692   for (int i = 1; i < len; i++) {
   1693     LiveRange* a = unhandled_live_ranges_.at(i - 1);
   1694     LiveRange* b = unhandled_live_ranges_.at(i);
   1695     if (a->Start().Value() < b->Start().Value()) return false;
   1696   }
   1697   return true;
   1698 }
   1699 
   1700 
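        // When the last part of a live range is retired, its stack slot (if any)
        // becomes available for reuse by later ranges.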
   1701 void LAllocator::FreeSpillSlot(LiveRange* range) {
   1702   // Check that we are the last range.
   1703   if (range->next() != NULL) return;
   1704 
   1705   if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
   1706 
   1707   int index = range->TopLevel()->GetSpillOperand()->index();
   1708   if (index >= 0) {
   1709     reusable_slots_.Add(range, zone());
   1710   }
   1711 }
   1712 
   1713 
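        // Reuse a previously freed spill slot, but only if its former owner ended
        // before this range starts; otherwise the slot may still be live.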
   1714 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
   1715   if (reusable_slots_.is_empty()) return NULL;
   1716   if (reusable_slots_.first()->End().Value() >
   1717       range->TopLevel()->Start().Value()) {
   1718     return NULL;
   1719   }
   1720   LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
   1721   reusable_slots_.Remove(0);
   1722   return result;
   1723 }
   1724 
   1725 
   1726 void LAllocator::ActiveToHandled(LiveRange* range) {
   1727   DCHECK(active_live_ranges_.Contains(range));
   1728   active_live_ranges_.RemoveElement(range);
   1729   TraceAlloc("Moving live range %d from active to handled\n", range->id());
   1730   FreeSpillSlot(range);
   1731 }
   1732 
   1733 
   1734 void LAllocator::ActiveToInactive(LiveRange* range) {
   1735   DCHECK(active_live_ranges_.Contains(range));
   1736   active_live_ranges_.RemoveElement(range);
   1737   inactive_live_ranges_.Add(range, zone());
   1738   TraceAlloc("Moving live range %d from active to inactive\n", range->id());
   1739 }
   1740 
   1741 
   1742 void LAllocator::InactiveToHandled(LiveRange* range) {
   1743   DCHECK(inactive_live_ranges_.Contains(range));
   1744   inactive_live_ranges_.RemoveElement(range);
   1745   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
   1746   FreeSpillSlot(range);
   1747 }
   1748 
   1749 
   1750 void LAllocator::InactiveToActive(LiveRange* range) {
   1751   DCHECK(inactive_live_ranges_.Contains(range));
   1752   inactive_live_ranges_.RemoveElement(range);
   1753   active_live_ranges_.Add(range, zone());
   1754   TraceAlloc("Moving live range %d from inactive to active\n", range->id());
   1755 }
   1756 
   1757 
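        // Compute how long each register stays free from current's start
        // (free_until_pos).  Prefer the hinted register if it is free for the whole
        // range; otherwise pick the register that stays free the longest, splitting
        // current where that register becomes blocked.  Returns false if every
        // register is blocked already at the start position.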
   1758 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
   1759   DCHECK(DoubleRegister::kMaxNumRegisters >= Register::kNumRegisters);
   1760 
   1761   LifetimePosition free_until_pos[DoubleRegister::kMaxNumRegisters];
   1762 
   1763   for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
   1764     free_until_pos[i] = LifetimePosition::MaxPosition();
   1765   }
   1766 
   1767   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1768     LiveRange* cur_active = active_live_ranges_.at(i);
   1769     free_until_pos[cur_active->assigned_register()] =
   1770         LifetimePosition::FromInstructionIndex(0);
   1771   }
   1772 
   1773   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1774     LiveRange* cur_inactive = inactive_live_ranges_.at(i);
   1775     DCHECK(cur_inactive->End().Value() > current->Start().Value());
   1776     LifetimePosition next_intersection =
   1777         cur_inactive->FirstIntersection(current);
   1778     if (!next_intersection.IsValid()) continue;
   1779     int cur_reg = cur_inactive->assigned_register();
   1780     free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
   1781   }
   1782 
   1783   LOperand* hint = current->FirstHint();
   1784   if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
   1785     int register_index = hint->index();
   1786     TraceAlloc(
   1787         "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
   1788         RegisterName(register_index),
   1789         free_until_pos[register_index].Value(),
   1790         current->id(),
   1791         current->End().Value());
   1792 
   1793     // The desired register is free until the end of the current live range.
   1794     if (free_until_pos[register_index].Value() >= current->End().Value()) {
   1795       TraceAlloc("Assigning preferred reg %s to live range %d\n",
   1796                  RegisterName(register_index),
   1797                  current->id());
   1798       SetLiveRangeAssignedRegister(current, register_index);
   1799       return true;
   1800     }
   1801   }
   1802 
   1803   // Find the register which stays free for the longest time.
   1804   int reg = allocatable_register_codes_[0];
   1805   for (int i = 1; i < RegisterCount(); ++i) {
   1806     int code = allocatable_register_codes_[i];
   1807     if (free_until_pos[code].Value() > free_until_pos[reg].Value()) {
   1808       reg = code;
   1809     }
   1810   }
   1811 
   1812   LifetimePosition pos = free_until_pos[reg];
   1813 
   1814   if (pos.Value() <= current->Start().Value()) {
   1815     // All registers are blocked.
   1816     return false;
   1817   }
   1818 
   1819   if (pos.Value() < current->End().Value()) {
   1820     // Register reg is available at the range start but becomes blocked before
   1821     // the range end.  Split current at the position where it becomes blocked.
   1822     LiveRange* tail = SplitRangeAt(current, pos);
   1823     if (!AllocationOk()) return false;
   1824     AddToUnhandledSorted(tail);
   1825   }
   1826 
   1827 
   1828   // Register reg is available at the range start and is free until
   1829   // the range end.
   1830   DCHECK(pos.Value() >= current->End().Value());
   1831   TraceAlloc("Assigning free reg %s to live range %d\n",
   1832              RegisterName(reg),
   1833              current->id());
   1834   SetLiveRangeAssignedRegister(current, reg);
   1835 
   1836   return true;
   1837 }
   1838 
   1839 
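        // No register is free for the whole of current.  Pick the register whose
        // next use by another range is furthest away (use_pos).  If even that use
        // comes before current's first use that needs a register, spill the head of
        // current; otherwise assign the register and evict the intersecting parts
        // of the ranges that currently occupy it.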
   1840 void LAllocator::AllocateBlockedReg(LiveRange* current) {
   1841   UsePosition* register_use = current->NextRegisterPosition(current->Start());
   1842   if (register_use == NULL) {
   1843     // There is no use in the current live range that requires a register.
   1844     // We can just spill it.
   1845     Spill(current);
   1846     return;
   1847   }
   1848 
   1849 
   1850   LifetimePosition use_pos[DoubleRegister::kMaxNumRegisters];
   1851   LifetimePosition block_pos[DoubleRegister::kMaxNumRegisters];
   1852 
   1853   for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
   1854     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
   1855   }
   1856 
   1857   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1858     LiveRange* range = active_live_ranges_[i];
   1859     int cur_reg = range->assigned_register();
   1860     if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
   1861       block_pos[cur_reg] = use_pos[cur_reg] =
   1862           LifetimePosition::FromInstructionIndex(0);
   1863     } else {
   1864       UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
   1865           current->Start());
   1866       if (next_use == NULL) {
   1867         use_pos[cur_reg] = range->End();
   1868       } else {
   1869         use_pos[cur_reg] = next_use->pos();
   1870       }
   1871     }
   1872   }
   1873 
   1874   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1875     LiveRange* range = inactive_live_ranges_.at(i);
   1876     DCHECK(range->End().Value() > current->Start().Value());
   1877     LifetimePosition next_intersection = range->FirstIntersection(current);
   1878     if (!next_intersection.IsValid()) continue;
   1879     int cur_reg = range->assigned_register();
   1880     if (range->IsFixed()) {
   1881       block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
   1882       use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
   1883     } else {
   1884       use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
   1885     }
   1886   }
   1887 
   1888   int reg = allocatable_register_codes_[0];
   1889   for (int i = 1; i < RegisterCount(); ++i) {
   1890     int code = allocatable_register_codes_[i];
   1891     if (use_pos[code].Value() > use_pos[reg].Value()) {
   1892       reg = code;
   1893     }
   1894   }
   1895 
   1896   LifetimePosition pos = use_pos[reg];
   1897 
   1898   if (pos.Value() < register_use->pos().Value()) {
   1899     // All registers are blocked before the first use that requires a register.
   1900     // Spill the starting part of the live range up to that use.
   1901     SpillBetween(current, current->Start(), register_use->pos());
   1902     return;
   1903   }
   1904 
   1905   if (block_pos[reg].Value() < current->End().Value()) {
   1906     // Register becomes blocked before the current range end. Split before that
   1907     // position.
   1908     LiveRange* tail = SplitBetween(current,
   1909                                    current->Start(),
   1910                                    block_pos[reg].InstructionStart());
   1911     if (!AllocationOk()) return;
   1912     AddToUnhandledSorted(tail);
   1913   }
   1914 
   1915   // Register reg is not blocked for the whole range.
   1916   DCHECK(block_pos[reg].Value() >= current->End().Value());
   1917   TraceAlloc("Assigning blocked reg %s to live range %d\n",
   1918              RegisterName(reg),
   1919              current->id());
   1920   SetLiveRangeAssignedRegister(current, reg);
   1921 
   1922   // This register was not free. Thus we need to find and spill
   1923   // parts of active and inactive live ranges that use the same register
   1924   // at the same lifetime positions as current.
   1925   SplitAndSpillIntersecting(current);
   1926 }
   1927 
   1928 
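        // If the requested spill position lies inside a loop, try to hoist it to
        // the header of that loop (or of an enclosing loop), provided the range
        // covers the header and has no register-beneficial use in between.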
   1929 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
   1930                                                     LifetimePosition pos) {
   1931   HBasicBlock* block = GetBlock(pos.InstructionStart());
   1932   HBasicBlock* loop_header =
   1933       block->IsLoopHeader() ? block : block->parent_loop_header();
   1934 
   1935   if (loop_header == NULL) return pos;
   1936 
   1937   UsePosition* prev_use =
   1938     range->PreviousUsePositionRegisterIsBeneficial(pos);
   1939 
   1940   while (loop_header != NULL) {
   1941     // We are going to spill the live range inside the loop.
   1942     // If possible, try to move the spilling position backwards to the loop
   1943     // header.  This will reduce the number of memory moves on the back edge.
   1944     LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
   1945         loop_header->first_instruction_index());
   1946 
   1947     if (range->Covers(loop_start)) {
   1948       if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
   1949         // No register-beneficial use inside the loop before pos.
   1950         pos = loop_start;
   1951       }
   1952     }
   1953 
   1954     // Try hoisting out to an outer loop.
   1955     loop_header = loop_header->parent_loop_header();
   1956   }
   1957 
   1958   return pos;
   1959 }
   1960 
   1961 
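        // Evict current's register from every other active or inactive range that
        // holds it and intersects current: spill the conflicting part and requeue
        // any remainder as unhandled.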
   1962 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
   1963   DCHECK(current->HasRegisterAssigned());
   1964   int reg = current->assigned_register();
   1965   LifetimePosition split_pos = current->Start();
   1966   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1967     LiveRange* range = active_live_ranges_[i];
   1968     if (range->assigned_register() == reg) {
   1969       UsePosition* next_pos = range->NextRegisterPosition(current->Start());
   1970       LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
   1971       if (next_pos == NULL) {
   1972         SpillAfter(range, spill_pos);
   1973       } else {
   1974         // When spilling between spill_pos and next_pos, ensure that the range
   1975         // remains spilled at least until the start of the current live range.
   1976         // This guarantees that we will not introduce new unhandled ranges that
   1977         // start before the current range, as this would violate the allocation
   1978         // invariant and lead to an inconsistent state of the active and inactive
   1979         // live ranges: ranges are allocated in order of their start positions,
   1980         // and ranges are retired from active/inactive when the start of the
   1981         // current live range is larger than their end.
   1982         SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
   1983       }
   1984       if (!AllocationOk()) return;
   1985       ActiveToHandled(range);
   1986       --i;
   1987     }
   1988   }
   1989 
   1990   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1991     LiveRange* range = inactive_live_ranges_[i];
   1992     DCHECK(range->End().Value() > current->Start().Value());
   1993     if (range->assigned_register() == reg && !range->IsFixed()) {
   1994       LifetimePosition next_intersection = range->FirstIntersection(current);
   1995       if (next_intersection.IsValid()) {
   1996         UsePosition* next_pos = range->NextRegisterPosition(current->Start());
   1997         if (next_pos == NULL) {
   1998           SpillAfter(range, split_pos);
   1999         } else {
   2000           next_intersection = Min(next_intersection, next_pos->pos());
   2001           SpillBetween(range, split_pos, next_intersection);
   2002         }
   2003         if (!AllocationOk()) return;
   2004         InactiveToHandled(range);
   2005         --i;
   2006       }
   2007     }
   2008   }
   2009 }
   2010 
   2011 
   2012 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
   2013   return pos.IsInstructionStart() &&
   2014       InstructionAt(pos.InstructionIndex())->IsLabel();
   2015 }
   2016 
   2017 
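        // Split range at pos and return the tail; if pos is at or before the
        // range's start, the range is returned unsplit.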
   2018 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
   2019   DCHECK(!range->IsFixed());
   2020   TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
   2021 
   2022   if (pos.Value() <= range->Start().Value()) return range;
   2023 
   2024   // We can't properly connect live ranges if the split occurred at the end
   2025   // of a control instruction.
   2026   DCHECK(pos.IsInstructionStart() ||
   2027          !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
   2028 
   2029   int vreg = GetVirtualRegister();
   2030   if (!AllocationOk()) return NULL;
   2031   LiveRange* result = LiveRangeFor(vreg);
   2032   range->SplitAt(pos, result, zone());
   2033   return result;
   2034 }
   2035 
   2036 
   2037 LiveRange* LAllocator::SplitBetween(LiveRange* range,
   2038                                     LifetimePosition start,
   2039                                     LifetimePosition end) {
   2040   DCHECK(!range->IsFixed());
   2041   TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
   2042              range->id(),
   2043              start.Value(),
   2044              end.Value());
   2045 
   2046   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
   2047   DCHECK(split_pos.Value() >= start.Value());
   2048   return SplitRangeAt(range, split_pos);
   2049 }
   2050 
   2051 
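        // Choose a split position in [start, end].  Prefer the first instruction of
        // the outermost loop header between the two positions, so that spill moves
        // are hoisted out of loops; otherwise split as late as possible.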
   2052 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
   2053                                                  LifetimePosition end) {
   2054   int start_instr = start.InstructionIndex();
   2055   int end_instr = end.InstructionIndex();
   2056   DCHECK(start_instr <= end_instr);
   2057 
   2058   // We have no choice
   2059   // We have no choice: start and end are within the same instruction.
   2060 
   2061   HBasicBlock* start_block = GetBlock(start);
   2062   HBasicBlock* end_block = GetBlock(end);
   2063 
   2064   if (end_block == start_block) {
   2065     // The interval is split in the same basic block. Split at the latest
   2066     // possible position.
   2067     return end;
   2068   }
   2069 
   2070   HBasicBlock* block = end_block;
   2071   // Find header of outermost loop.
   2072   while (block->parent_loop_header() != NULL &&
   2073       block->parent_loop_header()->block_id() > start_block->block_id()) {
   2074     block = block->parent_loop_header();
   2075   }
   2076 
   2077   // We did not find any suitable outer loop. Split at the latest possible
   2078   // position unless end_block is a loop header itself.
   2079   if (block == end_block && !end_block->IsLoopHeader()) return end;
   2080 
   2081   return LifetimePosition::FromInstructionIndex(
   2082       block->first_instruction_index());
   2083 }
   2084 
   2085 
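        // Spill the part of range that lives at and after pos.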
   2086 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
   2087   LiveRange* second_part = SplitRangeAt(range, pos);
   2088   if (!AllocationOk()) return;
   2089   Spill(second_part);
   2090 }
   2091 
   2092 
   2093 void LAllocator::SpillBetween(LiveRange* range,
   2094                               LifetimePosition start,
   2095                               LifetimePosition end) {
   2096   SpillBetweenUntil(range, start, start, end);
   2097 }
   2098 
   2099 
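        // Split range at start; if the tail overlaps [start, end[, spill it from
        // start up to a position no earlier than 'until' and requeue whatever is
        // left as unhandled.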
   2100 void LAllocator::SpillBetweenUntil(LiveRange* range,
   2101                                    LifetimePosition start,
   2102                                    LifetimePosition until,
   2103                                    LifetimePosition end) {
   2104   CHECK(start.Value() < end.Value());
   2105   LiveRange* second_part = SplitRangeAt(range, start);
   2106   if (!AllocationOk()) return;
   2107 
   2108   if (second_part->Start().Value() < end.Value()) {
   2109     // The split result intersects with [start, end[.
   2110     // Split it at a position between ]start+1, end[, spill the middle part,
   2111     // and add the rest to the unhandled list.
   2112     LiveRange* third_part = SplitBetween(
   2113         second_part,
   2114         Max(second_part->Start().InstructionEnd(), until),
   2115         end.PrevInstruction().InstructionEnd());
   2116     if (!AllocationOk()) return;
   2117 
   2118     DCHECK(third_part != second_part);
   2119 
   2120     Spill(second_part);
   2121     AddToUnhandledSorted(third_part);
   2122   } else {
   2123     // The split result does not intersect with [start, end[.
   2124     // Nothing to spill. Just add it to the unhandled list as a whole.
   2125     AddToUnhandledSorted(second_part);
   2126   }
   2127 }
   2128 
   2129 
   2130 void LAllocator::Spill(LiveRange* range) {
   2131   DCHECK(!range->IsSpilled());
   2132   TraceAlloc("Spilling live range %d\n", range->id());
   2133   LiveRange* first = range->TopLevel();
   2134 
   2135   if (!first->HasAllocatedSpillOperand()) {
   2136     LOperand* op = TryReuseSpillSlot(range);
   2137     if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
   2138     first->SetSpillOperand(op);
   2139   }
   2140   range->MakeSpilled(chunk()->zone());
   2141 }
   2142 
   2143 
   2144 int LAllocator::RegisterCount() const {
   2145   return num_registers_;
   2146 }
   2147 
   2148 
   2149 #ifdef DEBUG
   2150 
   2151 
   2152 void LAllocator::Verify() const {
   2153   for (int i = 0; i < live_ranges()->length(); ++i) {
   2154     LiveRange* current = live_ranges()->at(i);
   2155     if (current != NULL) current->Verify();
   2156   }
   2157 }
   2158 
   2159 
   2160 #endif
   2161 
   2162 
   2163 LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
   2164     : CompilationPhase(name, allocator->graph()->info()),
   2165       allocator_(allocator) {
   2166   if (FLAG_hydrogen_stats) {
   2167     allocator_zone_start_allocation_size_ =
   2168         allocator->zone()->allocation_size();
   2169   }
   2170 }
   2171 
   2172 
   2173 LAllocatorPhase::~LAllocatorPhase() {
   2174   if (FLAG_hydrogen_stats) {
   2175     size_t size = allocator_->zone()->allocation_size() -
   2176                   allocator_zone_start_allocation_size_;
   2177     isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
   2178   }
   2179 
   2180   if (ShouldProduceTraceOutput()) {
   2181     isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
   2182     isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
   2183   }
   2184 
   2185 #ifdef DEBUG
   2186   if (allocator_ != NULL) allocator_->Verify();
   2187 #endif
   2188 }
   2189 
   2190 
   2191 }  // namespace internal
   2192 }  // namespace v8
   2193