// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/lithium-allocator.h"

#include "src/crankshaft/hydrogen.h"
#include "src/crankshaft/lithium-allocator-inl.h"
#include "src/crankshaft/lithium-inl.h"
#include "src/register-configuration.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {

const auto GetRegConfig = RegisterConfiguration::Crankshaft;

static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}
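
// Note on ordering: LifetimePosition wraps a single integer (Value()), and
// each instruction index has both a start and an end position (see
// LifetimePosition in lithium-allocator.h), so comparing Value()s, as Min
// and Max above do, totally orders all positions.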


UsePosition::UsePosition(LifetimePosition pos,
                         LOperand* operand,
                         LOperand* hint)
    : operand_(operand),
      hint_(hint),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    requires_reg_ = unalloc->HasRegisterPolicy() ||
        unalloc->HasDoubleRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  DCHECK(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  return hint_ != NULL && !hint_->IsUnallocated();
}


bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}


void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new(zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}
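
// Example: splitting the half-open interval [2, 10[ at position 6 shortens
// the receiver to [2, 6[ and links a new interval [6, 10[ after it.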


#ifdef DEBUG


void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    DCHECK(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}


bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}
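
// For instance, [2, 6[ and [4, 8[ overlap because 4 lies inside [2, 6[,
// while [2, 4[ and [4, 8[ only touch and therefore do not overlap.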


#endif


LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      kind_(UNALLOCATED_REGISTERS),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new (zone) LOperand()),
      spill_start_index_(kMaxInt) {}


void LiveRange::set_assigned_register(int reg, Zone* zone) {
  DCHECK(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  ConvertOperands(zone);
}


void LiveRange::MakeSpilled(Zone* zone) {
  DCHECK(!IsSpilled());
  DCHECK(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}


bool LiveRange::HasAllocatedSpillOperand() const {
  DCHECK(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}


void LiveRange::SetSpillOperand(LOperand* operand) {
  DCHECK(!operand->IsUnallocated());
  DCHECK(spill_operand_ != NULL);
  DCHECK(spill_operand_->IsIgnored());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}


UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = first_pos();
  UsePosition* prev = NULL;
  while (pos != NULL && pos->pos().Value() < start.Value()) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RequiresRegister()) {
    pos = pos->next();
  }
  return pos;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return use_pos->pos().Value() >
         pos.NextInstruction().InstructionEnd().Value();
}
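
// That is, if pos lies in instruction i, the range can be spilled only when
// its next use requiring a register comes after the end of instruction i + 1.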


LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    DCHECK(!IsSpilled());
    switch (Kind()) {
      case GENERAL_REGISTERS:
        op = LRegister::Create(assigned_register(), zone);
        break;
      case DOUBLE_REGISTERS:
        op = LDoubleRegister::Create(assigned_register(), zone);
        break;
      default:
        UNREACHABLE();
    }
  } else if (IsSpilled()) {
    DCHECK(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}
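
// current_interval_ is a cached cursor into the interval list: queries at
// monotonically increasing positions resume scanning from it rather than
// from first_interval_, keeping the forward sweeps below roughly linear.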


void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  DCHECK(Start().Value() < position.Value());
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval,
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed
    // to the split child, because the split child owns the use interval
    // covering it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->kind_ = result->parent_->kind_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}
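
// Example: a range with intervals [0, 4[ [6, 10[ split at position 8 keeps
// [0, 4[ [6, 8[ and hands the child [8, 10[. Splitting at 6, the start of
// an interval (i.e. the end of a lifetime hole), moves [6, 10[ to the child
// unsplit, and uses located exactly at position 6 go with it.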


// This implements an ordering on live ranges so that they are ordered by their
// start positions.  This is needed for the correctness of the register
// allocation algorithm.  If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used.  This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = first_pos();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}


void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  DCHECK(first_interval_ != NULL);
  DCHECK(first_interval_->start().Value() <= start.Value());
  DCHECK(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}


void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}
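
// Example: with existing intervals [4, 6[ [8, 10[, EnsureInterval(0, 9)
// absorbs both (10 is the farthest end seen, so the end is extended) and
// leaves the single interval [0, 10[.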


void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order of instruction processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with the last added interval.
      DCHECK(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}
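
// Example: blocks are walked backwards, so each new interval is prepended.
// AddUseInterval(0, 4) on a range whose first interval is [4, 10[ just
// extends that interval to [0, 10[.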


void LiveRange::AddUsePosition(LifetimePosition pos,
                               LOperand* operand,
                               LOperand* hint,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
  UsePosition* prev_hint = NULL;
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;
  }
}
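
// The use list stays sorted by position, and current_hint_operand_ always
// caches the hint of the earliest use that carries one, so the allocator
// can consult it without rescanning the list.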


void LiveRange::ConvertOperands(Zone* zone) {
  LOperand* op = CreateAssignedOperand(zone);
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    DCHECK(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      DCHECK(op->IsRegister() || op->IsDoubleRegister() ||
             !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start().Value() <= position.Value() &&
         position.Value() < End().Value();
}


bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    DCHECK(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}


LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
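
// This is a merge-style sweep over two sorted interval lists: whichever
// interval starts earlier is advanced, so the first intersection (if any)
// is found in time linear in the total number of intervals.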

LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()->allocator(), ZONE_NAME),
      chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(UNALLOCATED_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) {}

void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}


BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, excluding backward successor
  // edges.
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}


void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());
    iterator.Advance();
  }
}


int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kNumRegisters;
}
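
// Fixed ranges get negative ids so they can never collide with virtual
// register ids; general registers presumably take -1 .. -kNumRegisters via
// FixedLiveRangeID (see the header), and double registers continue below
// that block, as computed above.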


LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  DCHECK(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    }
  }
  return operand;
}


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  DCHECK(index < Register::kNumRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = GENERAL_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  DCHECK(index < DoubleRegister::kMaxNumRegisters);
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = DOUBLE_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}


LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return GapAt(index);
}


HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = LUnallocated::cast(operand)->virtual_register();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}


void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}


void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}
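
// Define and Use cooperate during the backward walk over each block: Use
// conservatively extends a range from the block start to the use, and the
// definition, visited later in the walk, shortens it to the actual
// definition point (or starts a fresh one-instruction interval if the
// value is never used).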


void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}


void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained(
          chunk()->zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack, we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted to a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
                                                         chunk()->zone());
      move->AddMove(first_output, range->GetSpillOperand(),
                    chunk()->zone());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        // The live range of writable input registers always goes until the end
        // of the instruction.
        DCHECK(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      LUnallocated* input_copy = cur_input->CopyUnconstrained(
          chunk()->zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. I.e. the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}


void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      DCHECK(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->ClobbersRegisters()) {
          for (int i = 0; i < Register::kNumRegisters; ++i) {
            if (GetRegConfig()->IsAllocatableGeneralCode(i)) {
              if (output == NULL || !output->IsRegister() ||
                  output->index() != i) {
                LiveRange* range = FixedLiveRangeFor(i);
                range->AddUseInterval(curr_position,
                                      curr_position.InstructionEnd(), zone());
              }
            }
          }
        }

        if (instr->ClobbersDoubleRegisters(isolate())) {
          for (int i = 0; i < DoubleRegister::kMaxNumRegisters; ++i) {
            if (GetRegConfig()->IsAllocatableDoubleCode(i)) {
              if (output == NULL || !output->IsDoubleRegister() ||
                  output->index() != i) {
                LiveRange* range = FixedDoubleLiveRangeFor(i);
                range->AddUseInterval(curr_position,
                                      curr_position.InstructionEnd(), zone());
              }
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);

          if (temp->IsUnallocated()) {
            LUnallocated* temp_unalloc = LUnallocated::cast(temp);
            if (temp_unalloc->HasDoubleRegisterPolicy()) {
              double_artificial_registers_.Add(
                  temp_unalloc->virtual_register() - first_artificial_register_,
                  zone());
            }
          }
        }
      }
    }

    index = index - 1;
  }
}


void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new (chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        DCHECK(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without the special processing done in
      // AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}


bool LAllocator::Allocate(LChunk* chunk) {
  DCHECK(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
      new (chunk->zone()) BitVector(Register::kNumRegisters, chunk->zone());
  assigned_double_registers_ = new (chunk->zone())
      BitVector(DoubleRegister::kMaxNumRegisters, chunk->zone());
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}


void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}


void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}


void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      DCHECK(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      DCHECK(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  DCHECK(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        DCHECK(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction.
        // Some branch instructions (e.g. loops' back edges)
        // can potentially cause a GC so they have a pointer map.
        // By inserting a move we essentially create a copy of a
        // value which is invisible to PopulatePointerMaps(), because we store
        // it into a location different from the operand of a live range
        // covering a branch instruction.
        // Thus we need to manually record a pointer.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
                                                 chunk()->zone());
    }
  }
}


LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
}
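
// If pos falls on a gap, the connecting move joins that gap's START or END
// list. Otherwise an instruction-start position resolves to the AFTER list
// of the preceding gap and an instruction-end position to the BEFORE list
// of the following gap, so the move stays adjacent to the split point.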


HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}


void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
                chunk()->zone());
            LOperand* cur_operand = second_range->CreateAssignedOperand(
                chunk()->zone());
            move->AddMove(prev_operand, cur_operand,
                          chunk()->zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}


bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}


void LAllocator::ResolveControlFlow() {
  LAllocatorPhase phase("L_Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
      }
      iterator.Advance();
    }
  }
}


void LAllocator::BuildLiveRanges() {
  LAllocatorPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
            LUnallocated::cast(to)->virtual_register() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      DCHECK(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
              block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }
    // Now live is live_in for this block, excluding values live out on
    // backward successor edges.
   1307     live_in_sets_[block_id] = live;
   1308 
   1309     // If this block is a loop header go back and patch up the necessary
   1310     // predecessor blocks.
   1311     if (block->IsLoopHeader()) {
   1312       // TODO(kmillikin): Need to be able to get the last block of the loop
   1313       // in the loop information. Add a live range stretching from the first
   1314       // loop instruction to the last for each value live on entry to the
   1315       // header.
   1316       HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
   1317       BitVector::Iterator iterator(live);
   1318       LifetimePosition start = LifetimePosition::FromInstructionIndex(
   1319           block->first_instruction_index());
   1320       LifetimePosition end = LifetimePosition::FromInstructionIndex(
   1321           back_edge->last_instruction_index()).NextInstruction();
   1322       while (!iterator.Done()) {
   1323         int operand_index = iterator.Current();
   1324         LiveRange* range = LiveRangeFor(operand_index);
   1325         range->EnsureInterval(start, end, zone());
   1326         iterator.Advance();
   1327       }
   1328 
   1329       for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
   1330         live_in_sets_[i]->Union(*live);
   1331       }
   1332     }

#ifdef DEBUG
    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        {
          AllowHandleDereference allow_deref;
          PrintF("Function: %s\n", chunk_->info()->GetDebugName().get());
        }
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      DCHECK(!found);
    }
#endif
  }

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
    }
  }
}


bool LAllocator::SafePointsAreInOrder() const {
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
  int safe_point = 0;
  for (int i = 0; i < pointer_maps->length(); ++i) {
    LPointerMap* map = pointer_maps->at(i);
    if (safe_point > map->lithium_position()) return false;
    safe_point = map->lithium_position();
  }
  return true;
}


void LAllocator::PopulatePointerMaps() {
  LAllocatorPhase phase("L_Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  DCHECK(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      DCHECK(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all.  Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos)) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
        DCHECK(!operand->IsStackSlot());
        map->RecordPointer(operand, chunk()->zone());
      }
    }
  }
}
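
// Illustrative example (not from the original source): for a tagged range
// with spill_start_index 12 and safe points at lithium positions 10 and 14,
// position 10 records only the assigned register (10 < 12), while position
// 14 records the spill slot and, if the covering child still holds a
// register there, the register as well.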


void LAllocator::AllocateGeneralRegisters() {
  LAllocatorPhase phase("L_Allocate general registers", this);
  num_registers_ = GetRegConfig()->num_allocatable_general_registers();
  allocatable_register_codes_ = GetRegConfig()->allocatable_general_codes();
  mode_ = GENERAL_REGISTERS;
  AllocateRegisters();
}


void LAllocator::AllocateDoubleRegisters() {
  LAllocatorPhase phase("L_Allocate double registers", this);
  num_registers_ = GetRegConfig()->num_allocatable_double_registers();
  allocatable_register_codes_ = GetRegConfig()->allocatable_double_codes();
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}
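
// Note (illustrative): the two entry points above run the same linear-scan
// core over disjoint register classes; each pass only queues live ranges
// whose Kind() matches mode_ and seeds the matching fixed ranges as
// inactive.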


void LAllocator::AllocateRegisters() {
  DCHECK(unhandled_live_ranges_.is_empty());

  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (live_ranges_[i]->Kind() == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  DCHECK(UnhandledIsSorted());

  DCHECK(reusable_slots_.is_empty());
  DCHECK(active_live_ranges_.is_empty());
  DCHECK(inactive_live_ranges_.is_empty());

  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    DCHECK(mode_ == GENERAL_REGISTERS);
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    DCHECK(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    DCHECK(UnhandledIsSorted());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill the live range eagerly if the use position that can
        // benefit from the register is too close to the start of the live
        // range.
        SpillBetween(current, current->Start(), pos->pos());
        if (!AllocationOk()) return;
        DCHECK(UnhandledIsSorted());
        continue;
      }
    }

    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled());

    bool result = TryAllocateFreeReg(current);
    if (!AllocationOk()) return;

    if (!result) AllocateBlockedReg(current);
    if (!AllocationOk()) return;

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  reusable_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
}
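
// Rough sketch of one linear-scan step (illustrative only): with unhandled
// ranges starting at 2, 8 and 16, the sorted list keeps the range starting
// at 2 at its end, so RemoveLast() picks it first. Active ranges ending at
// or before 2 are retired, active ranges not covering 2 move to inactive,
// and only then is a free (or, failing that, a blocked) register tried.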


const char* LAllocator::RegisterName(int allocation_index) {
  if (mode_ == GENERAL_REGISTERS) {
    return GetRegConfig()->GetGeneralRegisterName(allocation_index);
  } else {
    return GetRegConfig()->GetDoubleRegisterName(allocation_index);
  }
}


void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    base::OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}


bool LAllocator::HasTaggedValue(int virtual_register) const {
  HValue* value = graph_->LookupValue(virtual_register);
  if (value == NULL) return false;
  return value->representation().IsTagged() && !value->type().IsSmi();
}


RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
  if (virtual_register < first_artificial_register_) {
    HValue* value = graph_->LookupValue(virtual_register);
    if (value != NULL && value->representation().IsDouble()) {
      return DOUBLE_REGISTERS;
    }
  } else if (double_artificial_registers_.Contains(
      virtual_register - first_artificial_register_)) {
    return DOUBLE_REGISTERS;
  }

  return GENERAL_REGISTERS;
}


void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range, zone());
}


void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range, zone());
}


void LAllocator::AddToUnhandledSorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
  DCHECK(allocation_finger_.Value() <= range->Start().Value());
  for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
    LiveRange* cur_range = unhandled_live_ranges_.at(i);
    if (range->ShouldBeAllocatedBefore(cur_range)) {
      TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
      DCHECK(UnhandledIsSorted());
      return;
    }
  }
  TraceAlloc("Add live range %d to unhandled at start\n", range->id());
  unhandled_live_ranges_.InsertAt(0, range, zone());
  DCHECK(UnhandledIsSorted());
}


void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range, zone());
}


static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
  DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) ||
         !(*b)->ShouldBeAllocatedBefore(*a));
  if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
  if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
  return (*a)->id() - (*b)->id();
}
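
// Example (illustrative): if *a should be allocated before *b the helper
// returns 1, so *a sorts towards the end of the list; this matches
// SortUnhandled()'s convention of processing ranges from the back.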


// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list.  This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}


bool LAllocator::UnhandledIsSorted() {
  int len = unhandled_live_ranges_.length();
  for (int i = 1; i < len; i++) {
    LiveRange* a = unhandled_live_ranges_.at(i - 1);
    LiveRange* b = unhandled_live_ranges_.at(i);
    if (a->Start().Value() < b->Start().Value()) return false;
  }
  return true;
}


void LAllocator::FreeSpillSlot(LiveRange* range) {
  // Check that we are the last range.
  if (range->next() != NULL) return;

  if (!range->TopLevel()->HasAllocatedSpillOperand()) return;

  int index = range->TopLevel()->GetSpillOperand()->index();
  if (index >= 0) {
    reusable_slots_.Add(range, zone());
  }
}


LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
  if (reusable_slots_.is_empty()) return NULL;
  if (reusable_slots_.first()->End().Value() >
      range->TopLevel()->Start().Value()) {
    return NULL;
  }
  LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
  reusable_slots_.Remove(0);
  return result;
}
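
// Note (illustrative): a slot becomes reusable only once the last child of
// its range has ended, and it is handed out only to a range that starts no
// earlier than that end, so two values never share a slot while both live.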


void LAllocator::ActiveToHandled(LiveRange* range) {
  DCHECK(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::ActiveToInactive(LiveRange* range) {
  DCHECK(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}


void LAllocator::InactiveToHandled(LiveRange* range) {
  DCHECK(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}


void LAllocator::InactiveToActive(LiveRange* range) {
  DCHECK(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range, zone());
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}


bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  DCHECK(DoubleRegister::kMaxNumRegisters >= Register::kNumRegisters);

  LifetimePosition free_until_pos[DoubleRegister::kMaxNumRegisters];

  for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    DCHECK(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  LOperand* hint = current->FirstHint();
  if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
    int register_index = hint->index();
    TraceAlloc(
        "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
        RegisterName(register_index),
        free_until_pos[register_index].Value(),
        current->id(),
        current->End().Value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[register_index].Value() >= current->End().Value()) {
      TraceAlloc("Assigning preferred reg %s to live range %d\n",
                 RegisterName(register_index),
                 current->id());
      SetLiveRangeAssignedRegister(current, register_index);
      return true;
    }
  }

  // Find the register which stays free for the longest time.
  int reg = allocatable_register_codes_[0];
  for (int i = 1; i < RegisterCount(); ++i) {
    int code = allocatable_register_codes_[i];
    if (free_until_pos[code].Value() > free_until_pos[reg].Value()) {
      reg = code;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at the position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    if (!AllocationOk()) return false;
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end.
  DCHECK(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}
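
// Worked example (illustrative): suppose current covers [10, 40) and the
// best candidate register is free until position 24. Since 10 < 24 < 40,
// current is split at 24, the tail is re-queued as unhandled, and the head
// gets the register; had the register stayed free past 40, no split would
// have been needed.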


void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  LifetimePosition use_pos[DoubleRegister::kMaxNumRegisters];
  LifetimePosition block_pos[DoubleRegister::kMaxNumRegisters];

  for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    DCHECK(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  int reg = allocatable_register_codes_[0];
  for (int i = 1; i < RegisterCount(); ++i) {
    int code = allocatable_register_codes_[i];
    if (use_pos[code].Value() > use_pos[reg].Value()) {
      reg = code;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill the starting part of the live range up to that use.
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    if (!AllocationOk()) return;
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  DCHECK(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  SetLiveRangeAssignedRegister(current, reg);

  // This register was not free. Thus we need to find and spill
  // parts of active and inactive live ranges that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}
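
// Worked example (illustrative): if every use_pos[] entry is earlier than
// the first use of current that actually needs a register, current is
// spilled up to that use instead of evicting anyone. Otherwise the register
// whose next use lies farthest away is taken, and intersecting active and
// inactive ranges holding it are split and spilled around current.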


LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
                                                    LifetimePosition pos) {
  HBasicBlock* block = GetBlock(pos.InstructionStart());
  HBasicBlock* loop_header =
      block->IsLoopHeader() ? block : block->parent_loop_header();

  if (loop_header == NULL) return pos;

  UsePosition* prev_use =
    range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != NULL) {
    // We are going to spill the live range inside the loop. If possible,
    // try to move the spilling position backwards to the loop header: this
    // reduces the number of memory moves on the back edge.
    LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
        loop_header->first_instruction_index());

    if (range->Covers(loop_start)) {
      if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
        // No register-beneficial use inside the loop before the position.
        pos = loop_start;
      }
    }

    // Try hoisting out to an outer loop.
    loop_header = loop_header->parent_loop_header();
  }

  return pos;
}
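
// Illustrative example: when spilling at a position inside a nested loop,
// the spill is hoisted to the innermost loop header the range covers
// (provided no register-beneficial use occurs in between), and the same
// test is then retried for each enclosing loop header in turn.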


void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  DCHECK(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
      if (next_pos == NULL) {
        SpillAfter(range, spill_pos);
      } else {
        // When spilling between spill_pos and next_pos ensure that the range
        // remains spilled at least until the start of the current live range.
        // This guarantees that we will not introduce new unhandled ranges that
        // start before the current range, as this would violate the allocation
        // invariant and lead to an inconsistent state of active and inactive
        // live ranges: ranges are allocated in order of their start positions,
        // and ranges are retired from active/inactive when the start of the
        // current live range is larger than their end.
        SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
      }
      if (!AllocationOk()) return;
      ActiveToHandled(range);
      --i;
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    DCHECK(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        if (!AllocationOk()) return;
        InactiveToHandled(range);
        --i;
      }
    }
  }
}


bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
  return pos.IsInstructionStart() &&
      InstructionAt(pos.InstructionIndex())->IsLabel();
}


LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
  DCHECK(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect live ranges if the split occurred at the end
  // of a control instruction.
  DCHECK(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  int vreg = GetVirtualRegister();
  if (!AllocationOk()) return NULL;
  LiveRange* result = LiveRangeFor(vreg);
  range->SplitAt(pos, result, zone());
  return result;
}


LiveRange* LAllocator::SplitBetween(LiveRange* range,
                                    LifetimePosition start,
                                    LifetimePosition end) {
  DCHECK(!range->IsFixed());
  TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
             range->id(),
             start.Value(),
             end.Value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  DCHECK(split_pos.Value() >= start.Value());
  return SplitRangeAt(range, split_pos);
}


LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  DCHECK(start_instr <= end_instr);

  // We have no choice: start and end are in the same instruction.
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find the header of the outermost loop.
  while (block->parent_loop_header() != NULL &&
      block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}
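
// Illustrative example: when splitting between a position in block B1 and
// one inside a loop body B4, the chosen split point is the first
// instruction of the outermost loop header between them, which keeps the
// resulting spill code out of the loop.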


void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitRangeAt(range, pos);
  if (!AllocationOk()) return;
  Spill(second_part);
}


void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}


void LAllocator::SpillBetweenUntil(LiveRange* range,
                                   LifetimePosition start,
                                   LifetimePosition until,
                                   LifetimePosition end) {
  CHECK(start.Value() < end.Value());
  LiveRange* second_part = SplitRangeAt(range, start);
  if (!AllocationOk()) return;

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position between ]start+1, end[, spill the middle part,
    // and add the rest to the unhandled list.
    LiveRange* third_part = SplitBetween(
        second_part,
        Max(second_part->Start().InstructionEnd(), until),
        end.PrevInstruction().InstructionEnd());
    if (!AllocationOk()) return;

    DCHECK(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill; just add it to the unhandled list as a whole.
    AddToUnhandledSorted(second_part);
  }
}
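
// Illustrative example: SpillBetween(range, s, e) splits off the part of
// the range from s onwards, splits that part again somewhere inside ]s, e[,
// spills the middle piece and re-queues the tail, so the value sits in
// memory only where no register is required.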


void LAllocator::Spill(LiveRange* range) {
  DCHECK(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
    first->SetSpillOperand(op);
  }
  range->MakeSpilled(chunk()->zone());
}
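
// Note (illustrative): the spill operand is stored on the top-level range,
// so every child produced by splitting shares one canonical stack slot.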


int LAllocator::RegisterCount() const {
  return num_registers_;
}


#ifdef DEBUG


void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif


LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
    : CompilationPhase(name, allocator->graph()->info()),
      allocator_(allocator) {
  if (FLAG_hydrogen_stats) {
    allocator_zone_start_allocation_size_ =
        allocator->zone()->allocation_size();
  }
}


LAllocatorPhase::~LAllocatorPhase() {
  if (FLAG_hydrogen_stats) {
    size_t size = allocator_->zone()->allocation_size() -
                  allocator_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
  }

  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
    isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
  }

#ifdef DEBUG
  if (allocator_ != NULL) allocator_->Verify();
#endif
}


}  // namespace internal
}  // namespace v8