// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/lithium-allocator.h"

#include "src/crankshaft/hydrogen.h"
#include "src/crankshaft/lithium-allocator-inl.h"
#include "src/crankshaft/lithium-inl.h"
#include "src/register-configuration.h"
#include "src/string-stream.h"

namespace v8 {
namespace internal {

static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
  return a.Value() < b.Value() ? a : b;
}


static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
  return a.Value() > b.Value() ? a : b;
}


UsePosition::UsePosition(LifetimePosition pos,
                         LOperand* operand,
                         LOperand* hint)
    : operand_(operand),
      hint_(hint),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    requires_reg_ = unalloc->HasRegisterPolicy() ||
        unalloc->HasDoubleRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  DCHECK(pos_.IsValid());
}


bool UsePosition::HasHint() const {
  return hint_ != NULL && !hint_->IsUnallocated();
}


bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}


bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}


void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos.Value() != start().Value());
  UseInterval* after = new(zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}
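
// For example, splitting the interval [8, 20) at position 12 shortens this
// interval to [8, 12) and links a new interval [12, 20) right after it, so
// the chain stays sorted by start position.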


#ifdef DEBUG


void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    DCHECK(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}


bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}
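
// E.g. [4, 10) and [8, 14) overlap because 8 lies inside [4, 10), while
// [4, 8) and [8, 12) do not: the intervals are half-open, so merely touching
// endpoints is not an overlap.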


#endif


LiveRange::LiveRange(int id, Zone* zone)
    : id_(id),
      spilled_(false),
      kind_(UNALLOCATED_REGISTERS),
      assigned_register_(kInvalidAssignment),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      current_hint_operand_(NULL),
      spill_operand_(new (zone) LOperand()),
      spill_start_index_(kMaxInt) {}


void LiveRange::set_assigned_register(int reg, Zone* zone) {
  DCHECK(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  ConvertOperands(zone);
}


void LiveRange::MakeSpilled(Zone* zone) {
  DCHECK(!IsSpilled());
  DCHECK(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands(zone);
}


bool LiveRange::HasAllocatedSpillOperand() const {
  DCHECK(spill_operand_ != NULL);
  return !spill_operand_->IsIgnored();
}


void LiveRange::SetSpillOperand(LOperand* operand) {
  DCHECK(!operand->IsUnallocated());
  DCHECK(spill_operand_ != NULL);
  DCHECK(spill_operand_->IsIgnored());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}


UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}


UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}


UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) {
  UsePosition* pos = first_pos();
  UsePosition* prev = NULL;
  while (pos != NULL && pos->pos().Value() < start.Value()) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}


UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
  UsePosition* pos = NextUsePosition(start);
  while (pos != NULL && !pos->RequiresRegister()) {
    pos = pos->next();
  }
  return pos;
}


bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return
      use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
}
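
// E.g. with two lifetime positions per instruction, for a pos at the start
// of instruction 5 (value 10) the cutoff is the end of instruction 6
// (value 13): the range is spillable only if its next use requiring a
// register is at value 14 or later.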


LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    DCHECK(!IsSpilled());
    switch (Kind()) {
      case GENERAL_REGISTERS:
        op = LRegister::Create(assigned_register(), zone);
        break;
      case DOUBLE_REGISTERS:
        op = LDoubleRegister::Create(assigned_register(), zone);
        break;
      default:
        UNREACHABLE();
    }
  } else if (IsSpilled()) {
    DCHECK(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}


UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}


void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}


void LiveRange::SplitAt(LifetimePosition position,
                        LiveRange* result,
                        Zone* zone) {
  DCHECK(Start().Value() < position.Value());
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  if (current->start().Value() == position.Value()) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). A use at this position should be attributed to
    // the split child because the split child owns the use interval covering
    // it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = NULL;
  current_interval_ = NULL;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->kind_ = result->parent_->kind_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}
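
// For example, splitting a range with intervals [10, 20) and [30, 50) at
// position 40 leaves [10, 20) and [30, 40) in this range and moves [40, 50)
// into the child; use positions strictly after 40 move with it (a use exactly
// at 40 moves only when 40 begins a use interval, per split_at_start).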


// This implements an ordering on live ranges so that they are ordered by their
// start positions.  This is needed for the correctness of the register
// allocation algorithm.  If two live ranges start at the same offset then there
// is a tie breaker based on where the value is first used.  This part of the
// ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    UsePosition* pos = first_pos();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}
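
// E.g. a range starting at position 6 is allocated before one starting at 10;
// if both start at 6, the one whose first use comes earlier wins, and a range
// with no use positions at all sorts after one that has them.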


void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  DCHECK(first_interval_ != NULL);
  DCHECK(first_interval_->start().Value() <= start.Value());
  DCHECK(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}


void LiveRange::EnsureInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new(zone) UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}
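
// E.g. if the range currently has intervals [2, 4) and [8, 12), calling
// EnsureInterval(0, 10) absorbs both (their starts are <= 10) and replaces
// them with the single interval [0, 12), keeping the later end point.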


void LiveRange::AddUseInterval(LifetimePosition start,
                               LifetimePosition end,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new(zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      UseInterval* interval = new(zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // The order in which instructions are processed (see
      // ProcessInstructions) guarantees that each new use interval either
      // precedes or intersects with the last added interval.
      DCHECK(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}
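
// Intervals arrive in reverse order, so they attach at the front: e.g. after
// adding [6, 10), adding [2, 6) merges into [2, 10) because the endpoints
// touch, whereas adding [2, 4) would be prepended as a separate interval.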


void LiveRange::AddUsePosition(LifetimePosition pos,
                               LOperand* operand,
                               LOperand* hint,
                               Zone* zone) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
  UsePosition* prev_hint = NULL;
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  if (prev_hint == NULL && use_pos->HasHint()) {
    current_hint_operand_ = hint;
  }
}


void LiveRange::ConvertOperands(Zone* zone) {
  LOperand* op = CreateAssignedOperand(zone);
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    DCHECK(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      DCHECK(op->IsRegister() || op->IsDoubleRegister() ||
             !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}


bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start().Value() <= position.Value() &&
         position.Value() < End().Value();
}


bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    DCHECK(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}


LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}


LAllocator::LAllocator(int num_values, HGraph* graph)
    : chunk_(NULL),
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(UNALLOCATED_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) {}


void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}


BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, excluding values that flow in only
  // over backward successor edges.
  BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());

  // Process all successor blocks.
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    HBasicBlock* successor = it.Current();
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }
  }

  return live_out;
}
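
// E.g. if a successor block starts with phi(v1, v2) and v1 flows in from this
// block, only v1 is added to this block's live_out; the phi itself becomes
// live at the successor, not here.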


void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end, zone());
    iterator.Advance();
  }
}


int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kNumRegisters;
}
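
// Fixed ranges get negative IDs so they can never collide with virtual
// register indices: assuming FixedLiveRangeID(index) is -index - 1, general
// registers take -1 .. -Register::kNumRegisters and double registers take the
// values below that, e.g. FixedDoubleLiveRangeID(0) == -1 - kNumRegisters.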


LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  DCHECK(operand->HasFixedPolicy());
  if (operand->HasFixedSlotPolicy()) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->HasFixedDoubleRegisterPolicy()) {
    int reg_index = operand->fixed_register_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    }
  }
  return operand;
}


LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  DCHECK(index < Register::kNumRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = GENERAL_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  DCHECK(index < DoubleRegister::kMaxNumRegisters);
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    DCHECK(result->IsFixed());
    result->kind_ = DOUBLE_REGISTERS;
    SetLiveRangeAssignedRegister(result, index);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}


LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}


LGap* LAllocator::GetLastGap(HBasicBlock* block) {
  int last_instruction = block->last_instruction_index();
  int index = chunk_->NearestGapPos(last_instruction);
  return GapAt(index);
}


HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = LUnallocated::cast(operand)->virtual_register();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}


LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}


void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // This can happen if there is a definition without a use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}
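
// Because blocks are processed backwards, a live value's range already
// extends to the block start by the time its definition is seen; Define()
// then shortens it to begin at the defining instruction. E.g. a value defined
// at position 14 and used at 22 ends up with the interval [14, 22) instead of
// one starting at the block boundary.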


void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}


void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}


void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  if (start == -1) return;
  for (int i = start; i <= end; ++i) {
    if (IsGapAt(i)) {
      LInstruction* instr = NULL;
      LInstruction* prev_instr = NULL;
      if (i < end) instr = InstructionAt(i + 1);
      if (i > start) prev_instr = InstructionAt(i - 1);
      MeetConstraintsBetween(prev_instr, instr, i);
      if (!AllocationOk()) return;
    }
  }
}


void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); !it.Done(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Current());
      if (temp->HasFixedPolicy()) {
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained(
          chunk()->zone());
      bool is_tagged = HasTaggedValue(first_output->virtual_register());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack; we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to the spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted at a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
                                                         chunk()->zone());
      move->AddMove(first_output, range->GetSpillOperand(),
                    chunk()->zone());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (UseIterator it(second); !it.Done(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Current());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        bool is_tagged = HasTaggedValue(cur_input->virtual_register());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->HasWritableRegisterPolicy()) {
        // The live range of writable input registers always goes until the end
        // of the instruction.
        DCHECK(!cur_input->IsUsedAtStart());

        LUnallocated* input_copy = cur_input->CopyUnconstrained(
            chunk()->zone());
        int vreg = GetVirtualRegister();
        if (!AllocationOk()) return;
        cur_input->set_virtual_register(vreg);

        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_,
              zone());
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->virtual_register();
      int input_vreg = cur_input->virtual_register();

      LUnallocated* input_copy = cur_input->CopyUnconstrained(
          chunk()->zone());
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to take on a tagged representation immediately,
        // before the pointer map can be used. That is, the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}


void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->current_hint_operand();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
              Define(curr_position, to, from);
              live->Remove(LUnallocated::cast(to)->virtual_register());
            } else {
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(LUnallocated::cast(from)->virtual_register());
        }
      }
    } else {
      DCHECK(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          if (output->IsUnallocated()) {
            live->Remove(LUnallocated::cast(output)->virtual_register());
          }
          Define(curr_position, output, NULL);
        }

        if (instr->ClobbersRegisters()) {
          for (int i = 0; i < Register::kNumRegisters; ++i) {
            if (Register::from_code(i).IsAllocatable()) {
              if (output == NULL || !output->IsRegister() ||
                  output->index() != i) {
                LiveRange* range = FixedLiveRangeFor(i);
                range->AddUseInterval(curr_position,
                                      curr_position.InstructionEnd(), zone());
              }
            }
          }
        }

        if (instr->ClobbersDoubleRegisters(isolate())) {
          for (int i = 0; i < DoubleRegister::kMaxNumRegisters; ++i) {
            if (DoubleRegister::from_code(i).IsAllocatable()) {
              if (output == NULL || !output->IsDoubleRegister() ||
                  output->index() != i) {
                LiveRange* range = FixedDoubleLiveRangeFor(i);
                range->AddUseInterval(curr_position,
                                      curr_position.InstructionEnd(), zone());
              }
            }
          }
        }

        for (UseIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* input = it.Current();

          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) {
            live->Add(LUnallocated::cast(input)->virtual_register());
          }
        }

        for (TempIterator it(instr); !it.Done(); it.Advance()) {
          LOperand* temp = it.Current();
          if (instr->ClobbersTemps()) {
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);

          if (temp->IsUnallocated()) {
            LUnallocated* temp_unalloc = LUnallocated::cast(temp);
            if (temp_unalloc->HasDoubleRegisterPolicy()) {
              double_artificial_registers_.Add(
                  temp_unalloc->virtual_register() - first_artificial_register_,
                  zone());
            }
          }
        }
      }
    }

    index = index - 1;
  }
}


void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new (chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        DCHECK(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without the special processing done in
      // AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}


bool LAllocator::Allocate(LChunk* chunk) {
  DCHECK(chunk_ == NULL);
  chunk_ = static_cast<LPlatformChunk*>(chunk);
  assigned_registers_ =
      new (chunk->zone()) BitVector(Register::kNumRegisters, chunk->zone());
  assigned_double_registers_ = new (chunk->zone())
      BitVector(DoubleRegister::kMaxNumRegisters, chunk->zone());
  MeetRegisterConstraints();
  if (!AllocationOk()) return false;
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  if (!AllocationOk()) return false;
  AllocateDoubleRegisters();
  if (!AllocationOk()) return false;
  PopulatePointerMaps();
  ConnectRanges();
  ResolveControlFlow();
  return true;
}


void LAllocator::MeetRegisterConstraints() {
  LAllocatorPhase phase("L_Register constraints", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int i = 0; i < blocks->length(); ++i) {
    HBasicBlock* block = blocks->at(i);
    MeetRegisterConstraints(block);
    if (!AllocationOk()) return;
  }
}


void LAllocator::ResolvePhis() {
  LAllocatorPhase phase("L_Resolve phis", this);

  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    ResolvePhis(block);
  }
}


void LAllocator::ResolveControlFlow(LiveRange* range,
                                    HBasicBlock* block,
                                    HBasicBlock* pred) {
  LifetimePosition pred_end =
      LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
  LifetimePosition cur_start =
      LifetimePosition::FromInstructionIndex(block->first_instruction_index());
  LiveRange* pred_cover = NULL;
  LiveRange* cur_cover = NULL;
  LiveRange* cur_range = range;
  while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
    if (cur_range->CanCover(cur_start)) {
      DCHECK(cur_cover == NULL);
      cur_cover = cur_range;
    }
    if (cur_range->CanCover(pred_end)) {
      DCHECK(pred_cover == NULL);
      pred_cover = cur_range;
    }
    cur_range = cur_range->next();
  }

  if (cur_cover->IsSpilled()) return;
  DCHECK(pred_cover != NULL && cur_cover != NULL);
  if (pred_cover != cur_cover) {
    LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
    LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
    if (!pred_op->Equals(cur_op)) {
      LGap* gap = NULL;
      if (block->predecessors()->length() == 1) {
        gap = GapAt(block->first_instruction_index());
      } else {
        DCHECK(pred->end()->SecondSuccessor() == NULL);
        gap = GetLastGap(pred);

        // We are going to insert a move before the branch instruction.
        // Some branch instructions (e.g. loops' back edges)
        // can potentially cause a GC so they have a pointer map.
        // By inserting a move we essentially create a copy of a
        // value which is invisible to PopulatePointerMaps(), because we store
        // it into a location different from the operand of a live range
        // covering a branch instruction.
        // Thus we need to manually record a pointer.
        LInstruction* branch = InstructionAt(pred->last_instruction_index());
        if (branch->HasPointerMap()) {
          if (HasTaggedValue(range->id())) {
            branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
          } else if (!cur_op->IsDoubleStackSlot() &&
                     !cur_op->IsDoubleRegister()) {
            branch->pointer_map()->RemovePointer(cur_op);
          }
        }
      }
      gap->GetOrCreateParallelMove(
          LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
                                                 chunk()->zone());
    }
  }
}


LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
  int index = pos.InstructionIndex();
  if (IsGapAt(index)) {
    LGap* gap = GapAt(index);
    return gap->GetOrCreateParallelMove(
        pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
  }
  int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
  return GapAt(gap_pos)->GetOrCreateParallelMove(
      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
}
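
// E.g. for a split at the end of (non-gap) instruction 7 the connecting move
// goes into the BEFORE slot of the gap at index 8, while a split at the start
// of instruction 7 lands in the AFTER slot of the gap at index 6.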


HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
  LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
  return gap->block();
}


void LAllocator::ConnectRanges() {
  LAllocatorPhase phase("L_Connect ranges", this);
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* first_range = live_ranges()->at(i);
    if (first_range == NULL || first_range->parent() != NULL) continue;

    LiveRange* second_range = first_range->next();
    while (second_range != NULL) {
      LifetimePosition pos = second_range->Start();

      if (!second_range->IsSpilled()) {
        // Add gap move if the two live ranges touch and there is no block
        // boundary.
        if (first_range->End().Value() == pos.Value()) {
          bool should_insert = true;
          if (IsBlockBoundary(pos)) {
            should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
          }
          if (should_insert) {
            LParallelMove* move = GetConnectingParallelMove(pos);
            LOperand* prev_operand = first_range->CreateAssignedOperand(
                chunk()->zone());
            LOperand* cur_operand = second_range->CreateAssignedOperand(
                chunk()->zone());
            move->AddMove(prev_operand, cur_operand,
                          chunk()->zone());
          }
        }
      }

      first_range = second_range;
      second_range = second_range->next();
    }
  }
}


bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
  if (block->predecessors()->length() != 1) return false;
  return block->predecessors()->first()->block_id() == block->block_id() - 1;
}
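
// E.g. a block whose only predecessor is the block laid out immediately
// before it is a simple fall-through, so connecting moves at its boundary can
// be inserted eagerly; any other boundary is handled by ResolveControlFlow().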


void LAllocator::ResolveControlFlow() {
  LAllocatorPhase phase("L_Resolve control flow", this);
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = 1; block_id < blocks->length(); ++block_id) {
    HBasicBlock* block = blocks->at(block_id);
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets_[block->block_id()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int operand_index = iterator.Current();
      for (int i = 0; i < block->predecessors()->length(); ++i) {
        HBasicBlock* cur = block->predecessors()->at(i);
        LiveRange* cur_range = LiveRangeFor(operand_index);
        ResolveControlFlow(cur_range, block, cur);
      }
      iterator.Advance();
    }
  }
}


void LAllocator::BuildLiveRanges() {
  LAllocatorPhase phase("L_Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                         chunk()->zone());
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() &&
            LUnallocated::cast(to)->virtual_register() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      DCHECK(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live contains the live_in set for this block, except for values
    // live out on backward successor edges.
   1306     live_in_sets_[block_id] = live;
   1307 
   1308     // If this block is a loop header go back and patch up the necessary
   1309     // predecessor blocks.
   1310     if (block->IsLoopHeader()) {
   1311       // TODO(kmillikin): Need to be able to get the last block of the loop
   1312       // in the loop information. Add a live range stretching from the first
   1313       // loop instruction to the last for each value live on entry to the
   1314       // header.
   1315       HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
   1316       BitVector::Iterator iterator(live);
   1317       LifetimePosition start = LifetimePosition::FromInstructionIndex(
   1318           block->first_instruction_index());
   1319       LifetimePosition end = LifetimePosition::FromInstructionIndex(
   1320           back_edge->last_instruction_index()).NextInstruction();
   1321       while (!iterator.Done()) {
   1322         int operand_index = iterator.Current();
   1323         LiveRange* range = LiveRangeFor(operand_index);
   1324         range->EnsureInterval(start, end, zone());
   1325         iterator.Advance();
   1326       }
   1327 
   1328       for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
   1329         live_in_sets_[i]->Union(*live);
   1330       }
   1331     }
   1332 
   1333 #ifdef DEBUG
   1334     if (block_id == 0) {
   1335       BitVector::Iterator iterator(live);
   1336       bool found = false;
   1337       while (!iterator.Done()) {
   1338         found = true;
   1339         int operand_index = iterator.Current();
   1340         {
   1341           AllowHandleDereference allow_deref;
   1342           PrintF("Function: %s\n", chunk_->info()->GetDebugName().get());
   1343         }
   1344         PrintF("Value %d used before first definition!\n", operand_index);
   1345         LiveRange* range = LiveRangeFor(operand_index);
   1346         PrintF("First use is at %d\n", range->first_pos()->pos().Value());
   1347         iterator.Advance();
   1348       }
   1349       DCHECK(!found);
   1350     }
   1351 #endif
   1352   }
   1353 
   1354   for (int i = 0; i < live_ranges_.length(); ++i) {
   1355     if (live_ranges_[i] != NULL) {
   1356       live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
   1357     }
   1358   }
   1359 }
   1360 
   1361 
   1362 bool LAllocator::SafePointsAreInOrder() const {
   1363   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
   1364   int safe_point = 0;
   1365   for (int i = 0; i < pointer_maps->length(); ++i) {
   1366     LPointerMap* map = pointer_maps->at(i);
   1367     if (safe_point > map->lithium_position()) return false;
   1368     safe_point = map->lithium_position();
   1369   }
   1370   return true;
   1371 }
   1372 
   1373 
   1374 void LAllocator::PopulatePointerMaps() {
   1375   LAllocatorPhase phase("L_Populate pointer maps", this);
   1376   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
   1377 
   1378   DCHECK(SafePointsAreInOrder());
   1379 
  // Iterate over all safe point positions and record a pointer for every
  // tagged live range that covers the point, whether it lives in a spill
  // slot or in a register.
   1382   int first_safe_point_index = 0;
   1383   int last_range_start = 0;
   1384   for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
   1385     LiveRange* range = live_ranges()->at(range_idx);
   1386     if (range == NULL) continue;
   1387     // Iterate over the first parts of multi-part live ranges.
   1388     if (range->parent() != NULL) continue;
   1389     // Skip non-pointer values.
   1390     if (!HasTaggedValue(range->id())) continue;
   1391     // Skip empty live ranges.
   1392     if (range->IsEmpty()) continue;
   1393 
   1394     // Find the extent of the range and its children.
   1395     int start = range->Start().InstructionIndex();
   1396     int end = 0;
   1397     for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
   1398       LifetimePosition this_end = cur->End();
   1399       if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
   1400       DCHECK(cur->Start().InstructionIndex() >= start);
   1401     }
   1402 
   1403     // Most of the ranges are in order, but not all.  Keep an eye on when
   1404     // they step backwards and reset the first_safe_point_index so we don't
   1405     // miss any safe points.
   1406     if (start < last_range_start) {
   1407       first_safe_point_index = 0;
   1408     }
   1409     last_range_start = start;
   1410 
    // Step across all the safe points that are before the start of this range,
    // recording how far we stepped so we don't have to repeat this work for
    // the next range.
   1413     while (first_safe_point_index < pointer_maps->length()) {
   1414       LPointerMap* map = pointer_maps->at(first_safe_point_index);
   1415       int safe_point = map->lithium_position();
   1416       if (safe_point >= start) break;
   1417       first_safe_point_index++;
   1418     }
   1419 
   1420     // Step through the safe points to see whether they are in the range.
   1421     for (int safe_point_index = first_safe_point_index;
   1422          safe_point_index < pointer_maps->length();
   1423          ++safe_point_index) {
   1424       LPointerMap* map = pointer_maps->at(safe_point_index);
   1425       int safe_point = map->lithium_position();
   1426 
   1427       // The safe points are sorted so we can stop searching here.
   1428       if (safe_point - 1 > end) break;
   1429 
   1430       // Advance to the next active range that covers the current
   1431       // safe point position.
   1432       LifetimePosition safe_point_pos =
   1433           LifetimePosition::FromInstructionIndex(safe_point);
   1434       LiveRange* cur = range;
   1435       while (cur != NULL && !cur->Covers(safe_point_pos)) {
   1436         cur = cur->next();
   1437       }
   1438       if (cur == NULL) continue;
   1439 
   1440       // Check if the live range is spilled and the safe point is after
   1441       // the spill position.
   1442       if (range->HasAllocatedSpillOperand() &&
   1443           safe_point >= range->spill_start_index()) {
   1444         TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
   1445                    range->id(), range->spill_start_index(), safe_point);
   1446         map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
   1447       }
   1448 
   1449       if (!cur->IsSpilled()) {
   1450         TraceAlloc("Pointer in register for range %d (start at %d) "
   1451                    "at safe point %d\n",
   1452                    cur->id(), cur->Start().Value(), safe_point);
   1453         LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
   1454         DCHECK(!operand->IsStackSlot());
   1455         map->RecordPointer(operand, chunk()->zone());
   1456       }
   1457     }
   1458   }
   1459 }
   1460 
   1461 
void LAllocator::AllocateGeneralRegisters() {
  LAllocatorPhase phase("L_Allocate general registers", this);
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  num_registers_ = config->num_allocatable_general_registers();
  allocatable_register_codes_ = config->allocatable_general_codes();
  mode_ = GENERAL_REGISTERS;
  AllocateRegisters();
}
   1473 
   1474 
void LAllocator::AllocateDoubleRegisters() {
  LAllocatorPhase phase("L_Allocate double registers", this);
  const RegisterConfiguration* config =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT);
  num_registers_ = config->num_allocatable_double_registers();
  allocatable_register_codes_ = config->allocatable_double_codes();
  mode_ = DOUBLE_REGISTERS;
  AllocateRegisters();
}
   1486 
   1487 
   1488 void LAllocator::AllocateRegisters() {
   1489   DCHECK(unhandled_live_ranges_.is_empty());
   1490 
   1491   for (int i = 0; i < live_ranges_.length(); ++i) {
   1492     if (live_ranges_[i] != NULL) {
   1493       if (live_ranges_[i]->Kind() == mode_) {
   1494         AddToUnhandledUnsorted(live_ranges_[i]);
   1495       }
   1496     }
   1497   }
   1498   SortUnhandled();
   1499   DCHECK(UnhandledIsSorted());
   1500 
   1501   DCHECK(reusable_slots_.is_empty());
   1502   DCHECK(active_live_ranges_.is_empty());
   1503   DCHECK(inactive_live_ranges_.is_empty());
   1504 
   1505   if (mode_ == DOUBLE_REGISTERS) {
   1506     for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
   1507       LiveRange* current = fixed_double_live_ranges_.at(i);
   1508       if (current != NULL) {
   1509         AddToInactive(current);
   1510       }
   1511     }
   1512   } else {
   1513     DCHECK(mode_ == GENERAL_REGISTERS);
   1514     for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
   1515       LiveRange* current = fixed_live_ranges_.at(i);
   1516       if (current != NULL) {
   1517         AddToInactive(current);
   1518       }
   1519     }
   1520   }
   1521 
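  // Main linear-scan loop: pop the unhandled range with the lowest start
  // position (kept at the end of the sorted list), update the active and
  // inactive sets, and try to give the range a register.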
   1522   while (!unhandled_live_ranges_.is_empty()) {
   1523     DCHECK(UnhandledIsSorted());
   1524     LiveRange* current = unhandled_live_ranges_.RemoveLast();
   1525     DCHECK(UnhandledIsSorted());
   1526     LifetimePosition position = current->Start();
   1527 #ifdef DEBUG
   1528     allocation_finger_ = position;
   1529 #endif
   1530     TraceAlloc("Processing interval %d start=%d\n",
   1531                current->id(),
   1532                position.Value());
   1533 
   1534     if (current->HasAllocatedSpillOperand()) {
   1535       TraceAlloc("Live range %d already has a spill operand\n", current->id());
   1536       LifetimePosition next_pos = position;
   1537       if (IsGapAt(next_pos.InstructionIndex())) {
   1538         next_pos = next_pos.NextInstruction();
   1539       }
   1540       UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
   1541       // If the range already has a spill operand and it doesn't need a
   1542       // register immediately, split it and spill the first part of the range.
   1543       if (pos == NULL) {
   1544         Spill(current);
   1545         continue;
   1546       } else if (pos->pos().Value() >
   1547                  current->Start().NextInstruction().Value()) {
        // Do not spill the live range eagerly if a use position that can
        // benefit from the register is too close to the start of the range.
   1550         SpillBetween(current, current->Start(), pos->pos());
   1551         if (!AllocationOk()) return;
   1552         DCHECK(UnhandledIsSorted());
   1553         continue;
   1554       }
   1555     }
   1556 
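    // Retire active ranges that end at or before the current position, and
    // move ranges that do not cover it to the inactive set.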
   1557     for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1558       LiveRange* cur_active = active_live_ranges_.at(i);
   1559       if (cur_active->End().Value() <= position.Value()) {
   1560         ActiveToHandled(cur_active);
   1561         --i;  // The live range was removed from the list of active live ranges.
   1562       } else if (!cur_active->Covers(position)) {
   1563         ActiveToInactive(cur_active);
   1564         --i;  // The live range was removed from the list of active live ranges.
   1565       }
   1566     }
   1567 
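    // Symmetrically, retire inactive ranges that have ended and reactivate
    // those that cover the current position.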
   1568     for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1569       LiveRange* cur_inactive = inactive_live_ranges_.at(i);
   1570       if (cur_inactive->End().Value() <= position.Value()) {
   1571         InactiveToHandled(cur_inactive);
   1572         --i;  // Live range was removed from the list of inactive live ranges.
   1573       } else if (cur_inactive->Covers(position)) {
   1574         InactiveToActive(cur_inactive);
   1575         --i;  // Live range was removed from the list of inactive live ranges.
   1576       }
   1577     }
   1578 
   1579     DCHECK(!current->HasRegisterAssigned() && !current->IsSpilled());
   1580 
   1581     bool result = TryAllocateFreeReg(current);
   1582     if (!AllocationOk()) return;
   1583 
   1584     if (!result) AllocateBlockedReg(current);
   1585     if (!AllocationOk()) return;
   1586 
   1587     if (current->HasRegisterAssigned()) {
   1588       AddToActive(current);
   1589     }
   1590   }
   1591 
   1592   reusable_slots_.Rewind(0);
   1593   active_live_ranges_.Rewind(0);
   1594   inactive_live_ranges_.Rewind(0);
   1595 }
   1596 
   1597 
   1598 const char* LAllocator::RegisterName(int allocation_index) {
   1599   if (mode_ == GENERAL_REGISTERS) {
   1600     return Register::from_code(allocation_index).ToString();
   1601   } else {
   1602     return DoubleRegister::from_code(allocation_index).ToString();
   1603   }
   1604 }
   1605 
   1606 
   1607 void LAllocator::TraceAlloc(const char* msg, ...) {
   1608   if (FLAG_trace_alloc) {
   1609     va_list arguments;
   1610     va_start(arguments, msg);
   1611     base::OS::VPrint(msg, arguments);
   1612     va_end(arguments);
   1613   }
   1614 }
   1615 
   1616 
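// Tagged values must be tracked in pointer maps for the GC; values statically
// known to be Smis are excluded because they are encoded as immediates rather
// than heap pointers.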
   1617 bool LAllocator::HasTaggedValue(int virtual_register) const {
   1618   HValue* value = graph_->LookupValue(virtual_register);
   1619   if (value == NULL) return false;
   1620   return value->representation().IsTagged() && !value->type().IsSmi();
   1621 }
   1622 
   1623 
   1624 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
   1625   if (virtual_register < first_artificial_register_) {
   1626     HValue* value = graph_->LookupValue(virtual_register);
   1627     if (value != NULL && value->representation().IsDouble()) {
   1628       return DOUBLE_REGISTERS;
   1629     }
   1630   } else if (double_artificial_registers_.Contains(
   1631       virtual_register - first_artificial_register_)) {
   1632     return DOUBLE_REGISTERS;
   1633   }
   1634 
   1635   return GENERAL_REGISTERS;
   1636 }
   1637 
   1638 
   1639 void LAllocator::AddToActive(LiveRange* range) {
   1640   TraceAlloc("Add live range %d to active\n", range->id());
   1641   active_live_ranges_.Add(range, zone());
   1642 }
   1643 
   1644 
   1645 void LAllocator::AddToInactive(LiveRange* range) {
   1646   TraceAlloc("Add live range %d to inactive\n", range->id());
   1647   inactive_live_ranges_.Add(range, zone());
   1648 }
   1649 
   1650 
   1651 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
   1652   if (range == NULL || range->IsEmpty()) return;
   1653   DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
   1654   DCHECK(allocation_finger_.Value() <= range->Start().Value());
   1655   for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
   1656     LiveRange* cur_range = unhandled_live_ranges_.at(i);
   1657     if (range->ShouldBeAllocatedBefore(cur_range)) {
   1658       TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
   1659       unhandled_live_ranges_.InsertAt(i + 1, range, zone());
   1660       DCHECK(UnhandledIsSorted());
   1661       return;
   1662     }
   1663   }
   1664   TraceAlloc("Add live range %d to unhandled at start\n", range->id());
   1665   unhandled_live_ranges_.InsertAt(0, range, zone());
   1666   DCHECK(UnhandledIsSorted());
   1667 }
   1668 
   1669 
   1670 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
   1671   if (range == NULL || range->IsEmpty()) return;
   1672   DCHECK(!range->HasRegisterAssigned() && !range->IsSpilled());
   1673   TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
   1674   unhandled_live_ranges_.Add(range, zone());
   1675 }
   1676 
   1677 
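// Comparator for unhandled live ranges: returns a positive value when *a
// should be allocated before *b, so an ascending sort places such ranges
// nearer the end of the list, from which they are popped.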
   1678 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
   1679   DCHECK(!(*a)->ShouldBeAllocatedBefore(*b) ||
   1680          !(*b)->ShouldBeAllocatedBefore(*a));
   1681   if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
   1682   if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
   1683   return (*a)->id() - (*b)->id();
   1684 }
   1685 
   1686 
   1687 // Sort the unhandled live ranges so that the ranges to be processed first are
   1688 // at the end of the array list.  This is convenient for the register allocation
   1689 // algorithm because it is efficient to remove elements from the end.
   1690 void LAllocator::SortUnhandled() {
   1691   TraceAlloc("Sort unhandled\n");
   1692   unhandled_live_ranges_.Sort(&UnhandledSortHelper);
   1693 }
   1694 
   1695 
   1696 bool LAllocator::UnhandledIsSorted() {
   1697   int len = unhandled_live_ranges_.length();
   1698   for (int i = 1; i < len; i++) {
   1699     LiveRange* a = unhandled_live_ranges_.at(i - 1);
   1700     LiveRange* b = unhandled_live_ranges_.at(i);
   1701     if (a->Start().Value() < b->Start().Value()) return false;
   1702   }
   1703   return true;
   1704 }
   1705 
   1706 
   1707 void LAllocator::FreeSpillSlot(LiveRange* range) {
  // The spill slot can be freed only when this is the last part of the range.
   1709   if (range->next() != NULL) return;
   1710 
   1711   if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
   1712 
   1713   int index = range->TopLevel()->GetSpillOperand()->index();
   1714   if (index >= 0) {
   1715     reusable_slots_.Add(range, zone());
   1716   }
   1717 }
   1718 
   1719 
   1720 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
   1721   if (reusable_slots_.is_empty()) return NULL;
   1722   if (reusable_slots_.first()->End().Value() >
   1723       range->TopLevel()->Start().Value()) {
   1724     return NULL;
   1725   }
   1726   LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
   1727   reusable_slots_.Remove(0);
   1728   return result;
   1729 }
   1730 
   1731 
   1732 void LAllocator::ActiveToHandled(LiveRange* range) {
   1733   DCHECK(active_live_ranges_.Contains(range));
   1734   active_live_ranges_.RemoveElement(range);
   1735   TraceAlloc("Moving live range %d from active to handled\n", range->id());
   1736   FreeSpillSlot(range);
   1737 }
   1738 
   1739 
   1740 void LAllocator::ActiveToInactive(LiveRange* range) {
   1741   DCHECK(active_live_ranges_.Contains(range));
   1742   active_live_ranges_.RemoveElement(range);
   1743   inactive_live_ranges_.Add(range, zone());
   1744   TraceAlloc("Moving live range %d from active to inactive\n", range->id());
   1745 }
   1746 
   1747 
   1748 void LAllocator::InactiveToHandled(LiveRange* range) {
   1749   DCHECK(inactive_live_ranges_.Contains(range));
   1750   inactive_live_ranges_.RemoveElement(range);
   1751   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
   1752   FreeSpillSlot(range);
   1753 }
   1754 
   1755 
   1756 void LAllocator::InactiveToActive(LiveRange* range) {
   1757   DCHECK(inactive_live_ranges_.Contains(range));
   1758   inactive_live_ranges_.RemoveElement(range);
   1759   active_live_ranges_.Add(range, zone());
   1760   TraceAlloc("Moving live range %d from inactive to active\n", range->id());
   1761 }
   1762 
   1763 
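// Try to find a register that is free for the whole of the current live
// range: compute, for each register, the position until which it stays free,
// prefer a hinted register that is free long enough, and otherwise take the
// register that stays free the longest, splitting the range if needed.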
   1764 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
   1765   DCHECK(DoubleRegister::kMaxNumRegisters >= Register::kNumRegisters);
   1766 
   1767   LifetimePosition free_until_pos[DoubleRegister::kMaxNumRegisters];
   1768 
   1769   for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
   1770     free_until_pos[i] = LifetimePosition::MaxPosition();
   1771   }
   1772 
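  // Registers assigned to active ranges are not free at all.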
   1773   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1774     LiveRange* cur_active = active_live_ranges_.at(i);
   1775     free_until_pos[cur_active->assigned_register()] =
   1776         LifetimePosition::FromInstructionIndex(0);
   1777   }
   1778 
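  // A register held by an inactive range only becomes unavailable at the
  // first position where that range intersects the current one.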
   1779   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1780     LiveRange* cur_inactive = inactive_live_ranges_.at(i);
   1781     DCHECK(cur_inactive->End().Value() > current->Start().Value());
   1782     LifetimePosition next_intersection =
   1783         cur_inactive->FirstIntersection(current);
   1784     if (!next_intersection.IsValid()) continue;
   1785     int cur_reg = cur_inactive->assigned_register();
   1786     free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
   1787   }
   1788 
   1789   LOperand* hint = current->FirstHint();
   1790   if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
   1791     int register_index = hint->index();
   1792     TraceAlloc(
   1793         "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
   1794         RegisterName(register_index),
   1795         free_until_pos[register_index].Value(),
   1796         current->id(),
   1797         current->End().Value());
   1798 
   1799     // The desired register is free until the end of the current live range.
   1800     if (free_until_pos[register_index].Value() >= current->End().Value()) {
   1801       TraceAlloc("Assigning preferred reg %s to live range %d\n",
   1802                  RegisterName(register_index),
   1803                  current->id());
   1804       SetLiveRangeAssignedRegister(current, register_index);
   1805       return true;
   1806     }
   1807   }
   1808 
   1809   // Find the register which stays free for the longest time.
   1810   int reg = allocatable_register_codes_[0];
   1811   for (int i = 1; i < RegisterCount(); ++i) {
   1812     int code = allocatable_register_codes_[i];
   1813     if (free_until_pos[code].Value() > free_until_pos[reg].Value()) {
   1814       reg = code;
   1815     }
   1816   }
   1817 
   1818   LifetimePosition pos = free_until_pos[reg];
   1819 
   1820   if (pos.Value() <= current->Start().Value()) {
   1821     // All registers are blocked.
   1822     return false;
   1823   }
   1824 
   1825   if (pos.Value() < current->End().Value()) {
   1826     // Register reg is available at the range start but becomes blocked before
   1827     // the range end. Split current at position where it becomes blocked.
   1828     LiveRange* tail = SplitRangeAt(current, pos);
   1829     if (!AllocationOk()) return false;
   1830     AddToUnhandledSorted(tail);
   1831   }
   1832 
  // Register reg is available at the range start and is free until
  // the range end.
   1836   DCHECK(pos.Value() >= current->End().Value());
   1837   TraceAlloc("Assigning free reg %s to live range %d\n",
   1838              RegisterName(reg),
   1839              current->id());
   1840   SetLiveRangeAssignedRegister(current, reg);
   1841 
   1842   return true;
   1843 }
   1844 
   1845 
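// Called when no register is free for the whole current range. Picks the
// register whose next use is farthest away; if even that register is needed
// before the current range's first register use, the head of the current
// range is spilled instead, otherwise the register is assigned and the
// conflicting ranges are split and spilled.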
   1846 void LAllocator::AllocateBlockedReg(LiveRange* current) {
   1847   UsePosition* register_use = current->NextRegisterPosition(current->Start());
   1848   if (register_use == NULL) {
   1849     // There is no use in the current live range that requires a register.
   1850     // We can just spill it.
   1851     Spill(current);
   1852     return;
   1853   }
   1854 
  LifetimePosition use_pos[DoubleRegister::kMaxNumRegisters];
   1857   LifetimePosition block_pos[DoubleRegister::kMaxNumRegisters];
   1858 
   1859   for (int i = 0; i < DoubleRegister::kMaxNumRegisters; i++) {
   1860     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
   1861   }
   1862 
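  // A fixed active range, or one that cannot be spilled at the current start,
  // blocks its register outright; otherwise the register is usable up to the
  // range's next register-beneficial use (or its end).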
   1863   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1864     LiveRange* range = active_live_ranges_[i];
   1865     int cur_reg = range->assigned_register();
   1866     if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
   1867       block_pos[cur_reg] = use_pos[cur_reg] =
   1868           LifetimePosition::FromInstructionIndex(0);
   1869     } else {
   1870       UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
   1871           current->Start());
   1872       if (next_use == NULL) {
   1873         use_pos[cur_reg] = range->End();
   1874       } else {
   1875         use_pos[cur_reg] = next_use->pos();
   1876       }
   1877     }
   1878   }
   1879 
   1880   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1881     LiveRange* range = inactive_live_ranges_.at(i);
   1882     DCHECK(range->End().Value() > current->Start().Value());
   1883     LifetimePosition next_intersection = range->FirstIntersection(current);
   1884     if (!next_intersection.IsValid()) continue;
   1885     int cur_reg = range->assigned_register();
   1886     if (range->IsFixed()) {
   1887       block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
   1888       use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
   1889     } else {
   1890       use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
   1891     }
   1892   }
   1893 
   1894   int reg = allocatable_register_codes_[0];
   1895   for (int i = 1; i < RegisterCount(); ++i) {
   1896     int code = allocatable_register_codes_[i];
   1897     if (use_pos[code].Value() > use_pos[reg].Value()) {
   1898       reg = code;
   1899     }
   1900   }
   1901 
   1902   LifetimePosition pos = use_pos[reg];
   1903 
   1904   if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill the starting part of the live range up to that use.
   1907     SpillBetween(current, current->Start(), register_use->pos());
   1908     return;
   1909   }
   1910 
   1911   if (block_pos[reg].Value() < current->End().Value()) {
    // The register becomes blocked before the current range ends. Split the
    // range before that position.
   1914     LiveRange* tail = SplitBetween(current,
   1915                                    current->Start(),
   1916                                    block_pos[reg].InstructionStart());
   1917     if (!AllocationOk()) return;
   1918     AddToUnhandledSorted(tail);
   1919   }
   1920 
   1921   // Register reg is not blocked for the whole range.
   1922   DCHECK(block_pos[reg].Value() >= current->End().Value());
   1923   TraceAlloc("Assigning blocked reg %s to live range %d\n",
   1924              RegisterName(reg),
   1925              current->id());
   1926   SetLiveRangeAssignedRegister(current, reg);
   1927 
  // This register was not free. Thus we need to find and spill parts of
  // active and inactive live ranges that use the same register at the same
  // lifetime positions as current.
   1931   SplitAndSpillIntersecting(current);
   1932 }
   1933 
   1934 
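// When a range is about to be spilled at pos inside a loop, try to hoist the
// spill to the header of the outermost enclosing loop that the range covers,
// provided no register-beneficial use occurs in between.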
   1935 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
   1936                                                     LifetimePosition pos) {
   1937   HBasicBlock* block = GetBlock(pos.InstructionStart());
   1938   HBasicBlock* loop_header =
   1939       block->IsLoopHeader() ? block : block->parent_loop_header();
   1940 
   1941   if (loop_header == NULL) return pos;
   1942 
  UsePosition* prev_use = range->PreviousUsePositionRegisterIsBeneficial(pos);
   1945 
   1946   while (loop_header != NULL) {
    // We are going to spill the live range inside the loop.
    // If possible, try to move the spilling position backwards to the loop
    // header; this reduces the number of memory moves on the back edge.
   1950     LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
   1951         loop_header->first_instruction_index());
   1952 
   1953     if (range->Covers(loop_start)) {
   1954       if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
        // No register-beneficial use inside the loop before this position.
   1956         pos = loop_start;
   1957       }
   1958     }
   1959 
   1960     // Try hoisting out to an outer loop.
   1961     loop_header = loop_header->parent_loop_header();
   1962   }
   1963 
   1964   return pos;
   1965 }
   1966 
   1967 
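// Evict conflicting ranges from the register just assigned to current: every
// active range, and every non-fixed inactive range, that holds the same
// register and intersects current is split, the overlapping part is spilled,
// and the range is retired.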
   1968 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
   1969   DCHECK(current->HasRegisterAssigned());
   1970   int reg = current->assigned_register();
   1971   LifetimePosition split_pos = current->Start();
   1972   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1973     LiveRange* range = active_live_ranges_[i];
   1974     if (range->assigned_register() == reg) {
   1975       UsePosition* next_pos = range->NextRegisterPosition(current->Start());
   1976       LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
   1977       if (next_pos == NULL) {
   1978         SpillAfter(range, spill_pos);
   1979       } else {
        // When spilling between spill_pos and next_pos, ensure that the range
        // remains spilled at least until the start of the current live range.
        // This guarantees that we will not introduce new unhandled ranges that
        // start before the current range, which would violate the allocation
        // invariant and lead to an inconsistent state of active and inactive
        // live ranges: ranges are allocated in order of their start positions,
        // and they are retired from active/inactive once the start of the
        // current live range is larger than their end.
   1988         SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
   1989       }
   1990       if (!AllocationOk()) return;
   1991       ActiveToHandled(range);
   1992       --i;
   1993     }
   1994   }
   1995 
   1996   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1997     LiveRange* range = inactive_live_ranges_[i];
   1998     DCHECK(range->End().Value() > current->Start().Value());
   1999     if (range->assigned_register() == reg && !range->IsFixed()) {
   2000       LifetimePosition next_intersection = range->FirstIntersection(current);
   2001       if (next_intersection.IsValid()) {
   2002         UsePosition* next_pos = range->NextRegisterPosition(current->Start());
   2003         if (next_pos == NULL) {
   2004           SpillAfter(range, split_pos);
   2005         } else {
   2006           next_intersection = Min(next_intersection, next_pos->pos());
   2007           SpillBetween(range, split_pos, next_intersection);
   2008         }
   2009         if (!AllocationOk()) return;
   2010         InactiveToHandled(range);
   2011         --i;
   2012       }
   2013     }
   2014   }
   2015 }
   2016 
   2017 
   2018 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
   2019   return pos.IsInstructionStart() &&
   2020       InstructionAt(pos.InstructionIndex())->IsLabel();
   2021 }
   2022 
   2023 
   2024 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
   2025   DCHECK(!range->IsFixed());
   2026   TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
   2027 
   2028   if (pos.Value() <= range->Start().Value()) return range;
   2029 
  // We can't properly connect live ranges if the split occurred at the end
  // of a control instruction.
   2032   DCHECK(pos.IsInstructionStart() ||
   2033          !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
   2034 
   2035   int vreg = GetVirtualRegister();
   2036   if (!AllocationOk()) return NULL;
   2037   LiveRange* result = LiveRangeFor(vreg);
   2038   range->SplitAt(pos, result, zone());
   2039   return result;
   2040 }
   2041 
   2042 
   2043 LiveRange* LAllocator::SplitBetween(LiveRange* range,
   2044                                     LifetimePosition start,
   2045                                     LifetimePosition end) {
   2046   DCHECK(!range->IsFixed());
   2047   TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
   2048              range->id(),
   2049              start.Value(),
   2050              end.Value());
   2051 
   2052   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
   2053   DCHECK(split_pos.Value() >= start.Value());
   2054   return SplitRangeAt(range, split_pos);
   2055 }
   2056 
   2057 
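// Choose a split position inside [start, end]. Prefer the entry of the
// outermost loop lying between the two positions, so the split (and any
// moves it causes) stays out of the loop body; otherwise split as late as
// possible.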
   2058 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
   2059                                                  LifetimePosition end) {
   2060   int start_instr = start.InstructionIndex();
   2061   int end_instr = end.InstructionIndex();
   2062   DCHECK(start_instr <= end_instr);
   2063 
  // We have no choice: start and end are at the same instruction.
   2065   if (start_instr == end_instr) return end;
   2066 
   2067   HBasicBlock* start_block = GetBlock(start);
   2068   HBasicBlock* end_block = GetBlock(end);
   2069 
   2070   if (end_block == start_block) {
   2071     // The interval is split in the same basic block. Split at the latest
   2072     // possible position.
   2073     return end;
   2074   }
   2075 
   2076   HBasicBlock* block = end_block;
  // Walk up to the header of the outermost loop that contains end_block but
  // starts after start_block.
   2078   while (block->parent_loop_header() != NULL &&
   2079       block->parent_loop_header()->block_id() > start_block->block_id()) {
   2080     block = block->parent_loop_header();
   2081   }
   2082 
   2083   // We did not find any suitable outer loop. Split at the latest possible
   2084   // position unless end_block is a loop header itself.
   2085   if (block == end_block && !end_block->IsLoopHeader()) return end;
   2086 
   2087   return LifetimePosition::FromInstructionIndex(
   2088       block->first_instruction_index());
   2089 }
   2090 
   2091 
   2092 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
   2093   LiveRange* second_part = SplitRangeAt(range, pos);
   2094   if (!AllocationOk()) return;
   2095   Spill(second_part);
   2096 }
   2097 
   2098 
   2099 void LAllocator::SpillBetween(LiveRange* range,
   2100                               LifetimePosition start,
   2101                               LifetimePosition end) {
   2102   SpillBetweenUntil(range, start, start, end);
   2103 }
   2104 
   2105 
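// Split the range at start and, if the resulting tail overlaps [start, end[,
// spill the piece between until and end while re-queueing the remainder; a
// tail that starts at or after end is simply re-queued as a whole.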
   2106 void LAllocator::SpillBetweenUntil(LiveRange* range,
   2107                                    LifetimePosition start,
   2108                                    LifetimePosition until,
   2109                                    LifetimePosition end) {
   2110   CHECK(start.Value() < end.Value());
   2111   LiveRange* second_part = SplitRangeAt(range, start);
   2112   if (!AllocationOk()) return;
   2113 
   2114   if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at a position in ]start+1, end[, spill the middle part,
    // and add the rest to the unhandled set.
   2118     LiveRange* third_part = SplitBetween(
   2119         second_part,
   2120         Max(second_part->Start().InstructionEnd(), until),
   2121         end.PrevInstruction().InstructionEnd());
   2122     if (!AllocationOk()) return;
   2123 
   2124     DCHECK(third_part != second_part);
   2125 
   2126     Spill(second_part);
   2127     AddToUnhandledSorted(third_part);
   2128   } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill; just add it to the unhandled set as a whole.
   2131     AddToUnhandledSorted(second_part);
   2132   }
   2133 }
   2134 
   2135 
   2136 void LAllocator::Spill(LiveRange* range) {
   2137   DCHECK(!range->IsSpilled());
   2138   TraceAlloc("Spilling live range %d\n", range->id());
   2139   LiveRange* first = range->TopLevel();
   2140 
   2141   if (!first->HasAllocatedSpillOperand()) {
   2142     LOperand* op = TryReuseSpillSlot(range);
   2143     if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
   2144     first->SetSpillOperand(op);
   2145   }
   2146   range->MakeSpilled(chunk()->zone());
   2147 }
   2148 
   2149 
   2150 int LAllocator::RegisterCount() const {
   2151   return num_registers_;
   2152 }
   2153 
   2154 
   2155 #ifdef DEBUG
   2156 
   2157 
   2158 void LAllocator::Verify() const {
   2159   for (int i = 0; i < live_ranges()->length(); ++i) {
   2160     LiveRange* current = live_ranges()->at(i);
   2161     if (current != NULL) current->Verify();
   2162   }
   2163 }
   2164 
   2165 
   2166 #endif
   2167 
   2168 
   2169 LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
   2170     : CompilationPhase(name, allocator->graph()->info()),
   2171       allocator_(allocator) {
   2172   if (FLAG_hydrogen_stats) {
   2173     allocator_zone_start_allocation_size_ =
   2174         allocator->zone()->allocation_size();
   2175   }
   2176 }
   2177 
   2178 
   2179 LAllocatorPhase::~LAllocatorPhase() {
   2180   if (FLAG_hydrogen_stats) {
   2181     size_t size = allocator_->zone()->allocation_size() -
   2182                   allocator_zone_start_allocation_size_;
   2183     isolate()->GetHStatistics()->SaveTiming(name(), base::TimeDelta(), size);
   2184   }
   2185 
   2186   if (ShouldProduceTraceOutput()) {
   2187     isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
   2188     isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
   2189   }
   2190 
   2191 #ifdef DEBUG
   2192   if (allocator_ != NULL) allocator_->Verify();
   2193 #endif
   2194 }
   2195 
   2196 
   2197 }  // namespace internal
   2198 }  // namespace v8
   2199