      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 #include "lithium-allocator-inl.h"
     30 
     31 #include "hydrogen.h"
     32 #include "string-stream.h"
     33 
     34 #if V8_TARGET_ARCH_IA32
     35 #include "ia32/lithium-ia32.h"
     36 #elif V8_TARGET_ARCH_X64
     37 #include "x64/lithium-x64.h"
     38 #elif V8_TARGET_ARCH_ARM
     39 #include "arm/lithium-arm.h"
     40 #elif V8_TARGET_ARCH_MIPS
     41 #include "mips/lithium-mips.h"
     42 #else
     43 #error "Unknown architecture."
     44 #endif
     45 
     46 namespace v8 {
     47 namespace internal {
     48 
     49 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
     50   return a.Value() < b.Value() ? a : b;
     51 }
     52 
     53 
     54 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
     55   return a.Value() > b.Value() ? a : b;
     56 }
     57 
     58 
     59 UsePosition::UsePosition(LifetimePosition pos,
     60                          LOperand* operand,
     61                          LOperand* hint)
     62     : operand_(operand),
     63       hint_(hint),
     64       pos_(pos),
     65       next_(NULL),
     66       requires_reg_(false),
     67       register_beneficial_(true) {
     68   if (operand_ != NULL && operand_->IsUnallocated()) {
     69     LUnallocated* unalloc = LUnallocated::cast(operand_);
     70     requires_reg_ = unalloc->HasRegisterPolicy();
     71     register_beneficial_ = !unalloc->HasAnyPolicy();
     72   }
     73   ASSERT(pos_.IsValid());
     74 }
     75 
     76 
     77 bool UsePosition::HasHint() const {
     78   return hint_ != NULL && !hint_->IsUnallocated();
     79 }
     80 
     81 
     82 bool UsePosition::RequiresRegister() const {
     83   return requires_reg_;
     84 }
     85 
     86 
     87 bool UsePosition::RegisterIsBeneficial() const {
     88   return register_beneficial_;
     89 }
     90 
     91 
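        // Split this interval at pos: this interval is truncated to [start, pos[
        // and a new interval [pos, end[ is linked in directly after it.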
     92 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
     93   ASSERT(Contains(pos) && pos.Value() != start().Value());
     94   UseInterval* after = new(zone) UseInterval(pos, end_);
     95   after->next_ = next_;
     96   next_ = after;
     97   end_ = pos;
     98 }
     99 
    100 
    101 #ifdef DEBUG
    102 
    103 
    104 void LiveRange::Verify() const {
    105   UsePosition* cur = first_pos_;
    106   while (cur != NULL) {
    107     ASSERT(Start().Value() <= cur->pos().Value() &&
    108            cur->pos().Value() <= End().Value());
    109     cur = cur->next();
    110   }
    111 }
    112 
    113 
    114 bool LiveRange::HasOverlap(UseInterval* target) const {
    115   UseInterval* current_interval = first_interval_;
    116   while (current_interval != NULL) {
    117     // Intervals overlap if the start of one is contained in the other.
    118     if (current_interval->Contains(target->start()) ||
    119         target->Contains(current_interval->start())) {
    120       return true;
    121     }
    122     current_interval = current_interval->next();
    123   }
    124   return false;
    125 }
    126 
    127 
    128 #endif
    129 
    130 
    131 LiveRange::LiveRange(int id, Zone* zone)
    132     : id_(id),
    133       spilled_(false),
    134       kind_(UNALLOCATED_REGISTERS),
    135       assigned_register_(kInvalidAssignment),
    136       last_interval_(NULL),
    137       first_interval_(NULL),
    138       first_pos_(NULL),
    139       parent_(NULL),
    140       next_(NULL),
    141       current_interval_(NULL),
    142       last_processed_use_(NULL),
    143       current_hint_operand_(NULL),
    144       spill_operand_(new(zone) LOperand()),
    145       spill_start_index_(kMaxInt) { }
    146 
    147 
    148 void LiveRange::set_assigned_register(int reg, Zone* zone) {
    149   ASSERT(!HasRegisterAssigned() && !IsSpilled());
    150   assigned_register_ = reg;
    151   ConvertOperands(zone);
    152 }
    153 
    154 
    155 void LiveRange::MakeSpilled(Zone* zone) {
    156   ASSERT(!IsSpilled());
    157   ASSERT(TopLevel()->HasAllocatedSpillOperand());
    158   spilled_ = true;
    159   assigned_register_ = kInvalidAssignment;
    160   ConvertOperands(zone);
    161 }
    162 
    163 
    164 bool LiveRange::HasAllocatedSpillOperand() const {
    165   ASSERT(spill_operand_ != NULL);
    166   return !spill_operand_->IsIgnored();
    167 }
    168 
    169 
    170 void LiveRange::SetSpillOperand(LOperand* operand) {
    171   ASSERT(!operand->IsUnallocated());
    172   ASSERT(spill_operand_ != NULL);
    173   ASSERT(spill_operand_->IsIgnored());
    174   spill_operand_->ConvertTo(operand->kind(), operand->index());
    175 }
    176 
    177 
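        // Returns the first use position at or after start. The result is cached,
        // so queries are expected to use non-decreasing start positions.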
    178 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
    179   UsePosition* use_pos = last_processed_use_;
    180   if (use_pos == NULL) use_pos = first_pos();
    181   while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    182     use_pos = use_pos->next();
    183   }
    184   last_processed_use_ = use_pos;
    185   return use_pos;
    186 }
    187 
    188 
    189 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    190     LifetimePosition start) {
    191   UsePosition* pos = NextUsePosition(start);
    192   while (pos != NULL && !pos->RegisterIsBeneficial()) {
    193     pos = pos->next();
    194   }
    195   return pos;
    196 }
    197 
    198 
    199 UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    200     LifetimePosition start) {
    201   UsePosition* pos = first_pos();
    202   UsePosition* prev = NULL;
    203   while (pos != NULL && pos->pos().Value() < start.Value()) {
    204     if (pos->RegisterIsBeneficial()) prev = pos;
    205     pos = pos->next();
    206   }
    207   return prev;
    208 }
    209 
    210 
    211 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
    212   UsePosition* pos = NextUsePosition(start);
    213   while (pos != NULL && !pos->RequiresRegister()) {
    214     pos = pos->next();
    215   }
    216   return pos;
    217 }
    218 
    219 
    220 bool LiveRange::CanBeSpilled(LifetimePosition pos) {
    221   // We cannot spill a live range that has a use requiring a register
    222   // at the current or the immediate next position.
    223   UsePosition* use_pos = NextRegisterPosition(pos);
    224   if (use_pos == NULL) return true;
    225   return
    226       use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
    227 }
    228 
    229 
    230 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
    231   LOperand* op = NULL;
    232   if (HasRegisterAssigned()) {
    233     ASSERT(!IsSpilled());
    234     switch (Kind()) {
    235       case GENERAL_REGISTERS:
    236         op = LRegister::Create(assigned_register(), zone);
    237         break;
    238       case DOUBLE_REGISTERS:
    239         op = LDoubleRegister::Create(assigned_register(), zone);
    240         break;
    241       default:
    242         UNREACHABLE();
    243     }
    244   } else if (IsSpilled()) {
    245     ASSERT(!HasRegisterAssigned());
    246     op = TopLevel()->GetSpillOperand();
    247     ASSERT(!op->IsUnallocated());
    248   } else {
    249     LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
    250     unalloc->set_virtual_register(id_);
    251     op = unalloc;
    252   }
    253   return op;
    254 }
    255 
    256 
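        // Picks the interval from which to start a search for position: the cached
        // interval is reused if it does not start beyond position, otherwise the
        // search restarts from the first interval.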
    257 UseInterval* LiveRange::FirstSearchIntervalForPosition(
    258     LifetimePosition position) const {
    259   if (current_interval_ == NULL) return first_interval_;
    260   if (current_interval_->start().Value() > position.Value()) {
    261     current_interval_ = NULL;
    262     return first_interval_;
    263   }
    264   return current_interval_;
    265 }
    266 
    267 
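        // Advances the cached search interval to to_start_of, unless doing so would
        // move it past but_not_past or backwards.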
    268 void LiveRange::AdvanceLastProcessedMarker(
    269     UseInterval* to_start_of, LifetimePosition but_not_past) const {
    270   if (to_start_of == NULL) return;
    271   if (to_start_of->start().Value() > but_not_past.Value()) return;
    272   LifetimePosition start =
    273       current_interval_ == NULL ? LifetimePosition::Invalid()
    274                                 : current_interval_->start();
    275   if (to_start_of->start().Value() > start.Value()) {
    276     current_interval_ = to_start_of;
    277   }
    278 }
    279 
    280 
    281 void LiveRange::SplitAt(LifetimePosition position,
    282                         LiveRange* result,
    283                         Zone* zone) {
    284   ASSERT(Start().Value() < position.Value());
    285   ASSERT(result->IsEmpty());
    286   // Find the last interval that ends before the position. If the
    287   // position is contained in one of the intervals in the chain, we
    288   // split that interval and use the first part.
    289   UseInterval* current = FirstSearchIntervalForPosition(position);
    290 
    291   // If the split position coincides with the beginning of a use interval
    292   // we need to split use positions in a special way.
    293   bool split_at_start = false;
    294 
    295   if (current->start().Value() == position.Value()) {
    296     // When splitting at start we need to locate the previous use interval.
    297     current = first_interval_;
    298   }
    299 
    300   while (current != NULL) {
    301     if (current->Contains(position)) {
    302       current->SplitAt(position, zone);
    303       break;
    304     }
    305     UseInterval* next = current->next();
    306     if (next->start().Value() >= position.Value()) {
    307       split_at_start = (next->start().Value() == position.Value());
    308       break;
    309     }
    310     current = next;
    311   }
    312 
    313   // Partition the original use intervals between the two live ranges.
    314   UseInterval* before = current;
    315   UseInterval* after = before->next();
    316   result->last_interval_ = (last_interval_ == before)
    317       ? after            // Only interval in the range after split.
    318       : last_interval_;  // Last interval of the original range.
    319   result->first_interval_ = after;
    320   last_interval_ = before;
    321 
    322   // Find the last use position before the split and the first use
    323   // position after it.
    324   UsePosition* use_after = first_pos_;
    325   UsePosition* use_before = NULL;
    326   if (split_at_start) {
    327     // The split position coincides with the beginning of a use interval (the
    328     // end of a lifetime hole). A use at this position should be attributed
    329     // to the split child, which owns the use interval covering it.
    330     while (use_after != NULL && use_after->pos().Value() < position.Value()) {
    331       use_before = use_after;
    332       use_after = use_after->next();
    333     }
    334   } else {
    335     while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
    336       use_before = use_after;
    337       use_after = use_after->next();
    338     }
    339   }
    340 
    341   // Partition the original use positions between the two live ranges.
    342   if (use_before != NULL) {
    343     use_before->next_ = NULL;
    344   } else {
    345     first_pos_ = NULL;
    346   }
    347   result->first_pos_ = use_after;
    348 
    349   // Discard cached iteration state. It might be pointing
    350   // to a use that no longer belongs to this live range.
    351   last_processed_use_ = NULL;
    352   current_interval_ = NULL;
    353 
    354   // Link the new live range into the chain immediately after this one,
    355   // before any other ranges already linked from the range being split.
    356   result->parent_ = (parent_ == NULL) ? this : parent_;
    357   result->kind_ = result->parent_->kind_;
    358   result->next_ = next_;
    359   next_ = result;
    360 
    361 #ifdef DEBUG
    362   Verify();
    363   result->Verify();
    364 #endif
    365 }
    366 
    367 
    368 // This implements an ordering on live ranges so that they are ordered by their
    369 // start positions.  This is needed for the correctness of the register
    370 // allocation algorithm.  If two live ranges start at the same offset then there
    371 // is a tie breaker based on where the value is first used.  This part of the
    372 // ordering is merely a heuristic.
    373 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
    374   LifetimePosition start = Start();
    375   LifetimePosition other_start = other->Start();
    376   if (start.Value() == other_start.Value()) {
    377     UsePosition* pos = first_pos();
    378     if (pos == NULL) return false;
    379     UsePosition* other_pos = other->first_pos();
    380     if (other_pos == NULL) return true;
    381     return pos->pos().Value() < other_pos->pos().Value();
    382   }
    383   return start.Value() < other_start.Value();
    384 }
    385 
    386 
    387 void LiveRange::ShortenTo(LifetimePosition start) {
    388   LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
    389   ASSERT(first_interval_ != NULL);
    390   ASSERT(first_interval_->start().Value() <= start.Value());
    391   ASSERT(start.Value() < first_interval_->end().Value());
    392   first_interval_->set_start(start);
    393 }
    394 
    395 
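        // Guarantees that the live range covers [start, end[: leading intervals that
        // start at or before end are absorbed into a single interval prepended to
        // the chain.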
    396 void LiveRange::EnsureInterval(LifetimePosition start,
    397                                LifetimePosition end,
    398                                Zone* zone) {
    399   LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
    400                          id_,
    401                          start.Value(),
    402                          end.Value());
    403   LifetimePosition new_end = end;
    404   while (first_interval_ != NULL &&
    405          first_interval_->start().Value() <= end.Value()) {
    406     if (first_interval_->end().Value() > end.Value()) {
    407       new_end = first_interval_->end();
    408     }
    409     first_interval_ = first_interval_->next();
    410   }
    411 
    412   UseInterval* new_interval = new(zone) UseInterval(start, new_end);
    413   new_interval->next_ = first_interval_;
    414   first_interval_ = new_interval;
    415   if (new_interval->next() == NULL) {
    416     last_interval_ = new_interval;
    417   }
    418 }
    419 
    420 
    421 void LiveRange::AddUseInterval(LifetimePosition start,
    422                                LifetimePosition end,
    423                                Zone* zone) {
    424   LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
    425                          id_,
    426                          start.Value(),
    427                          end.Value());
    428   if (first_interval_ == NULL) {
    429     UseInterval* interval = new(zone) UseInterval(start, end);
    430     first_interval_ = interval;
    431     last_interval_ = interval;
    432   } else {
    433     if (end.Value() == first_interval_->start().Value()) {
    434       first_interval_->set_start(start);
    435     } else if (end.Value() < first_interval_->start().Value()) {
    436       UseInterval* interval = new(zone) UseInterval(start, end);
    437       interval->set_next(first_interval_);
    438       first_interval_ = interval;
    439     } else {
    440       // The order of instruction processing (see ProcessInstructions)
    441       // guarantees that each new use interval either precedes or intersects
    442       // with the last added interval.
    443       ASSERT(start.Value() < first_interval_->end().Value());
    444       first_interval_->start_ = Min(start, first_interval_->start_);
    445       first_interval_->end_ = Max(end, first_interval_->end_);
    446     }
    447   }
    448 }
    449 
    450 
    451 void LiveRange::AddUsePosition(LifetimePosition pos,
    452                                LOperand* operand,
    453                                LOperand* hint,
    454                                Zone* zone) {
    455   LAllocator::TraceAlloc("Add to live range %d use position %d\n",
    456                          id_,
    457                          pos.Value());
    458   UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
    459   UsePosition* prev_hint = NULL;
    460   UsePosition* prev = NULL;
    461   UsePosition* current = first_pos_;
    462   while (current != NULL && current->pos().Value() < pos.Value()) {
    463     prev_hint = current->HasHint() ? current : prev_hint;
    464     prev = current;
    465     current = current->next();
    466   }
    467 
    468   if (prev == NULL) {
    469     use_pos->set_next(first_pos_);
    470     first_pos_ = use_pos;
    471   } else {
    472     use_pos->next_ = prev->next_;
    473     prev->next_ = use_pos;
    474   }
    475 
    476   if (prev_hint == NULL && use_pos->HasHint()) {
    477     current_hint_operand_ = hint;
    478   }
    479 }
    480 
    481 
    482 void LiveRange::ConvertOperands(Zone* zone) {
    483   LOperand* op = CreateAssignedOperand(zone);
    484   UsePosition* use_pos = first_pos();
    485   while (use_pos != NULL) {
    486     ASSERT(Start().Value() <= use_pos->pos().Value() &&
    487            use_pos->pos().Value() <= End().Value());
    488 
    489     if (use_pos->HasOperand()) {
    490       ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
    491              !use_pos->RequiresRegister());
    492       use_pos->operand()->ConvertTo(op->kind(), op->index());
    493     }
    494     use_pos = use_pos->next();
    495   }
    496 }
    497 
    498 
    499 bool LiveRange::CanCover(LifetimePosition position) const {
    500   if (IsEmpty()) return false;
    501   return Start().Value() <= position.Value() &&
    502          position.Value() < End().Value();
    503 }
    504 
    505 
    506 bool LiveRange::Covers(LifetimePosition position) {
    507   if (!CanCover(position)) return false;
    508   UseInterval* start_search = FirstSearchIntervalForPosition(position);
    509   for (UseInterval* interval = start_search;
    510        interval != NULL;
    511        interval = interval->next()) {
    512     ASSERT(interval->next() == NULL ||
    513            interval->next()->start().Value() >= interval->start().Value());
    514     AdvanceLastProcessedMarker(interval, position);
    515     if (interval->Contains(position)) return true;
    516     if (interval->start().Value() > position.Value()) return false;
    517   }
    518   return false;
    519 }
    520 
    521 
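        // Returns the first position at which this range and other overlap, or an
        // invalid position if their intervals are disjoint.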
    522 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
    523   UseInterval* b = other->first_interval();
    524   if (b == NULL) return LifetimePosition::Invalid();
    525   LifetimePosition advance_last_processed_up_to = b->start();
    526   UseInterval* a = FirstSearchIntervalForPosition(b->start());
    527   while (a != NULL && b != NULL) {
    528     if (a->start().Value() > other->End().Value()) break;
    529     if (b->start().Value() > End().Value()) break;
    530     LifetimePosition cur_intersection = a->Intersect(b);
    531     if (cur_intersection.IsValid()) {
    532       return cur_intersection;
    533     }
    534     if (a->start().Value() < b->start().Value()) {
    535       a = a->next();
    536       if (a == NULL || a->start().Value() > other->End().Value()) break;
    537       AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    538     } else {
    539       b = b->next();
    540     }
    541   }
    542   return LifetimePosition::Invalid();
    543 }
    544 
    545 
    546 LAllocator::LAllocator(int num_values, HGraph* graph)
    547     : zone_(graph->isolate()),
    548       chunk_(NULL),
    549       live_in_sets_(graph->blocks()->length(), zone()),
    550       live_ranges_(num_values * 2, zone()),
    551       fixed_live_ranges_(NULL),
    552       fixed_double_live_ranges_(NULL),
    553       unhandled_live_ranges_(num_values * 2, zone()),
    554       active_live_ranges_(8, zone()),
    555       inactive_live_ranges_(8, zone()),
    556       reusable_slots_(8, zone()),
    557       next_virtual_register_(num_values),
    558       first_artificial_register_(num_values),
    559       mode_(UNALLOCATED_REGISTERS),
    560       num_registers_(-1),
    561       graph_(graph),
    562       has_osr_entry_(false),
    563       allocation_ok_(true) { }
    564 
    565 
    566 void LAllocator::InitializeLivenessAnalysis() {
    567   // Initialize the live_in sets for each block to NULL.
    568   int block_count = graph_->blocks()->length();
    569   live_in_sets_.Initialize(block_count, zone());
    570   live_in_sets_.AddBlock(NULL, block_count, zone());
    571 }
    572 
    573 
    574 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
    575   // Compute live out for the given block, excluding liveness propagated
    576   // along backward successor edges.
    577   BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());
    578 
    579   // Process all successor blocks.
    580   for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    581     // Add values live on entry to the successor. Note that the successor's
    582     // live_in will not have been computed yet for backward edges.
    583     HBasicBlock* successor = it.Current();
    584     BitVector* live_in = live_in_sets_[successor->block_id()];
    585     if (live_in != NULL) live_out->Union(*live_in);
    586 
    587     // All phi input operands corresponding to this successor edge are live
    588     // out from this block.
    589     int index = successor->PredecessorIndexOf(block);
    590     const ZoneList<HPhi*>* phis = successor->phis();
    591     for (int i = 0; i < phis->length(); ++i) {
    592       HPhi* phi = phis->at(i);
    593       if (!phi->OperandAt(index)->IsConstant()) {
    594         live_out->Add(phi->OperandAt(index)->id());
    595       }
    596     }
    597   }
    598 
    599   return live_out;
    600 }
    601 
    602 
    603 void LAllocator::AddInitialIntervals(HBasicBlock* block,
    604                                      BitVector* live_out) {
    605   // Add an interval that includes the entire block to the live range for
    606   // each live_out value.
    607   LifetimePosition start = LifetimePosition::FromInstructionIndex(
    608       block->first_instruction_index());
    609   LifetimePosition end = LifetimePosition::FromInstructionIndex(
    610       block->last_instruction_index()).NextInstruction();
    611   BitVector::Iterator iterator(live_out);
    612   while (!iterator.Done()) {
    613     int operand_index = iterator.Current();
    614     LiveRange* range = LiveRangeFor(operand_index);
    615     range->AddUseInterval(start, end, zone());
    616     iterator.Advance();
    617   }
    618 }
    619 
    620 
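        // Fixed live ranges use negative ids; ids for fixed double registers are
        // offset past the block reserved for general registers so the two sets
        // cannot collide.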
    621 int LAllocator::FixedDoubleLiveRangeID(int index) {
    622   return -index - 1 - Register::kMaxNumAllocatableRegisters;
    623 }
    624 
    625 
    626 LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
    627                                     int pos,
    628                                     bool is_tagged) {
    629   TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
    630   ASSERT(operand->HasFixedPolicy());
    631   if (operand->HasFixedSlotPolicy()) {
    632     operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
    633   } else if (operand->HasFixedRegisterPolicy()) {
    634     int reg_index = operand->fixed_register_index();
    635     operand->ConvertTo(LOperand::REGISTER, reg_index);
    636   } else if (operand->HasFixedDoubleRegisterPolicy()) {
    637     int reg_index = operand->fixed_register_index();
    638     operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
    639   } else {
    640     UNREACHABLE();
    641   }
    642   if (is_tagged) {
    643     TraceAlloc("Fixed reg is tagged at %d\n", pos);
    644     LInstruction* instr = InstructionAt(pos);
    645     if (instr->HasPointerMap()) {
    646       instr->pointer_map()->RecordPointer(operand, chunk()->zone());
    647     }
    648   }
    649   return operand;
    650 }
    651 
    652 
    653 LiveRange* LAllocator::FixedLiveRangeFor(int index) {
    654   ASSERT(index < Register::kMaxNumAllocatableRegisters);
    655   LiveRange* result = fixed_live_ranges_[index];
    656   if (result == NULL) {
    657     result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    658     ASSERT(result->IsFixed());
    659     result->kind_ = GENERAL_REGISTERS;
    660     SetLiveRangeAssignedRegister(result, index);
    661     fixed_live_ranges_[index] = result;
    662   }
    663   return result;
    664 }
    665 
    666 
    667 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
    668   ASSERT(index < DoubleRegister::NumAllocatableRegisters());
    669   LiveRange* result = fixed_double_live_ranges_[index];
    670   if (result == NULL) {
    671     result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
    672                                    chunk()->zone());
    673     ASSERT(result->IsFixed());
    674     result->kind_ = DOUBLE_REGISTERS;
    675     SetLiveRangeAssignedRegister(result, index);
    676     fixed_double_live_ranges_[index] = result;
    677   }
    678   return result;
    679 }
    680 
    681 
    682 LiveRange* LAllocator::LiveRangeFor(int index) {
    683   if (index >= live_ranges_.length()) {
    684     live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
    685   }
    686   LiveRange* result = live_ranges_[index];
    687   if (result == NULL) {
    688     result = new(zone()) LiveRange(index, chunk()->zone());
    689     live_ranges_[index] = result;
    690   }
    691   return result;
    692 }
    693 
    694 
    695 LGap* LAllocator::GetLastGap(HBasicBlock* block) {
    696   int last_instruction = block->last_instruction_index();
    697   int index = chunk_->NearestGapPos(last_instruction);
    698   return GapAt(index);
    699 }
    700 
    701 
    702 HPhi* LAllocator::LookupPhi(LOperand* operand) const {
    703   if (!operand->IsUnallocated()) return NULL;
    704   int index = LUnallocated::cast(operand)->virtual_register();
    705   HValue* instr = graph_->LookupValue(index);
    706   if (instr != NULL && instr->IsPhi()) {
    707     return HPhi::cast(instr);
    708   }
    709   return NULL;
    710 }
    711 
    712 
    713 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
    714   if (operand->IsUnallocated()) {
    715     return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
    716   } else if (operand->IsRegister()) {
    717     return FixedLiveRangeFor(operand->index());
    718   } else if (operand->IsDoubleRegister()) {
    719     return FixedDoubleLiveRangeFor(operand->index());
    720   } else {
    721     return NULL;
    722   }
    723 }
    724 
    725 
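        // Records a definition of operand at position: an empty range (a definition
        // without a use) gets a minimal interval, otherwise the existing range is
        // shortened so that it starts at the definition.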
    726 void LAllocator::Define(LifetimePosition position,
    727                         LOperand* operand,
    728                         LOperand* hint) {
    729   LiveRange* range = LiveRangeFor(operand);
    730   if (range == NULL) return;
    731 
    732   if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    733     // This can happen if there is a definition without a use.
    734     range->AddUseInterval(position, position.NextInstruction(), zone());
    735     range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
    736   } else {
    737     range->ShortenTo(position);
    738   }
    739 
    740   if (operand->IsUnallocated()) {
    741     LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    742     range->AddUsePosition(position, unalloc_operand, hint, zone());
    743   }
    744 }
    745 
    746 
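        // Records a use of operand at position and extends its live range back to
        // block_start (instructions are processed in reverse order).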
    747 void LAllocator::Use(LifetimePosition block_start,
    748                      LifetimePosition position,
    749                      LOperand* operand,
    750                      LOperand* hint) {
    751   LiveRange* range = LiveRangeFor(operand);
    752   if (range == NULL) return;
    753   if (operand->IsUnallocated()) {
    754     LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    755     range->AddUsePosition(position, unalloc_operand, hint, zone());
    756   }
    757   range->AddUseInterval(block_start, position, zone());
    758 }
    759 
    760 
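        // Adds a move from -> to in the gap at index. If another move in this gap
        // already writes the virtual register that 'from' refers to, that move's
        // source is reused so both destinations receive the same value.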
    761 void LAllocator::AddConstraintsGapMove(int index,
    762                                        LOperand* from,
    763                                        LOperand* to) {
    764   LGap* gap = GapAt(index);
    765   LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
    766                                                      chunk()->zone());
    767   if (from->IsUnallocated()) {
    768     const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    769     for (int i = 0; i < move_operands->length(); ++i) {
    770       LMoveOperands cur = move_operands->at(i);
    771       LOperand* cur_to = cur.destination();
    772       if (cur_to->IsUnallocated()) {
    773         if (LUnallocated::cast(cur_to)->virtual_register() ==
    774             LUnallocated::cast(from)->virtual_register()) {
    775           move->AddMove(cur.source(), to, chunk()->zone());
    776           return;
    777         }
    778       }
    779     }
    780   }
    781   move->AddMove(from, to, chunk()->zone());
    782 }
    783 
    784 
    785 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
    786   int start = block->first_instruction_index();
    787   int end = block->last_instruction_index();
    788   if (start == -1) return;
    789   for (int i = start; i <= end; ++i) {
    790     if (IsGapAt(i)) {
    791       LInstruction* instr = NULL;
    792       LInstruction* prev_instr = NULL;
    793       if (i < end) instr = InstructionAt(i + 1);
    794       if (i > start) prev_instr = InstructionAt(i - 1);
    795       MeetConstraintsBetween(prev_instr, instr, i);
    796       if (!AllocationOk()) return;
    797     }
    798   }
    799 }
    800 
    801 
    802 void LAllocator::MeetConstraintsBetween(LInstruction* first,
    803                                         LInstruction* second,
    804                                         int gap_index) {
    805   // Handle fixed temporaries.
    806   if (first != NULL) {
    807     for (TempIterator it(first); !it.Done(); it.Advance()) {
    808       LUnallocated* temp = LUnallocated::cast(it.Current());
    809       if (temp->HasFixedPolicy()) {
    810         AllocateFixed(temp, gap_index - 1, false);
    811       }
    812     }
    813   }
    814 
    815   // Handle fixed output operand.
    816   if (first != NULL && first->Output() != NULL) {
    817     LUnallocated* first_output = LUnallocated::cast(first->Output());
    818     LiveRange* range = LiveRangeFor(first_output->virtual_register());
    819     bool assigned = false;
    820     if (first_output->HasFixedPolicy()) {
    821       LUnallocated* output_copy = first_output->CopyUnconstrained(
    822           chunk()->zone());
    823       bool is_tagged = HasTaggedValue(first_output->virtual_register());
    824       AllocateFixed(first_output, gap_index, is_tagged);
    825 
    826       // This value is produced on the stack; we never need to spill it.
    827       if (first_output->IsStackSlot()) {
    828         range->SetSpillOperand(first_output);
    829         range->SetSpillStartIndex(gap_index - 1);
    830         assigned = true;
    831       }
    832       chunk_->AddGapMove(gap_index, first_output, output_copy);
    833     }
    834 
    835     if (!assigned) {
    836       range->SetSpillStartIndex(gap_index);
    837 
    838       // This move to the spill operand is not a real use. Liveness analysis
    839       // and splitting of live ranges do not account for it.
    840       // Thus it should be inserted at a lifetime position corresponding to
    841       // the instruction end.
    842       LGap* gap = GapAt(gap_index);
    843       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
    844                                                          chunk()->zone());
    845       move->AddMove(first_output, range->GetSpillOperand(),
    846                     chunk()->zone());
    847     }
    848   }
    849 
    850   // Handle fixed input operands of second instruction.
    851   if (second != NULL) {
    852     for (UseIterator it(second); !it.Done(); it.Advance()) {
    853       LUnallocated* cur_input = LUnallocated::cast(it.Current());
    854       if (cur_input->HasFixedPolicy()) {
    855         LUnallocated* input_copy = cur_input->CopyUnconstrained(
    856             chunk()->zone());
    857         bool is_tagged = HasTaggedValue(cur_input->virtual_register());
    858         AllocateFixed(cur_input, gap_index + 1, is_tagged);
    859         AddConstraintsGapMove(gap_index, input_copy, cur_input);
    860       } else if (cur_input->HasWritableRegisterPolicy()) {
    861         // The live range of a writable input register always extends to the
    862         // end of the instruction.
    863         ASSERT(!cur_input->IsUsedAtStart());
    864 
    865         LUnallocated* input_copy = cur_input->CopyUnconstrained(
    866             chunk()->zone());
    867         int vreg = GetVirtualRegister();
    868         if (!AllocationOk()) return;
    869         cur_input->set_virtual_register(vreg);
    870 
    871         if (RequiredRegisterKind(input_copy->virtual_register()) ==
    872             DOUBLE_REGISTERS) {
    873           double_artificial_registers_.Add(
    874               cur_input->virtual_register() - first_artificial_register_,
    875               zone());
    876         }
    877 
    878         AddConstraintsGapMove(gap_index, input_copy, cur_input);
    879       }
    880     }
    881   }
    882 
    883   // Handle "output same as input" for second instruction.
    884   if (second != NULL && second->Output() != NULL) {
    885     LUnallocated* second_output = LUnallocated::cast(second->Output());
    886     if (second_output->HasSameAsInputPolicy()) {
    887       LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
    888       int output_vreg = second_output->virtual_register();
    889       int input_vreg = cur_input->virtual_register();
    890 
    891       LUnallocated* input_copy = cur_input->CopyUnconstrained(
    892           chunk()->zone());
    893       cur_input->set_virtual_register(second_output->virtual_register());
    894       AddConstraintsGapMove(gap_index, input_copy, cur_input);
    895 
    896       if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
    897         int index = gap_index + 1;
    898         LInstruction* instr = InstructionAt(index);
    899         if (instr->HasPointerMap()) {
    900           instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
    901         }
    902       } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
    903         // The input is assumed to take on a tagged representation immediately,
    904         // before the pointer map can be used, i.e. the pointer map at this
    905         // instruction will include the output operand (whose value at the
    906         // beginning of the instruction is equal to the input operand). If
    907         // this is not desired, the pointer map at this instruction needs
    908         // to be adjusted manually.
    909       }
    910     }
    911   }
    912 }
    913 
    914 
    915 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
    916   int block_start = block->first_instruction_index();
    917   int index = block->last_instruction_index();
    918 
    919   LifetimePosition block_start_position =
    920       LifetimePosition::FromInstructionIndex(block_start);
    921 
    922   while (index >= block_start) {
    923     LifetimePosition curr_position =
    924         LifetimePosition::FromInstructionIndex(index);
    925 
    926     if (IsGapAt(index)) {
    927       // We have a gap at this position.
    928       LGap* gap = GapAt(index);
    929       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
    930                                                          chunk()->zone());
    931       const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    932       for (int i = 0; i < move_operands->length(); ++i) {
    933         LMoveOperands* cur = &move_operands->at(i);
    934         if (cur->IsIgnored()) continue;
    935         LOperand* from = cur->source();
    936         LOperand* to = cur->destination();
    937         HPhi* phi = LookupPhi(to);
    938         LOperand* hint = to;
    939         if (phi != NULL) {
    940           // This is a phi resolving move.
    941           if (!phi->block()->IsLoopHeader()) {
    942             hint = LiveRangeFor(phi->id())->current_hint_operand();
    943           }
    944         } else {
    945           if (to->IsUnallocated()) {
    946             if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
    947               Define(curr_position, to, from);
    948               live->Remove(LUnallocated::cast(to)->virtual_register());
    949             } else {
    950               cur->Eliminate();
    951               continue;
    952             }
    953           } else {
    954             Define(curr_position, to, from);
    955           }
    956         }
    957         Use(block_start_position, curr_position, from, hint);
    958         if (from->IsUnallocated()) {
    959           live->Add(LUnallocated::cast(from)->virtual_register());
    960         }
    961       }
    962     } else {
    963       ASSERT(!IsGapAt(index));
    964       LInstruction* instr = InstructionAt(index);
    965 
    966       if (instr != NULL) {
    967         LOperand* output = instr->Output();
    968         if (output != NULL) {
    969           if (output->IsUnallocated()) {
    970             live->Remove(LUnallocated::cast(output)->virtual_register());
    971           }
    972           Define(curr_position, output, NULL);
    973         }
    974 
    975         if (instr->ClobbersRegisters()) {
    976           for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
    977             if (output == NULL || !output->IsRegister() ||
    978                 output->index() != i) {
    979               LiveRange* range = FixedLiveRangeFor(i);
    980               range->AddUseInterval(curr_position,
    981                                     curr_position.InstructionEnd(),
    982                                     zone());
    983             }
    984           }
    985         }
    986 
    987         if (instr->ClobbersDoubleRegisters()) {
    988           for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
    989             if (output == NULL || !output->IsDoubleRegister() ||
    990                 output->index() != i) {
    991               LiveRange* range = FixedDoubleLiveRangeFor(i);
    992               range->AddUseInterval(curr_position,
    993                                     curr_position.InstructionEnd(),
    994                                     zone());
    995             }
    996           }
    997         }
    998 
    999         for (UseIterator it(instr); !it.Done(); it.Advance()) {
   1000           LOperand* input = it.Current();
   1001 
   1002           LifetimePosition use_pos;
   1003           if (input->IsUnallocated() &&
   1004               LUnallocated::cast(input)->IsUsedAtStart()) {
   1005             use_pos = curr_position;
   1006           } else {
   1007             use_pos = curr_position.InstructionEnd();
   1008           }
   1009 
   1010           Use(block_start_position, use_pos, input, NULL);
   1011           if (input->IsUnallocated()) {
   1012             live->Add(LUnallocated::cast(input)->virtual_register());
   1013           }
   1014         }
   1015 
   1016         for (TempIterator it(instr); !it.Done(); it.Advance()) {
   1017           LOperand* temp = it.Current();
   1018           if (instr->ClobbersTemps()) {
   1019             if (temp->IsRegister()) continue;
   1020             if (temp->IsUnallocated()) {
   1021               LUnallocated* temp_unalloc = LUnallocated::cast(temp);
   1022               if (temp_unalloc->HasFixedPolicy()) {
   1023                 continue;
   1024               }
   1025             }
   1026           }
   1027           Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
   1028           Define(curr_position, temp, NULL);
   1029         }
   1030       }
   1031     }
   1032 
   1033     index = index - 1;
   1034   }
   1035 }
   1036 
   1037 
   1038 void LAllocator::ResolvePhis(HBasicBlock* block) {
   1039   const ZoneList<HPhi*>* phis = block->phis();
   1040   for (int i = 0; i < phis->length(); ++i) {
   1041     HPhi* phi = phis->at(i);
   1042     LUnallocated* phi_operand =
   1043         new(chunk()->zone()) LUnallocated(LUnallocated::NONE);
   1044     phi_operand->set_virtual_register(phi->id());
   1045     for (int j = 0; j < phi->OperandCount(); ++j) {
   1046       HValue* op = phi->OperandAt(j);
   1047       LOperand* operand = NULL;
   1048       if (op->IsConstant() && op->EmitAtUses()) {
   1049         HConstant* constant = HConstant::cast(op);
   1050         operand = chunk_->DefineConstantOperand(constant);
   1051       } else {
   1052         ASSERT(!op->EmitAtUses());
   1053         LUnallocated* unalloc =
   1054             new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
   1055         unalloc->set_virtual_register(op->id());
   1056         operand = unalloc;
   1057       }
   1058       HBasicBlock* cur_block = block->predecessors()->at(j);
   1059       // The gap move must be added without any of the special processing
   1060       // done in AddConstraintsGapMove.
   1061       chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
   1062                          operand,
   1063                          phi_operand);
   1064 
   1065       // We are going to insert a move before the branch instruction.
   1066       // Some branch instructions (e.g. loops' back edges)
   1067       // can potentially cause a GC so they have a pointer map.
   1068       // By inserting a move we essentially create a copy of a
   1069       // value which is invisible to PopulatePointerMaps(), because we store
   1070       // it into a location different from the operand of a live range
   1071       // covering a branch instruction.
   1072       // Thus we need to manually record a pointer.
   1073       LInstruction* branch =
   1074           InstructionAt(cur_block->last_instruction_index());
   1075       if (branch->HasPointerMap()) {
   1076         if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
   1077           branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
   1078         } else if (!phi->representation().IsDouble()) {
   1079           branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
   1080         }
   1081       }
   1082     }
   1083 
   1084     LiveRange* live_range = LiveRangeFor(phi->id());
   1085     LLabel* label = chunk_->GetLabel(phi->block()->block_id());
   1086     label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
   1087         AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
   1088     live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
   1089   }
   1090 }
   1091 
   1092 
   1093 bool LAllocator::Allocate(LChunk* chunk) {
   1094   ASSERT(chunk_ == NULL);
   1095   chunk_ = static_cast<LPlatformChunk*>(chunk);
   1096   assigned_registers_ =
   1097       new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(),
   1098                                    chunk->zone());
   1099   assigned_double_registers_ =
   1100       new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(),
   1101                                    chunk->zone());
   1102   MeetRegisterConstraints();
   1103   if (!AllocationOk()) return false;
   1104   ResolvePhis();
   1105   BuildLiveRanges();
   1106   AllocateGeneralRegisters();
   1107   if (!AllocationOk()) return false;
   1108   AllocateDoubleRegisters();
   1109   if (!AllocationOk()) return false;
   1110   PopulatePointerMaps();
   1111   ConnectRanges();
   1112   ResolveControlFlow();
   1113   return true;
   1114 }
   1115 
   1116 
   1117 void LAllocator::MeetRegisterConstraints() {
   1118   LAllocatorPhase phase("L_Register constraints", this);
   1119   first_artificial_register_ = next_virtual_register_;
   1120   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
   1121   for (int i = 0; i < blocks->length(); ++i) {
   1122     HBasicBlock* block = blocks->at(i);
   1123     MeetRegisterConstraints(block);
   1124     if (!AllocationOk()) return;
   1125   }
   1126 }
   1127 
   1128 
   1129 void LAllocator::ResolvePhis() {
   1130   LAllocatorPhase phase("L_Resolve phis", this);
   1131 
   1132   // Process the blocks in reverse order.
   1133   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
   1134   for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
   1135     HBasicBlock* block = blocks->at(block_id);
   1136     ResolvePhis(block);
   1137   }
   1138 }
   1139 
   1140 
   1141 void LAllocator::ResolveControlFlow(LiveRange* range,
   1142                                     HBasicBlock* block,
   1143                                     HBasicBlock* pred) {
   1144   LifetimePosition pred_end =
   1145       LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
   1146   LifetimePosition cur_start =
   1147       LifetimePosition::FromInstructionIndex(block->first_instruction_index());
   1148   LiveRange* pred_cover = NULL;
   1149   LiveRange* cur_cover = NULL;
   1150   LiveRange* cur_range = range;
   1151   while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
   1152     if (cur_range->CanCover(cur_start)) {
   1153       ASSERT(cur_cover == NULL);
   1154       cur_cover = cur_range;
   1155     }
   1156     if (cur_range->CanCover(pred_end)) {
   1157       ASSERT(pred_cover == NULL);
   1158       pred_cover = cur_range;
   1159     }
   1160     cur_range = cur_range->next();
   1161   }
   1162 
   1163   if (cur_cover->IsSpilled()) return;
   1164   ASSERT(pred_cover != NULL && cur_cover != NULL);
   1165   if (pred_cover != cur_cover) {
   1166     LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
   1167     LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
   1168     if (!pred_op->Equals(cur_op)) {
   1169       LGap* gap = NULL;
   1170       if (block->predecessors()->length() == 1) {
   1171         gap = GapAt(block->first_instruction_index());
   1172       } else {
   1173         ASSERT(pred->end()->SecondSuccessor() == NULL);
   1174         gap = GetLastGap(pred);
   1175 
   1176         // We are going to insert a move before the branch instruction.
   1177         // Some branch instructions (e.g. loops' back edges)
   1178         // can potentially cause a GC so they have a pointer map.
   1179         // By inserting a move we essentially create a copy of a
   1180         // value which is invisible to PopulatePointerMaps(), because we store
   1181         // it into a location different from the operand of a live range
   1182         // covering a branch instruction.
   1183         // Thus we need to manually record a pointer.
   1184         LInstruction* branch = InstructionAt(pred->last_instruction_index());
   1185         if (branch->HasPointerMap()) {
   1186           if (HasTaggedValue(range->id())) {
   1187             branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
   1188           } else if (!cur_op->IsDoubleStackSlot() &&
   1189                      !cur_op->IsDoubleRegister()) {
   1190             branch->pointer_map()->RemovePointer(cur_op);
   1191           }
   1192         }
   1193       }
   1194       gap->GetOrCreateParallelMove(
   1195           LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
   1196                                                  chunk()->zone());
   1197     }
   1198   }
   1199 }
   1200 
   1201 
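        // Returns the parallel move into which a connecting move at pos should be
        // inserted: the gap at pos itself if there is one, otherwise the
        // appropriate slot of the neighboring gap.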
   1202 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
   1203   int index = pos.InstructionIndex();
   1204   if (IsGapAt(index)) {
   1205     LGap* gap = GapAt(index);
   1206     return gap->GetOrCreateParallelMove(
   1207         pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
   1208   }
   1209   int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
   1210   return GapAt(gap_pos)->GetOrCreateParallelMove(
   1211       (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
   1212 }
   1213 
   1214 
   1215 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
   1216   LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
   1217   return gap->block();
   1218 }
   1219 
   1220 
   1221 void LAllocator::ConnectRanges() {
   1222   LAllocatorPhase phase("L_Connect ranges", this);
   1223   for (int i = 0; i < live_ranges()->length(); ++i) {
   1224     LiveRange* first_range = live_ranges()->at(i);
   1225     if (first_range == NULL || first_range->parent() != NULL) continue;
   1226 
   1227     LiveRange* second_range = first_range->next();
   1228     while (second_range != NULL) {
   1229       LifetimePosition pos = second_range->Start();
   1230 
   1231       if (!second_range->IsSpilled()) {
   1232         // Add gap move if the two live ranges touch and there is no block
   1233         // boundary.
   1234         if (first_range->End().Value() == pos.Value()) {
   1235           bool should_insert = true;
   1236           if (IsBlockBoundary(pos)) {
   1237             should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
   1238           }
   1239           if (should_insert) {
   1240             LParallelMove* move = GetConnectingParallelMove(pos);
   1241             LOperand* prev_operand = first_range->CreateAssignedOperand(
   1242                 chunk()->zone());
   1243             LOperand* cur_operand = second_range->CreateAssignedOperand(
   1244                 chunk()->zone());
   1245             move->AddMove(prev_operand, cur_operand,
   1246                           chunk()->zone());
   1247           }
   1248         }
   1249       }
   1250 
   1251       first_range = second_range;
   1252       second_range = second_range->next();
   1253     }
   1254   }
   1255 }
   1256 
   1257 
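        // Control flow needs no explicit resolution when a block's single
        // predecessor is the block that immediately precedes it.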
   1258 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
   1259   if (block->predecessors()->length() != 1) return false;
   1260   return block->predecessors()->first()->block_id() == block->block_id() - 1;
   1261 }
   1262 
   1263 
   1264 void LAllocator::ResolveControlFlow() {
   1265   LAllocatorPhase phase("L_Resolve control flow", this);
   1266   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
   1267   for (int block_id = 1; block_id < blocks->length(); ++block_id) {
   1268     HBasicBlock* block = blocks->at(block_id);
   1269     if (CanEagerlyResolveControlFlow(block)) continue;
   1270     BitVector* live = live_in_sets_[block->block_id()];
   1271     BitVector::Iterator iterator(live);
   1272     while (!iterator.Done()) {
   1273       int operand_index = iterator.Current();
   1274       for (int i = 0; i < block->predecessors()->length(); ++i) {
   1275         HBasicBlock* cur = block->predecessors()->at(i);
   1276         LiveRange* cur_range = LiveRangeFor(operand_index);
   1277         ResolveControlFlow(cur_range, block, cur);
   1278       }
   1279       iterator.Advance();
   1280     }
   1281   }
   1282 }
   1283 
   1284 
   1285 void LAllocator::BuildLiveRanges() {
   1286   LAllocatorPhase phase("L_Build live ranges", this);
   1287   InitializeLivenessAnalysis();
   1288   // Process the blocks in reverse order.
   1289   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
   1290   for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
   1291     HBasicBlock* block = blocks->at(block_id);
   1292     BitVector* live = ComputeLiveOut(block);
   1293     // Initially consider all live_out values live for the entire block. We
   1294     // will shorten these intervals if necessary.
   1295     AddInitialIntervals(block, live);
   1296 
   1297     // Process the instructions in reverse order, generating and killing
   1298     // live values.
   1299     ProcessInstructions(block, live);
   1300     // All phi output operands are killed by this block.
   1301     const ZoneList<HPhi*>* phis = block->phis();
   1302     for (int i = 0; i < phis->length(); ++i) {
   1303       // The live range interval already ends at the first instruction of the
   1304       // block.
   1305       HPhi* phi = phis->at(i);
   1306       live->Remove(phi->id());
   1307 
   1308       LOperand* hint = NULL;
   1309       LOperand* phi_operand = NULL;
   1310       LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
   1311       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
   1312                                                          chunk()->zone());
   1313       for (int j = 0; j < move->move_operands()->length(); ++j) {
   1314         LOperand* to = move->move_operands()->at(j).destination();
   1315         if (to->IsUnallocated() &&
   1316             LUnallocated::cast(to)->virtual_register() == phi->id()) {
   1317           hint = move->move_operands()->at(j).source();
   1318           phi_operand = to;
   1319           break;
   1320         }
   1321       }
   1322       ASSERT(hint != NULL);
   1323 
   1324       LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
   1325               block->first_instruction_index());
   1326       Define(block_start, phi_operand, hint);
   1327     }
   1328 
   1329     // Now live is live_in for this block, except that it does not include
   1330     // values that are live out only on backward successor edges.
   1331     live_in_sets_[block_id] = live;
   1332 
    1333     // If this block is a loop header, go back and patch up the necessary
    1334     // predecessor blocks.
   1335     if (block->IsLoopHeader()) {
   1336       // TODO(kmillikin): Need to be able to get the last block of the loop
   1337       // in the loop information. Add a live range stretching from the first
   1338       // loop instruction to the last for each value live on entry to the
   1339       // header.
   1340       HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
   1341       BitVector::Iterator iterator(live);
   1342       LifetimePosition start = LifetimePosition::FromInstructionIndex(
   1343           block->first_instruction_index());
   1344       LifetimePosition end = LifetimePosition::FromInstructionIndex(
   1345           back_edge->last_instruction_index()).NextInstruction();
   1346       while (!iterator.Done()) {
   1347         int operand_index = iterator.Current();
   1348         LiveRange* range = LiveRangeFor(operand_index);
   1349         range->EnsureInterval(start, end, zone());
   1350         iterator.Advance();
   1351       }
   1352 
   1353       for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
   1354         live_in_sets_[i]->Union(*live);
   1355       }
   1356     }
   1357 
   1358 #ifdef DEBUG
   1359     if (block_id == 0) {
   1360       BitVector::Iterator iterator(live);
   1361       bool found = false;
   1362       while (!iterator.Done()) {
   1363         found = true;
   1364         int operand_index = iterator.Current();
   1365         if (chunk_->info()->IsStub()) {
   1366           CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey();
   1367           PrintF("Function: %s\n", CodeStub::MajorName(major_key, false));
   1368         } else {
   1369           ASSERT(chunk_->info()->IsOptimizing());
   1370           AllowHandleDereference allow_deref;
   1371           PrintF("Function: %s\n",
   1372                  *chunk_->info()->function()->debug_name()->ToCString());
   1373         }
   1374         PrintF("Value %d used before first definition!\n", operand_index);
   1375         LiveRange* range = LiveRangeFor(operand_index);
   1376         PrintF("First use is at %d\n", range->first_pos()->pos().Value());
   1377         iterator.Advance();
   1378       }
   1379       ASSERT(!found);
   1380     }
   1381 #endif
   1382   }
   1383 
   1384   for (int i = 0; i < live_ranges_.length(); ++i) {
   1385     if (live_ranges_[i] != NULL) {
   1386       live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
   1387     }
   1388   }
   1389 }
   1390 
   1391 
   1392 bool LAllocator::SafePointsAreInOrder() const {
   1393   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
   1394   int safe_point = 0;
   1395   for (int i = 0; i < pointer_maps->length(); ++i) {
   1396     LPointerMap* map = pointer_maps->at(i);
   1397     if (safe_point > map->lithium_position()) return false;
   1398     safe_point = map->lithium_position();
   1399   }
   1400   return true;
   1401 }
   1402 
   1403 
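         // Record, for every safe point, where each live tagged value can be
         // found (its spill slot and/or its assigned register) so the pointer
         // can be updated if the GC moves the object.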
   1404 void LAllocator::PopulatePointerMaps() {
   1405   LAllocatorPhase phase("L_Populate pointer maps", this);
   1406   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
   1407 
   1408   ASSERT(SafePointsAreInOrder());
   1409 
   1410   // Iterate over all safe point positions and record a pointer
   1411   // for all spilled live ranges at this point.
   1412   int first_safe_point_index = 0;
   1413   int last_range_start = 0;
   1414   for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
   1415     LiveRange* range = live_ranges()->at(range_idx);
   1416     if (range == NULL) continue;
   1417     // Iterate over the first parts of multi-part live ranges.
   1418     if (range->parent() != NULL) continue;
   1419     // Skip non-pointer values.
   1420     if (!HasTaggedValue(range->id())) continue;
   1421     // Skip empty live ranges.
   1422     if (range->IsEmpty()) continue;
   1423 
   1424     // Find the extent of the range and its children.
   1425     int start = range->Start().InstructionIndex();
   1426     int end = 0;
   1427     for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
   1428       LifetimePosition this_end = cur->End();
   1429       if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
   1430       ASSERT(cur->Start().InstructionIndex() >= start);
   1431     }
   1432 
    1433     // Most of the ranges are in order, but not all.  When a range starts
    1434     // before the previous one, reset first_safe_point_index so we don't
    1435     // miss any safe points.
   1436     if (start < last_range_start) {
   1437       first_safe_point_index = 0;
   1438     }
   1439     last_range_start = start;
   1440 
    1441     // Step across all the safe points that are before the start of this range,
    1442     // recording how far we stepped to avoid repeating this for the next range.
   1443     while (first_safe_point_index < pointer_maps->length()) {
   1444       LPointerMap* map = pointer_maps->at(first_safe_point_index);
   1445       int safe_point = map->lithium_position();
   1446       if (safe_point >= start) break;
   1447       first_safe_point_index++;
   1448     }
   1449 
   1450     // Step through the safe points to see whether they are in the range.
   1451     for (int safe_point_index = first_safe_point_index;
   1452          safe_point_index < pointer_maps->length();
   1453          ++safe_point_index) {
   1454       LPointerMap* map = pointer_maps->at(safe_point_index);
   1455       int safe_point = map->lithium_position();
   1456 
   1457       // The safe points are sorted so we can stop searching here.
   1458       if (safe_point - 1 > end) break;
   1459 
   1460       // Advance to the next active range that covers the current
   1461       // safe point position.
   1462       LifetimePosition safe_point_pos =
   1463           LifetimePosition::FromInstructionIndex(safe_point);
   1464       LiveRange* cur = range;
   1465       while (cur != NULL && !cur->Covers(safe_point_pos)) {
   1466         cur = cur->next();
   1467       }
   1468       if (cur == NULL) continue;
   1469 
   1470       // Check if the live range is spilled and the safe point is after
   1471       // the spill position.
   1472       if (range->HasAllocatedSpillOperand() &&
   1473           safe_point >= range->spill_start_index()) {
   1474         TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
   1475                    range->id(), range->spill_start_index(), safe_point);
   1476         map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
   1477       }
   1478 
   1479       if (!cur->IsSpilled()) {
   1480         TraceAlloc("Pointer in register for range %d (start at %d) "
   1481                    "at safe point %d\n",
   1482                    cur->id(), cur->Start().Value(), safe_point);
   1483         LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
   1484         ASSERT(!operand->IsStackSlot());
   1485         map->RecordPointer(operand, chunk()->zone());
   1486       }
   1487     }
   1488   }
   1489 }
   1490 
   1491 
   1492 void LAllocator::AllocateGeneralRegisters() {
   1493   LAllocatorPhase phase("L_Allocate general registers", this);
   1494   num_registers_ = Register::NumAllocatableRegisters();
   1495   mode_ = GENERAL_REGISTERS;
   1496   AllocateRegisters();
   1497 }
   1498 
   1499 
   1500 void LAllocator::AllocateDoubleRegisters() {
   1501   LAllocatorPhase phase("L_Allocate double registers", this);
   1502   num_registers_ = DoubleRegister::NumAllocatableRegisters();
   1503   mode_ = DOUBLE_REGISTERS;
   1504   AllocateRegisters();
   1505 }
   1506 
   1507 
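         // Core of the linear-scan allocator. Live ranges of the current kind are
         // processed in order of increasing start position; before each range is
         // handled, ranges that have ended are retired and ranges move between the
         // active and inactive sets depending on whether they cover the current
         // position. The current range is then given a free register if possible,
         // otherwise a register is made available by splitting and spilling the
         // ranges that conflict with it.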
   1508 void LAllocator::AllocateRegisters() {
   1509   ASSERT(unhandled_live_ranges_.is_empty());
   1510 
   1511   for (int i = 0; i < live_ranges_.length(); ++i) {
   1512     if (live_ranges_[i] != NULL) {
   1513       if (live_ranges_[i]->Kind() == mode_) {
   1514         AddToUnhandledUnsorted(live_ranges_[i]);
   1515       }
   1516     }
   1517   }
   1518   SortUnhandled();
   1519   ASSERT(UnhandledIsSorted());
   1520 
   1521   ASSERT(reusable_slots_.is_empty());
   1522   ASSERT(active_live_ranges_.is_empty());
   1523   ASSERT(inactive_live_ranges_.is_empty());
   1524 
   1525   if (mode_ == DOUBLE_REGISTERS) {
   1526     for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
   1527       LiveRange* current = fixed_double_live_ranges_.at(i);
   1528       if (current != NULL) {
   1529         AddToInactive(current);
   1530       }
   1531     }
   1532   } else {
   1533     ASSERT(mode_ == GENERAL_REGISTERS);
   1534     for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
   1535       LiveRange* current = fixed_live_ranges_.at(i);
   1536       if (current != NULL) {
   1537         AddToInactive(current);
   1538       }
   1539     }
   1540   }
   1541 
   1542   while (!unhandled_live_ranges_.is_empty()) {
   1543     ASSERT(UnhandledIsSorted());
   1544     LiveRange* current = unhandled_live_ranges_.RemoveLast();
   1545     ASSERT(UnhandledIsSorted());
   1546     LifetimePosition position = current->Start();
   1547 #ifdef DEBUG
   1548     allocation_finger_ = position;
   1549 #endif
   1550     TraceAlloc("Processing interval %d start=%d\n",
   1551                current->id(),
   1552                position.Value());
   1553 
   1554     if (current->HasAllocatedSpillOperand()) {
   1555       TraceAlloc("Live range %d already has a spill operand\n", current->id());
   1556       LifetimePosition next_pos = position;
   1557       if (IsGapAt(next_pos.InstructionIndex())) {
   1558         next_pos = next_pos.NextInstruction();
   1559       }
   1560       UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
   1561       // If the range already has a spill operand and it doesn't need a
   1562       // register immediately, split it and spill the first part of the range.
   1563       if (pos == NULL) {
   1564         Spill(current);
   1565         continue;
   1566       } else if (pos->pos().Value() >
   1567                  current->Start().NextInstruction().Value()) {
    1568         // Do not spill the live range eagerly if the use position that can
    1569         // benefit from the register is too close to the start of the live range.
   1570         SpillBetween(current, current->Start(), pos->pos());
   1571         if (!AllocationOk()) return;
   1572         ASSERT(UnhandledIsSorted());
   1573         continue;
   1574       }
   1575     }
   1576 
   1577     for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1578       LiveRange* cur_active = active_live_ranges_.at(i);
   1579       if (cur_active->End().Value() <= position.Value()) {
   1580         ActiveToHandled(cur_active);
   1581         --i;  // The live range was removed from the list of active live ranges.
   1582       } else if (!cur_active->Covers(position)) {
   1583         ActiveToInactive(cur_active);
   1584         --i;  // The live range was removed from the list of active live ranges.
   1585       }
   1586     }
   1587 
   1588     for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1589       LiveRange* cur_inactive = inactive_live_ranges_.at(i);
   1590       if (cur_inactive->End().Value() <= position.Value()) {
   1591         InactiveToHandled(cur_inactive);
   1592         --i;  // Live range was removed from the list of inactive live ranges.
   1593       } else if (cur_inactive->Covers(position)) {
   1594         InactiveToActive(cur_inactive);
   1595         --i;  // Live range was removed from the list of inactive live ranges.
   1596       }
   1597     }
   1598 
   1599     ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());
   1600 
   1601     bool result = TryAllocateFreeReg(current);
   1602     if (!AllocationOk()) return;
   1603 
   1604     if (!result) AllocateBlockedReg(current);
   1605     if (!AllocationOk()) return;
   1606 
   1607     if (current->HasRegisterAssigned()) {
   1608       AddToActive(current);
   1609     }
   1610   }
   1611 
   1612   reusable_slots_.Rewind(0);
   1613   active_live_ranges_.Rewind(0);
   1614   inactive_live_ranges_.Rewind(0);
   1615 }
   1616 
   1617 
   1618 const char* LAllocator::RegisterName(int allocation_index) {
   1619   if (mode_ == GENERAL_REGISTERS) {
   1620     return Register::AllocationIndexToString(allocation_index);
   1621   } else {
   1622     return DoubleRegister::AllocationIndexToString(allocation_index);
   1623   }
   1624 }
   1625 
   1626 
   1627 void LAllocator::TraceAlloc(const char* msg, ...) {
   1628   if (FLAG_trace_alloc) {
   1629     va_list arguments;
   1630     va_start(arguments, msg);
   1631     OS::VPrint(msg, arguments);
   1632     va_end(arguments);
   1633   }
   1634 }
   1635 
   1636 
   1637 bool LAllocator::HasTaggedValue(int virtual_register) const {
   1638   HValue* value = graph_->LookupValue(virtual_register);
   1639   if (value == NULL) return false;
   1640   return value->representation().IsTagged() && !value->type().IsSmi();
   1641 }
   1642 
   1643 
   1644 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
   1645   if (virtual_register < first_artificial_register_) {
   1646     HValue* value = graph_->LookupValue(virtual_register);
   1647     if (value != NULL && value->representation().IsDouble()) {
   1648       return DOUBLE_REGISTERS;
   1649     }
   1650   } else if (double_artificial_registers_.Contains(
   1651       virtual_register - first_artificial_register_)) {
   1652     return DOUBLE_REGISTERS;
   1653   }
   1654 
   1655   return GENERAL_REGISTERS;
   1656 }
   1657 
   1658 
   1659 void LAllocator::AddToActive(LiveRange* range) {
   1660   TraceAlloc("Add live range %d to active\n", range->id());
   1661   active_live_ranges_.Add(range, zone());
   1662 }
   1663 
   1664 
   1665 void LAllocator::AddToInactive(LiveRange* range) {
   1666   TraceAlloc("Add live range %d to inactive\n", range->id());
   1667   inactive_live_ranges_.Add(range, zone());
   1668 }
   1669 
   1670 
   1671 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
   1672   if (range == NULL || range->IsEmpty()) return;
   1673   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
   1674   ASSERT(allocation_finger_.Value() <= range->Start().Value());
   1675   for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
   1676     LiveRange* cur_range = unhandled_live_ranges_.at(i);
   1677     if (range->ShouldBeAllocatedBefore(cur_range)) {
   1678       TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
   1679       unhandled_live_ranges_.InsertAt(i + 1, range, zone());
   1680       ASSERT(UnhandledIsSorted());
   1681       return;
   1682     }
   1683   }
   1684   TraceAlloc("Add live range %d to unhandled at start\n", range->id());
   1685   unhandled_live_ranges_.InsertAt(0, range, zone());
   1686   ASSERT(UnhandledIsSorted());
   1687 }
   1688 
   1689 
   1690 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
   1691   if (range == NULL || range->IsEmpty()) return;
   1692   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
   1693   TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
   1694   unhandled_live_ranges_.Add(range, zone());
   1695 }
   1696 
   1697 
   1698 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
   1699   ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
   1700          !(*b)->ShouldBeAllocatedBefore(*a));
   1701   if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
   1702   if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
   1703   return (*a)->id() - (*b)->id();
   1704 }
   1705 
   1706 
   1707 // Sort the unhandled live ranges so that the ranges to be processed first are
    1708 // at the end of the list.  This is convenient for the register allocation
   1709 // algorithm because it is efficient to remove elements from the end.
   1710 void LAllocator::SortUnhandled() {
   1711   TraceAlloc("Sort unhandled\n");
   1712   unhandled_live_ranges_.Sort(&UnhandledSortHelper);
   1713 }
   1714 
   1715 
   1716 bool LAllocator::UnhandledIsSorted() {
   1717   int len = unhandled_live_ranges_.length();
   1718   for (int i = 1; i < len; i++) {
   1719     LiveRange* a = unhandled_live_ranges_.at(i - 1);
   1720     LiveRange* b = unhandled_live_ranges_.at(i);
   1721     if (a->Start().Value() < b->Start().Value()) return false;
   1722   }
   1723   return true;
   1724 }
   1725 
   1726 
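         // Spill slots are reused: when the last part of a range that owns a stack
         // slot is retired, the slot is remembered in reusable_slots_ so that
         // TryReuseSpillSlot can hand it to a later range that starts after the
         // old range has ended.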
   1727 void LAllocator::FreeSpillSlot(LiveRange* range) {
    1728   // Nothing to do unless this is the last part of the range.
   1729   if (range->next() != NULL) return;
   1730 
   1731   if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
   1732 
   1733   int index = range->TopLevel()->GetSpillOperand()->index();
   1734   if (index >= 0) {
   1735     reusable_slots_.Add(range, zone());
   1736   }
   1737 }
   1738 
   1739 
   1740 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
   1741   if (reusable_slots_.is_empty()) return NULL;
   1742   if (reusable_slots_.first()->End().Value() >
   1743       range->TopLevel()->Start().Value()) {
   1744     return NULL;
   1745   }
   1746   LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
   1747   reusable_slots_.Remove(0);
   1748   return result;
   1749 }
   1750 
   1751 
   1752 void LAllocator::ActiveToHandled(LiveRange* range) {
   1753   ASSERT(active_live_ranges_.Contains(range));
   1754   active_live_ranges_.RemoveElement(range);
   1755   TraceAlloc("Moving live range %d from active to handled\n", range->id());
   1756   FreeSpillSlot(range);
   1757 }
   1758 
   1759 
   1760 void LAllocator::ActiveToInactive(LiveRange* range) {
   1761   ASSERT(active_live_ranges_.Contains(range));
   1762   active_live_ranges_.RemoveElement(range);
   1763   inactive_live_ranges_.Add(range, zone());
   1764   TraceAlloc("Moving live range %d from active to inactive\n", range->id());
   1765 }
   1766 
   1767 
   1768 void LAllocator::InactiveToHandled(LiveRange* range) {
   1769   ASSERT(inactive_live_ranges_.Contains(range));
   1770   inactive_live_ranges_.RemoveElement(range);
   1771   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
   1772   FreeSpillSlot(range);
   1773 }
   1774 
   1775 
   1776 void LAllocator::InactiveToActive(LiveRange* range) {
   1777   ASSERT(inactive_live_ranges_.Contains(range));
   1778   inactive_live_ranges_.RemoveElement(range);
   1779   active_live_ranges_.Add(range, zone());
   1780   TraceAlloc("Moving live range %d from inactive to active\n", range->id());
   1781 }
   1782 
   1783 
   1784 // TryAllocateFreeReg and AllocateBlockedReg assume this
   1785 // when allocating local arrays.
   1786 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
   1787               Register::kMaxNumAllocatableRegisters);
   1788 
   1789 
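         // Try to assign a register to the current range without evicting another
         // range. For each register we compute how long it stays free; the hinted
         // register is preferred if it is free for the whole range, otherwise the
         // register that stays free the longest is chosen, splitting the current
         // range at the point where that register becomes blocked.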
   1790 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
   1791   LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];
   1792 
   1793   for (int i = 0; i < num_registers_; i++) {
   1794     free_until_pos[i] = LifetimePosition::MaxPosition();
   1795   }
   1796 
   1797   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1798     LiveRange* cur_active = active_live_ranges_.at(i);
   1799     free_until_pos[cur_active->assigned_register()] =
   1800         LifetimePosition::FromInstructionIndex(0);
   1801   }
   1802 
   1803   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1804     LiveRange* cur_inactive = inactive_live_ranges_.at(i);
   1805     ASSERT(cur_inactive->End().Value() > current->Start().Value());
   1806     LifetimePosition next_intersection =
   1807         cur_inactive->FirstIntersection(current);
   1808     if (!next_intersection.IsValid()) continue;
   1809     int cur_reg = cur_inactive->assigned_register();
   1810     free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
   1811   }
   1812 
   1813   LOperand* hint = current->FirstHint();
   1814   if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
   1815     int register_index = hint->index();
   1816     TraceAlloc(
   1817         "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
   1818         RegisterName(register_index),
   1819         free_until_pos[register_index].Value(),
   1820         current->id(),
   1821         current->End().Value());
   1822 
   1823     // The desired register is free until the end of the current live range.
   1824     if (free_until_pos[register_index].Value() >= current->End().Value()) {
   1825       TraceAlloc("Assigning preferred reg %s to live range %d\n",
   1826                  RegisterName(register_index),
   1827                  current->id());
   1828       SetLiveRangeAssignedRegister(current, register_index);
   1829       return true;
   1830     }
   1831   }
   1832 
   1833   // Find the register which stays free for the longest time.
   1834   int reg = 0;
   1835   for (int i = 1; i < RegisterCount(); ++i) {
   1836     if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
   1837       reg = i;
   1838     }
   1839   }
   1840 
   1841   LifetimePosition pos = free_until_pos[reg];
   1842 
   1843   if (pos.Value() <= current->Start().Value()) {
   1844     // All registers are blocked.
   1845     return false;
   1846   }
   1847 
   1848   if (pos.Value() < current->End().Value()) {
   1849     // Register reg is available at the range start but becomes blocked before
    1850     // the range end. Split current at the position where it becomes blocked.
   1851     LiveRange* tail = SplitRangeAt(current, pos);
   1852     if (!AllocationOk()) return false;
   1853     AddToUnhandledSorted(tail);
   1854   }
   1855 
   1856 
   1857   // Register reg is available at the range start and is free until
   1858   // the range end.
   1859   ASSERT(pos.Value() >= current->End().Value());
   1860   TraceAlloc("Assigning free reg %s to live range %d\n",
   1861              RegisterName(reg),
   1862              current->id());
   1863   SetLiveRangeAssignedRegister(current, reg);
   1864 
   1865   return true;
   1866 }
   1867 
   1868 
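         // All registers are in use at the current position. Pick the register
         // whose next use by another range is furthest away; if even that register
         // is needed before the current range's first use that requires a register,
         // spill the beginning of the current range up to that use, otherwise
         // assign the register and evict the intersecting parts of the ranges
         // that previously held it.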
   1869 void LAllocator::AllocateBlockedReg(LiveRange* current) {
   1870   UsePosition* register_use = current->NextRegisterPosition(current->Start());
   1871   if (register_use == NULL) {
   1872     // There is no use in the current live range that requires a register.
   1873     // We can just spill it.
   1874     Spill(current);
   1875     return;
   1876   }
   1877 
   1878 
   1879   LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
   1880   LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];
   1881 
   1882   for (int i = 0; i < num_registers_; i++) {
   1883     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
   1884   }
   1885 
   1886   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1887     LiveRange* range = active_live_ranges_[i];
   1888     int cur_reg = range->assigned_register();
   1889     if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
   1890       block_pos[cur_reg] = use_pos[cur_reg] =
   1891           LifetimePosition::FromInstructionIndex(0);
   1892     } else {
   1893       UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
   1894           current->Start());
   1895       if (next_use == NULL) {
   1896         use_pos[cur_reg] = range->End();
   1897       } else {
   1898         use_pos[cur_reg] = next_use->pos();
   1899       }
   1900     }
   1901   }
   1902 
   1903   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   1904     LiveRange* range = inactive_live_ranges_.at(i);
   1905     ASSERT(range->End().Value() > current->Start().Value());
   1906     LifetimePosition next_intersection = range->FirstIntersection(current);
   1907     if (!next_intersection.IsValid()) continue;
   1908     int cur_reg = range->assigned_register();
   1909     if (range->IsFixed()) {
   1910       block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
   1911       use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
   1912     } else {
   1913       use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
   1914     }
   1915   }
   1916 
   1917   int reg = 0;
   1918   for (int i = 1; i < RegisterCount(); ++i) {
   1919     if (use_pos[i].Value() > use_pos[reg].Value()) {
   1920       reg = i;
   1921     }
   1922   }
   1923 
   1924   LifetimePosition pos = use_pos[reg];
   1925 
   1926   if (pos.Value() < register_use->pos().Value()) {
   1927     // All registers are blocked before the first use that requires a register.
    1928     // Spill the starting part of the live range up to that use.
   1929     SpillBetween(current, current->Start(), register_use->pos());
   1930     return;
   1931   }
   1932 
   1933   if (block_pos[reg].Value() < current->End().Value()) {
    1934     // The register becomes blocked before the current range end. Split before
    1935     // that position.
   1936     LiveRange* tail = SplitBetween(current,
   1937                                    current->Start(),
   1938                                    block_pos[reg].InstructionStart());
   1939     if (!AllocationOk()) return;
   1940     AddToUnhandledSorted(tail);
   1941   }
   1942 
   1943   // Register reg is not blocked for the whole range.
   1944   ASSERT(block_pos[reg].Value() >= current->End().Value());
   1945   TraceAlloc("Assigning blocked reg %s to live range %d\n",
   1946              RegisterName(reg),
   1947              current->id());
   1948   SetLiveRangeAssignedRegister(current, reg);
   1949 
    1950   // This register was not free. Thus we need to find and spill
    1951   // parts of active and inactive live ranges that use the same register
    1952   // at the same lifetime positions as current.
   1953   SplitAndSpillIntersecting(current);
   1954 }
   1955 
   1956 
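         // When a range has to be spilled inside a loop, try to hoist the spill to
         // the loop header (provided no register-beneficial use occurs in between)
         // so that the store is not repeated on every iteration.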
   1957 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
   1958                                                     LifetimePosition pos) {
   1959   HBasicBlock* block = GetBlock(pos.InstructionStart());
   1960   HBasicBlock* loop_header =
   1961       block->IsLoopHeader() ? block : block->parent_loop_header();
   1962 
   1963   if (loop_header == NULL) return pos;
   1964 
   1965   UsePosition* prev_use =
   1966     range->PreviousUsePositionRegisterIsBeneficial(pos);
   1967 
   1968   while (loop_header != NULL) {
    1969     // We are going to spill the live range inside the loop.
    1970     // If possible, try to move the spilling position backwards to the loop
    1971     // header. This will reduce the number of memory moves on the back edge.
   1972     LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
   1973         loop_header->first_instruction_index());
   1974 
   1975     if (range->Covers(loop_start)) {
   1976       if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
    1977         // No register-beneficial use inside the loop before this position.
   1978         pos = loop_start;
   1979       }
   1980     }
   1981 
   1982     // Try hoisting out to an outer loop.
   1983     loop_header = loop_header->parent_loop_header();
   1984   }
   1985 
   1986   return pos;
   1987 }
   1988 
   1989 
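         // The register assigned to the current range is taken away from its
         // previous owners: every active or inactive range that uses the same
         // register is split and spilled around the portion that conflicts with
         // the current range.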
   1990 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
   1991   ASSERT(current->HasRegisterAssigned());
   1992   int reg = current->assigned_register();
   1993   LifetimePosition split_pos = current->Start();
   1994   for (int i = 0; i < active_live_ranges_.length(); ++i) {
   1995     LiveRange* range = active_live_ranges_[i];
   1996     if (range->assigned_register() == reg) {
   1997       UsePosition* next_pos = range->NextRegisterPosition(current->Start());
   1998       LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
   1999       if (next_pos == NULL) {
   2000         SpillAfter(range, spill_pos);
   2001       } else {
    2002         // When spilling between spill_pos and next_pos, ensure that the range
    2003         // remains spilled at least until the start of the current live range.
    2004         // This guarantees that we will not introduce new unhandled ranges that
    2005         // start before the current range, as that would violate the allocation
    2006         // invariant and lead to an inconsistent state of the active and inactive
    2007         // live ranges: ranges are allocated in order of their start positions,
    2008         // and ranges are retired from active/inactive once the start of the
    2009         // current live range is larger than their end.
   2010         SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
   2011       }
   2012       if (!AllocationOk()) return;
   2013       ActiveToHandled(range);
   2014       --i;
   2015     }
   2016   }
   2017 
   2018   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
   2019     LiveRange* range = inactive_live_ranges_[i];
   2020     ASSERT(range->End().Value() > current->Start().Value());
   2021     if (range->assigned_register() == reg && !range->IsFixed()) {
   2022       LifetimePosition next_intersection = range->FirstIntersection(current);
   2023       if (next_intersection.IsValid()) {
   2024         UsePosition* next_pos = range->NextRegisterPosition(current->Start());
   2025         if (next_pos == NULL) {
   2026           SpillAfter(range, split_pos);
   2027         } else {
   2028           next_intersection = Min(next_intersection, next_pos->pos());
   2029           SpillBetween(range, split_pos, next_intersection);
   2030         }
   2031         if (!AllocationOk()) return;
   2032         InactiveToHandled(range);
   2033         --i;
   2034       }
   2035     }
   2036   }
   2037 }
   2038 
   2039 
   2040 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
   2041   return pos.IsInstructionStart() &&
   2042       InstructionAt(pos.InstructionIndex())->IsLabel();
   2043 }
   2044 
   2045 
   2046 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
   2047   ASSERT(!range->IsFixed());
   2048   TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
   2049 
   2050   if (pos.Value() <= range->Start().Value()) return range;
   2051 
    2052   // We can't properly connect live ranges if the split occurred at the end
    2053   // of a control instruction.
   2054   ASSERT(pos.IsInstructionStart() ||
   2055          !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
   2056 
   2057   int vreg = GetVirtualRegister();
   2058   if (!AllocationOk()) return NULL;
   2059   LiveRange* result = LiveRangeFor(vreg);
   2060   range->SplitAt(pos, result, zone());
   2061   return result;
   2062 }
   2063 
   2064 
   2065 LiveRange* LAllocator::SplitBetween(LiveRange* range,
   2066                                     LifetimePosition start,
   2067                                     LifetimePosition end) {
   2068   ASSERT(!range->IsFixed());
   2069   TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
   2070              range->id(),
   2071              start.Value(),
   2072              end.Value());
   2073 
   2074   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
   2075   ASSERT(split_pos.Value() >= start.Value());
   2076   return SplitRangeAt(range, split_pos);
   2077 }
   2078 
   2079 
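         // Choose a split position inside [start, end] that avoids placing the
         // resulting moves inside a loop: if the interval crosses a loop boundary,
         // prefer the first instruction of the outermost loop header contained in
         // the interval.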
   2080 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
   2081                                                  LifetimePosition end) {
   2082   int start_instr = start.InstructionIndex();
   2083   int end_instr = end.InstructionIndex();
   2084   ASSERT(start_instr <= end_instr);
   2085 
    2086   // We have no choice.
   2087   if (start_instr == end_instr) return end;
   2088 
   2089   HBasicBlock* start_block = GetBlock(start);
   2090   HBasicBlock* end_block = GetBlock(end);
   2091 
   2092   if (end_block == start_block) {
   2093     // The interval is split in the same basic block. Split at the latest
   2094     // possible position.
   2095     return end;
   2096   }
   2097 
   2098   HBasicBlock* block = end_block;
    2099   // Find the header of the outermost loop.
   2100   while (block->parent_loop_header() != NULL &&
   2101       block->parent_loop_header()->block_id() > start_block->block_id()) {
   2102     block = block->parent_loop_header();
   2103   }
   2104 
   2105   // We did not find any suitable outer loop. Split at the latest possible
   2106   // position unless end_block is a loop header itself.
   2107   if (block == end_block && !end_block->IsLoopHeader()) return end;
   2108 
   2109   return LifetimePosition::FromInstructionIndex(
   2110       block->first_instruction_index());
   2111 }
   2112 
   2113 
   2114 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
   2115   LiveRange* second_part = SplitRangeAt(range, pos);
   2116   if (!AllocationOk()) return;
   2117   Spill(second_part);
   2118 }
   2119 
   2120 
   2121 void LAllocator::SpillBetween(LiveRange* range,
   2122                               LifetimePosition start,
   2123                               LifetimePosition end) {
   2124   SpillBetweenUntil(range, start, start, end);
   2125 }
   2126 
   2127 
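         // Split off the part of the range after 'start'. If that tail is still
         // live before 'end', split it again no earlier than 'until', spill only
         // the middle piece, and return the remainder to the unhandled list;
         // otherwise the whole tail goes back to unhandled unspilled.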
   2128 void LAllocator::SpillBetweenUntil(LiveRange* range,
   2129                                    LifetimePosition start,
   2130                                    LifetimePosition until,
   2131                                    LifetimePosition end) {
   2132   CHECK(start.Value() < end.Value());
   2133   LiveRange* second_part = SplitRangeAt(range, start);
   2134   if (!AllocationOk()) return;
   2135 
   2136   if (second_part->Start().Value() < end.Value()) {
   2137     // The split result intersects with [start, end[.
    2138     // Split it at a position inside ]start+1, end[, spill the middle part
    2139     // and put the rest on the unhandled list.
   2140     LiveRange* third_part = SplitBetween(
   2141         second_part,
   2142         Max(second_part->Start().InstructionEnd(), until),
   2143         end.PrevInstruction().InstructionEnd());
   2144     if (!AllocationOk()) return;
   2145 
   2146     ASSERT(third_part != second_part);
   2147 
   2148     Spill(second_part);
   2149     AddToUnhandledSorted(third_part);
   2150   } else {
   2151     // The split result does not intersect with [start, end[.
    2152     // Nothing to spill. Just put it on the unhandled list as a whole.
   2153     AddToUnhandledSorted(second_part);
   2154   }
   2155 }
   2156 
   2157 
   2158 void LAllocator::Spill(LiveRange* range) {
   2159   ASSERT(!range->IsSpilled());
   2160   TraceAlloc("Spilling live range %d\n", range->id());
   2161   LiveRange* first = range->TopLevel();
   2162 
   2163   if (!first->HasAllocatedSpillOperand()) {
   2164     LOperand* op = TryReuseSpillSlot(range);
   2165     if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
   2166     first->SetSpillOperand(op);
   2167   }
   2168   range->MakeSpilled(chunk()->zone());
   2169 }
   2170 
   2171 
   2172 int LAllocator::RegisterCount() const {
   2173   return num_registers_;
   2174 }
   2175 
   2176 
   2177 #ifdef DEBUG
   2178 
   2179 
   2180 void LAllocator::Verify() const {
   2181   for (int i = 0; i < live_ranges()->length(); ++i) {
   2182     LiveRange* current = live_ranges()->at(i);
   2183     if (current != NULL) current->Verify();
   2184   }
   2185 }
   2186 
   2187 
   2188 #endif
   2189 
   2190 
   2191 LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
   2192     : CompilationPhase(name, allocator->graph()->info()),
   2193       allocator_(allocator) {
   2194   if (FLAG_hydrogen_stats) {
   2195     allocator_zone_start_allocation_size_ =
   2196         allocator->zone()->allocation_size();
   2197   }
   2198 }
   2199 
   2200 
   2201 LAllocatorPhase::~LAllocatorPhase() {
   2202   if (FLAG_hydrogen_stats) {
   2203     unsigned size = allocator_->zone()->allocation_size() -
   2204                     allocator_zone_start_allocation_size_;
   2205     isolate()->GetHStatistics()->SaveTiming(name(), TimeDelta(), size);
   2206   }
   2207 
   2208   if (ShouldProduceTraceOutput()) {
   2209     isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
   2210     isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
   2211   }
   2212 
   2213 #ifdef DEBUG
   2214   if (allocator_ != NULL) allocator_->Verify();
   2215 #endif
   2216 }
   2217 
   2218 
   2219 } }  // namespace v8::internal
   2220