// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_REGISTER_ALLOCATOR_H_
#define V8_REGISTER_ALLOCATOR_H_

#include "src/base/compiler-specific.h"
#include "src/compiler/instruction.h"
#include "src/globals.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };

// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//  [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//  [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
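// For example (illustrative, using the constants kStep == 4 and
// kHalfStep == 2 defined below), the instruction at index 3 has these
// four positions:
//
//   GapFromInstructionIndex(3).value()               == 12  // gap START
//   GapFromInstructionIndex(3).End().value()         == 13  // gap END
//   InstructionFromInstructionIndex(3).value()       == 14  // instr. START
//   InstructionFromInstructionIndex(3).End().value() == 15  // instr. END
//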
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }
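  // For instance (illustrative, with kStep == 4): for positions 14 and 17
  // this returns true, since the gap START at 16 lies strictly between
  // them; for positions 14 and 15 (the START and END of one instruction)
  // it returns false.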

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK(value_ >= kHalfStep);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  // Code relies on kStep and kHalfStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kHalfStep));

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);


// Representation of the non-empty interval [start,end[.
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with the other interval, returns the
  // smallest position that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }
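  // E.g. (illustrative): [12, 20) intersected with [16, 24) yields
  // position 16, the smallest position contained in both intervals.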

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }
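  // Worked example (illustrative, with kStep == 4): the interval [14, 21)
  // starts at the instruction START of index 3 and ends at the gap END of
  // index 5, so FirstGapIndex() == 4 (the gap of index 3 is not covered)
  // and LastGapIndex() == 5.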

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  DISALLOW_COPY_AND_ASSIGN(UseInterval);
};


enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };


enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};


static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;

static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
              "kUnassignedRegister too small");

// Representation of a use position.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};


class SpillRange;
class RegisterAllocationData;
class TopLevelLiveRange;
class LiveRangeGroup;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring a stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the lifetime position for which a register is beneficial in this
  // live range and which follows both start and the last processed use
  // position.
  LifetimePosition NextLifetimePositionRegisterIsBeneficial(
      const LifetimePosition& start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns whether this live range can be spilled at the given position.
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start from
  // position.
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);
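  // For instance (illustrative): splitting a range covering [12, 28) at
  // position 20 leaves this range covering [12, 20) and returns a child
  // covering [20, 28); afterwards next() on this range yields that child.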

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};


class LiveRangeGroup final : public ZoneObject {
 public:
  explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
  ZoneVector<LiveRange*>& ranges() { return ranges_; }
  const ZoneVector<LiveRange*>& ranges() const { return ranges_; }

  int assigned_register() const { return assigned_register_; }
  void set_assigned_register(int reg) { assigned_register_ = reg; }

 private:
  ZoneVector<LiveRange*> ranges_;
  int assigned_register_;
  DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
};

class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK(spill_type() == SpillType::kSpillOperand);
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK(spill_type() != SpillType::kSpillOperand);
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK(spill_type() == SpillType::kSpillRange);
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // every non-spilled child with a use position requiring a slot is itself
  // contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks. That way we avoid spilling at definition
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. In that case, we insert here spills for non-spilled
  // ranges with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#ifdef DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }
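  // Note: a splinter delegates to the range it was splintered from, so child
  // ids stay unique across the whole splinter family (compare relative_id_,
  // which is unique among children and splinters of the same virtual
  // register).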

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};

struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);


class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};

class RegisterAllocationData final : public ZoneObject {
 public:
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
    return fixed_simd128_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
    return fixed_simd128_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};

class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};

class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
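  // E.g. FixedLiveRangeID(0) == -1 and FixedLiveRangeID(1) == -2: ids of
  // fixed ranges are negative, so they cannot collide with (non-negative)
  // virtual register numbers; cf. TopLevelLiveRange::IsFixed(), which tests
  // vreg_ < 0.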
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};


class RegisterAllocator : public ZoneObject {
 public:
  RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  const int* allocatable_register_codes() const {
    return allocatable_register_codes_;
  }
  // Returns true iff we must check float register aliasing.
  bool check_fp_aliasing() const { return check_fp_aliasing_; }

  // TODO(mtrofin): explain why splitting in gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand();

  // Split the given range at the given position.
  // If the range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range);

  // If we are trying to spill a range inside a loop, try to
  // hoist the spill position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;
  bool check_fp_aliasing_;

  bool no_combining_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};


class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  ZoneVector<LiveRange*>& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current);
  void AllocateBlockedReg(LiveRange* range);
  bool TrySplitAndSpillSplinter(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  ZoneVector<LiveRange*> unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};


class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};


class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};


class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};


class LiveRangeBoundArray;
// Insert moves of the form
//
//          Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
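// For example (illustrative): if child_k was assigned register r1 and
// child_(k+1) was assigned a spill slot, the connector inserts the move
// "slot = r1" at the position where child_(k+1) starts.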
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_REGISTER_ALLOCATOR_H_