      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
     18 #define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
     19 
     20 #include <iostream>
     21 
     22 #include "nodes.h"
     23 
     24 namespace art {
     25 
     26 class CodeGenerator;
     27 class SsaLivenessAnalysis;
     28 
     29 static constexpr int kNoRegister = -1;
     30 
     31 class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
     32  public:
     33   BlockInfo(ArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
     34       : block_(block),
     35         live_in_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
     36         live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
     37         kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) {
     38     UNUSED(block_);
     39     live_in_.ClearAllBits();
     40     live_out_.ClearAllBits();
     41     kill_.ClearAllBits();
     42   }
     43 
     44  private:
     45   const HBasicBlock& block_;
     46   ArenaBitVector live_in_;
     47   ArenaBitVector live_out_;
     48   ArenaBitVector kill_;
     49 
     50   friend class SsaLivenessAnalysis;
     51 
     52   DISALLOW_COPY_AND_ASSIGN(BlockInfo);
     53 };
     54 
     55 /**
     56  * A live range contains the start and end of a range where an instruction or a temporary
     57  * is live.
     58  */
     59 class LiveRange FINAL : public ArenaObject<kArenaAllocSsaLiveness> {
     60  public:
     61   LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
     62     DCHECK_LT(start, end);
     63     DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
     64   }
     65 
     66   size_t GetStart() const { return start_; }
     67   size_t GetEnd() const { return end_; }
     68   LiveRange* GetNext() const { return next_; }
     69 
     70   bool IntersectsWith(const LiveRange& other) const {
     71     return (start_ >= other.start_ && start_ < other.end_)
     72         || (other.start_ >= start_ && other.start_ < end_);
     73   }
     74 
     75   bool IsBefore(const LiveRange& other) const {
     76     return end_ <= other.start_;
     77   }
     78 
     79   void Dump(std::ostream& stream) const {
     80     stream << "[" << start_ << "," << end_ << ")";
     81   }
     82 
     83   LiveRange* Dup(ArenaAllocator* allocator) const {
     84     return new (allocator) LiveRange(
     85         start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
     86   }
     87 
     88   LiveRange* GetLastRange() {
     89     return next_ == nullptr ? this : next_->GetLastRange();
     90   }
     91 
     92  private:
     93   size_t start_;
     94   size_t end_;
     95   LiveRange* next_;
     96 
     97   friend class LiveInterval;
     98 
     99   DISALLOW_COPY_AND_ASSIGN(LiveRange);
    100 };
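         // Illustrative sketch (assuming an `ArenaAllocator* allocator` is in scope): ranges
         // are half-open, so a range that ends where another starts neither covers that
         // position nor intersects the other range.
         //
         //   LiveRange* a = new (allocator) LiveRange(2, 6, /* next= */ nullptr);
         //   LiveRange* b = new (allocator) LiveRange(6, 10, /* next= */ nullptr);
         //   DCHECK(a->IsBefore(*b));         // [2,6) ends exactly where [6,10) starts.
         //   DCHECK(!a->IntersectsWith(*b));  // Position 6 is not contained in [2,6).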
    101 
    102 /**
    103  * A use position represents a live interval use at a given position.
    104  */
    105 class UsePosition : public ArenaObject<kArenaAllocSsaLiveness> {
    106  public:
    107   UsePosition(HInstruction* user, size_t input_index, size_t position, UsePosition* next)
    108       : user_(user),
    109         input_index_(input_index),
    110         position_(position),
    111         next_(next) {
    112     DCHECK(next_ == nullptr || next->GetPosition() >= GetPosition());
    113   }
    114 
    115   explicit UsePosition(size_t position)
    116       : user_(nullptr),
    117         input_index_(kNoInput),
    118         position_(dchecked_integral_cast<uint32_t>(position)),
    119         next_(nullptr) {
    120   }
    121 
    122   size_t GetPosition() const { return position_; }
    123 
    124   UsePosition* GetNext() const { return next_; }
    125   void SetNext(UsePosition* next) { next_ = next; }
    126 
    127   HInstruction* GetUser() const { return user_; }
    128 
    129   bool IsSynthesized() const { return user_ == nullptr; }
    130 
    131   size_t GetInputIndex() const { return input_index_; }
    132 
    133   void Dump(std::ostream& stream) const {
    134     stream << position_;
    135   }
    136 
    137   HLoopInformation* GetLoopInformation() const {
    138     return user_->GetBlock()->GetLoopInformation();
    139   }
    140 
    141   UsePosition* Dup(ArenaAllocator* allocator) const {
    142     return new (allocator) UsePosition(
    143         user_, input_index_, position_,
    144         next_ == nullptr ? nullptr : next_->Dup(allocator));
    145   }
    146 
    147   bool RequiresRegister() const {
    148     if (IsSynthesized()) return false;
    149     Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
    150     return location.IsUnallocated() && location.RequiresRegisterKind();
    151   }
    152 
    153  private:
    154   static constexpr uint32_t kNoInput = static_cast<uint32_t>(-1);
    155 
    156   HInstruction* const user_;
    157   const size_t input_index_;
    158   const size_t position_;
    159   UsePosition* next_;
    160 
    161   DISALLOW_COPY_AND_ASSIGN(UsePosition);
    162 };
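         // Illustrative sketch (assuming an `ArenaAllocator* allocator` is in scope): the
         // single-argument constructor builds a synthesized use, i.e. a use without a user
         // instruction, and such a use never requires a register.
         //
         //   UsePosition* synthesized = new (allocator) UsePosition(/* position= */ 42);
         //   DCHECK(synthesized->IsSynthesized());
         //   DCHECK(!synthesized->RequiresRegister());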
    163 
    164 /**
     165  * An environment use position represents an environment use of a live interval at a given position.
    166  */
    167 class EnvUsePosition : public ArenaObject<kArenaAllocSsaLiveness> {
    168  public:
    169   EnvUsePosition(HEnvironment* environment,
    170                  size_t input_index,
    171                  size_t position,
    172                  EnvUsePosition* next)
    173       : environment_(environment),
    174         input_index_(input_index),
    175         position_(position),
    176         next_(next) {
    177     DCHECK(environment != nullptr);
    178     DCHECK(next_ == nullptr || next->GetPosition() >= GetPosition());
    179   }
    180 
    181   size_t GetPosition() const { return position_; }
    182 
    183   EnvUsePosition* GetNext() const { return next_; }
    184   void SetNext(EnvUsePosition* next) { next_ = next; }
    185 
    186   HEnvironment* GetEnvironment() const { return environment_; }
    187   size_t GetInputIndex() const { return input_index_; }
    188 
    189   void Dump(std::ostream& stream) const {
    190     stream << position_;
    191   }
    192 
    193   EnvUsePosition* Dup(ArenaAllocator* allocator) const {
    194     return new (allocator) EnvUsePosition(
    195         environment_, input_index_, position_,
    196         next_ == nullptr ? nullptr : next_->Dup(allocator));
    197   }
    198 
    199  private:
    200   HEnvironment* const environment_;
    201   const size_t input_index_;
    202   const size_t position_;
    203   EnvUsePosition* next_;
    204 
    205   DISALLOW_COPY_AND_ASSIGN(EnvUsePosition);
    206 };
    207 
    208 class SafepointPosition : public ArenaObject<kArenaAllocSsaLiveness> {
    209  public:
    210   explicit SafepointPosition(HInstruction* instruction)
    211       : instruction_(instruction),
    212         next_(nullptr) {}
    213 
    214   void SetNext(SafepointPosition* next) {
    215     next_ = next;
    216   }
    217 
    218   size_t GetPosition() const {
    219     return instruction_->GetLifetimePosition();
    220   }
    221 
    222   SafepointPosition* GetNext() const {
    223     return next_;
    224   }
    225 
    226   LocationSummary* GetLocations() const {
    227     return instruction_->GetLocations();
    228   }
    229 
    230   HInstruction* GetInstruction() const {
    231     return instruction_;
    232   }
    233 
    234  private:
    235   HInstruction* const instruction_;
    236   SafepointPosition* next_;
    237 
    238   DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
    239 };
    240 
    241 /**
    242  * An interval is a list of disjoint live ranges where an instruction is live.
    243  * Each instruction that has uses gets an interval.
    244  */
    245 class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
    246  public:
    247   static LiveInterval* MakeInterval(ArenaAllocator* allocator,
    248                                     Primitive::Type type,
    249                                     HInstruction* instruction = nullptr) {
    250     return new (allocator) LiveInterval(allocator, type, instruction);
    251   }
    252 
    253   static LiveInterval* MakeFixedInterval(ArenaAllocator* allocator, int reg, Primitive::Type type) {
    254     return new (allocator) LiveInterval(allocator, type, nullptr, true, reg, false);
    255   }
    256 
    257   static LiveInterval* MakeTempInterval(ArenaAllocator* allocator, Primitive::Type type) {
    258     return new (allocator) LiveInterval(allocator, type, nullptr, false, kNoRegister, true);
    259   }
    260 
    261   bool IsFixed() const { return is_fixed_; }
    262   bool IsTemp() const { return is_temp_; }
    263   // This interval is the result of a split.
    264   bool IsSplit() const { return parent_ != this; }
    265 
    266   void AddTempUse(HInstruction* instruction, size_t temp_index) {
    267     DCHECK(IsTemp());
    268     DCHECK(first_use_ == nullptr) << "A temporary can only have one user";
     269     DCHECK(first_env_use_ == nullptr) << "A temporary cannot have an environment user";
    270     size_t position = instruction->GetLifetimePosition();
    271     first_use_ = new (allocator_) UsePosition(
    272         instruction, temp_index, position, first_use_);
    273     AddRange(position, position + 1);
    274   }
    275 
     276   // Record the use of an input. The use will be recorded as an environment use if
     277   // `environment` is not null, and as a register use otherwise. If `actual_user`
     278   // is specified, the use will be recorded at `actual_user`'s lifetime position.
    279   void AddUse(HInstruction* instruction,
    280               HEnvironment* environment,
    281               size_t input_index,
    282               HInstruction* actual_user = nullptr,
    283               bool keep_alive = false) {
    284     bool is_environment = (environment != nullptr);
    285     LocationSummary* locations = instruction->GetLocations();
    286     if (actual_user == nullptr) {
    287       actual_user = instruction;
    288     }
    289 
    290     // Set the use within the instruction.
    291     size_t position = actual_user->GetLifetimePosition() + 1;
    292     if (!is_environment) {
    293       if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
     294         // For fixed inputs and outputs that are the same as an input, the register
     295         // allocator requires inputs to die at the instruction, so that input moves use
     296         // the location of the input just before that instruction (and not potential
     297         // moves due to splitting).
    298         DCHECK_EQ(instruction, actual_user);
    299         position = actual_user->GetLifetimePosition();
    300       } else if (!locations->InAt(input_index).IsValid()) {
    301         return;
    302       }
    303     }
    304 
    305     if (!is_environment && instruction->IsInLoop()) {
    306       AddBackEdgeUses(*instruction->GetBlock());
    307     }
    308 
    309     if ((first_use_ != nullptr)
    310         && (first_use_->GetUser() == actual_user)
    311         && (first_use_->GetPosition() < position)) {
     312       // The user uses the instruction multiple times, and one use dies before the other.
     313       // We update the use list so that the later use comes first.
    314       DCHECK(!is_environment);
    315       UsePosition* cursor = first_use_;
    316       while ((cursor->GetNext() != nullptr) && (cursor->GetNext()->GetPosition() < position)) {
    317         cursor = cursor->GetNext();
    318       }
    319       DCHECK(first_use_->GetPosition() + 1 == position);
    320       UsePosition* new_use = new (allocator_) UsePosition(
    321           instruction, input_index, position, cursor->GetNext());
    322       cursor->SetNext(new_use);
    323       if (first_range_->GetEnd() == first_use_->GetPosition()) {
    324         first_range_->end_ = position;
    325       }
    326       return;
    327     }
    328 
    329     if (is_environment) {
    330       first_env_use_ = new (allocator_) EnvUsePosition(
    331           environment, input_index, position, first_env_use_);
    332     } else {
    333       first_use_ = new (allocator_) UsePosition(
    334           instruction, input_index, position, first_use_);
    335     }
    336 
    337     if (is_environment && !keep_alive) {
    338       // If this environment use does not keep the instruction live, it does not
    339       // affect the live range of that instruction.
    340       return;
    341     }
    342 
    343     size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    344     if (first_range_ == nullptr) {
    345       // First time we see a use of that interval.
    346       first_range_ = last_range_ = range_search_start_ =
    347           new (allocator_) LiveRange(start_block_position, position, nullptr);
    348     } else if (first_range_->GetStart() == start_block_position) {
    349       // There is a use later in the same block or in a following block.
     350       // Note that in such a case, `AddRange` for whole blocks has already been called
    351       // before arriving in this method, and this is the reason the start of
    352       // `first_range_` is before the given `position`.
    353       DCHECK_LE(position, first_range_->GetEnd());
    354     } else {
    355       DCHECK(first_range_->GetStart() > position);
    356       // There is a hole in the interval. Create a new range.
    357       // Note that the start of `first_range_` can be equal to `end`: two blocks
    358       // having adjacent lifetime positions are not necessarily
    359       // predecessor/successor. When two blocks are predecessor/successor, the
    360       // liveness algorithm has called `AddRange` before arriving in this method,
     361       // and the `DCHECK_LE(position, first_range_->GetEnd())` check above would succeed.
    362       first_range_ = range_search_start_ =
    363           new (allocator_) LiveRange(start_block_position, position, first_range_);
    364     }
    365   }
    366 
    367   void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    368     DCHECK(instruction->IsPhi());
    369     if (block->IsInLoop()) {
    370       AddBackEdgeUses(*block);
    371     }
    372     first_use_ = new (allocator_) UsePosition(
    373         instruction, input_index, block->GetLifetimeEnd(), first_use_);
    374   }
    375 
    376   ALWAYS_INLINE void AddRange(size_t start, size_t end) {
    377     if (first_range_ == nullptr) {
    378       first_range_ = last_range_ = range_search_start_ =
    379           new (allocator_) LiveRange(start, end, first_range_);
    380     } else if (first_range_->GetStart() == end) {
    381       // There is a use in the following block.
    382       first_range_->start_ = start;
    383     } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
    384       DCHECK(is_fixed_);
    385     } else {
    386       DCHECK_GT(first_range_->GetStart(), end);
    387       // There is a hole in the interval. Create a new range.
    388       first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
    389     }
    390   }
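           // Illustrative sketch of how AddRange is used: liveness is computed in reverse
           // linear order, so ranges are added back to front. Given a hypothetical fresh
           // `LiveInterval* interval`:
           //
           //   interval->AddRange(20, 28);  // Last block in linear order.
           //   interval->AddRange(12, 20);  // Adjacent earlier block: first range becomes [12,28).
           //   interval->AddRange(2, 8);    // Earlier block across a hole: prepends [2,8).
           //   // Ranges are now { [2,8) [12,28) }.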
    391 
    392   void AddLoopRange(size_t start, size_t end) {
    393     DCHECK(first_range_ != nullptr);
    394     DCHECK_LE(start, first_range_->GetStart());
    395     // Find the range that covers the positions after the loop.
    396     LiveRange* after_loop = first_range_;
    397     LiveRange* last_in_loop = nullptr;
    398     while (after_loop != nullptr && after_loop->GetEnd() < end) {
    399       DCHECK_LE(start, after_loop->GetStart());
    400       last_in_loop = after_loop;
    401       after_loop = after_loop->GetNext();
    402     }
    403     if (after_loop == nullptr) {
    404       // Uses are only in the loop.
    405       first_range_ = last_range_ = range_search_start_ =
    406           new (allocator_) LiveRange(start, end, nullptr);
    407     } else if (after_loop->GetStart() <= end) {
    408       first_range_ = range_search_start_ = after_loop;
    409       // There are uses after the loop.
    410       first_range_->start_ = start;
    411     } else {
    412       // The use after the loop is after a lifetime hole.
    413       DCHECK(last_in_loop != nullptr);
    414       first_range_ = range_search_start_ = last_in_loop;
    415       first_range_->start_ = start;
    416       first_range_->end_ = end;
    417     }
    418   }
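           // Illustrative sketch: given ranges { [12,16) [30,40) } and a loop spanning
           // lifetime positions [10,34), AddLoopRange merges everything the loop covers
           // into the range extending past the loop:
           //
           //   interval->AddLoopRange(10, 34);
           //   // Ranges are now { [10,40) }.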
    419 
    420   bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
    421   void SetSpillSlot(int slot) {
    422     DCHECK(!is_fixed_);
    423     DCHECK(!is_temp_);
    424     spill_slot_ = slot;
    425   }
    426   int GetSpillSlot() const { return spill_slot_; }
    427 
    428   void SetFrom(size_t from) {
    429     if (first_range_ != nullptr) {
    430       first_range_->start_ = from;
    431     } else {
    432       // Instruction without uses.
    433       DCHECK(first_use_ == nullptr);
    434       DCHECK(from == defined_by_->GetLifetimePosition());
    435       first_range_ = last_range_ = range_search_start_ =
    436           new (allocator_) LiveRange(from, from + 2, nullptr);
    437     }
    438   }
    439 
    440   LiveInterval* GetParent() const { return parent_; }
    441 
    442   // Returns whether this interval is the parent interval, that is, the interval
    443   // that starts where the HInstruction is defined.
    444   bool IsParent() const { return parent_ == this; }
    445 
    446   LiveRange* GetFirstRange() const { return first_range_; }
    447   LiveRange* GetLastRange() const { return last_range_; }
    448 
    449   int GetRegister() const { return register_; }
    450   void SetRegister(int reg) { register_ = reg; }
    451   void ClearRegister() { register_ = kNoRegister; }
    452   bool HasRegister() const { return register_ != kNoRegister; }
    453 
    454   bool IsDeadAt(size_t position) const {
    455     return GetEnd() <= position;
    456   }
    457 
    458   bool IsDefinedAt(size_t position) const {
    459     return GetStart() <= position && !IsDeadAt(position);
    460   }
    461 
    462   // Returns true if the interval contains a LiveRange covering `position`.
    463   // The range at or immediately after the current position of linear scan
    464   // is cached for better performance. If `position` can be smaller than
    465   // that, CoversSlow should be used instead.
    466   bool Covers(size_t position) {
    467     LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
    468     range_search_start_ = candidate;
    469     return (candidate != nullptr && candidate->GetStart() <= position);
    470   }
    471 
    472   // Same as Covers but always tests all ranges.
    473   bool CoversSlow(size_t position) const {
    474     LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
    475     return candidate != nullptr && candidate->GetStart() <= position;
    476   }
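           // Illustrative sketch of the caching contract: during a linear scan, positions
           // passed to Covers should be non-decreasing; use CoversSlow (or ResetSearchCache)
           // when querying a smaller position again. With ranges { [4,10) [20,30) }:
           //
           //   interval->ResetSearchCache();
           //   interval->Covers(6);      // True; caches the range [4,10).
           //   interval->Covers(22);     // True; advances the cache to [20,30).
           //   interval->CoversSlow(6);  // Still true; the slow query ignores the cache.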
    477 
    478   // Returns the first intersection of this interval with `current`, which
    479   // must be the interval currently being allocated by linear scan.
    480   size_t FirstIntersectionWith(LiveInterval* current) const {
    481     // Find the first range after the start of `current`. We use the search
    482     // cache to improve performance.
    483     DCHECK(GetStart() <= current->GetStart() || IsFixed());
    484     LiveRange* other_range = current->first_range_;
    485     LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
    486     if (my_range == nullptr) {
    487       return kNoLifetime;
    488     }
    489 
    490     // Advance both intervals and find the first matching range start in
    491     // this interval.
    492     do {
    493       if (my_range->IsBefore(*other_range)) {
    494         my_range = my_range->GetNext();
    495         if (my_range == nullptr) {
    496           return kNoLifetime;
    497         }
    498       } else if (other_range->IsBefore(*my_range)) {
    499         other_range = other_range->GetNext();
    500         if (other_range == nullptr) {
    501           return kNoLifetime;
    502         }
    503       } else {
    504         DCHECK(my_range->IntersectsWith(*other_range));
    505         return std::max(my_range->GetStart(), other_range->GetStart());
    506       }
    507     } while (true);
    508   }
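           // Worked example: if this interval has ranges { [4,10) [20,30) } and `current`
           // is a single range [8,24), the walk above stops at the overlap of [4,10) and
           // [8,24), so FirstIntersectionWith(current) returns 8.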
    509 
    510   size_t GetStart() const {
    511     return first_range_->GetStart();
    512   }
    513 
    514   size_t GetEnd() const {
    515     return last_range_->GetEnd();
    516   }
    517 
    518   size_t GetLength() const {
    519     return GetEnd() - GetStart();
    520   }
    521 
    522   size_t FirstRegisterUseAfter(size_t position) const {
    523     if (is_temp_) {
    524       return position == GetStart() ? position : kNoLifetime;
    525     }
    526 
    527     if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
    528       return position;
    529     }
    530 
    531     UsePosition* use = first_use_;
    532     size_t end = GetEnd();
    533     while (use != nullptr && use->GetPosition() <= end) {
    534       size_t use_position = use->GetPosition();
    535       if (use_position > position) {
    536         if (use->RequiresRegister()) {
    537           return use_position;
    538         }
    539       }
    540       use = use->GetNext();
    541     }
    542     return kNoLifetime;
    543   }
    544 
    545   // Returns the location of the first register use for this live interval,
    546   // including a register definition if applicable.
    547   size_t FirstRegisterUse() const {
    548     return FirstRegisterUseAfter(GetStart());
    549   }
    550 
    551   // Whether the interval requires a register rather than a stack location.
    552   // If needed for performance, this could be cached.
    553   bool RequiresRegister() const {
    554     return !HasRegister() && FirstRegisterUse() != kNoLifetime;
    555   }
    556 
    557   size_t FirstUseAfter(size_t position) const {
    558     if (is_temp_) {
    559       return position == GetStart() ? position : kNoLifetime;
    560     }
    561 
    562     if (IsDefiningPosition(position)) {
    563       DCHECK(defined_by_->GetLocations()->Out().IsValid());
    564       return position;
    565     }
    566 
    567     UsePosition* use = first_use_;
    568     size_t end = GetEnd();
    569     while (use != nullptr && use->GetPosition() <= end) {
    570       size_t use_position = use->GetPosition();
    571       if (use_position > position) {
    572         return use_position;
    573       }
    574       use = use->GetNext();
    575     }
    576     return kNoLifetime;
    577   }
    578 
    579   UsePosition* GetFirstUse() const {
    580     return first_use_;
    581   }
    582 
    583   EnvUsePosition* GetFirstEnvironmentUse() const {
    584     return first_env_use_;
    585   }
    586 
    587   Primitive::Type GetType() const {
    588     return type_;
    589   }
    590 
    591   HInstruction* GetDefinedBy() const {
    592     return defined_by_;
    593   }
    594 
    595   bool HasWillCallSafepoint() const {
    596     for (SafepointPosition* safepoint = first_safepoint_;
    597          safepoint != nullptr;
    598          safepoint = safepoint->GetNext()) {
    599       if (safepoint->GetLocations()->WillCall()) return true;
    600     }
    601     return false;
    602   }
    603 
    604   SafepointPosition* FindSafepointJustBefore(size_t position) const {
    605     for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
    606          safepoint != nullptr;
    607          previous = safepoint, safepoint = safepoint->GetNext()) {
    608       if (safepoint->GetPosition() >= position) return previous;
    609     }
    610     return last_safepoint_;
    611   }
    612 
    613   /**
    614    * Split this interval at `position`. This interval is changed to:
    615    * [start ... position).
    616    *
    617    * The new interval covers:
    618    * [position ... end)
    619    */
    620   LiveInterval* SplitAt(size_t position) {
    621     DCHECK(!is_temp_);
    622     DCHECK(!is_fixed_);
    623     DCHECK_GT(position, GetStart());
    624 
    625     if (GetEnd() <= position) {
    626       // This range dies before `position`, no need to split.
    627       return nullptr;
    628     }
    629 
    630     LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    631     SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
    632     if (new_last_safepoint == nullptr) {
    633       new_interval->first_safepoint_ = first_safepoint_;
    634       new_interval->last_safepoint_ = last_safepoint_;
    635       first_safepoint_ = last_safepoint_ = nullptr;
    636     } else if (last_safepoint_ != new_last_safepoint) {
    637       new_interval->last_safepoint_ = last_safepoint_;
    638       new_interval->first_safepoint_ = new_last_safepoint->GetNext();
    639       DCHECK(new_interval->first_safepoint_ != nullptr);
    640       last_safepoint_ = new_last_safepoint;
    641       last_safepoint_->SetNext(nullptr);
    642     }
    643 
    644     new_interval->next_sibling_ = next_sibling_;
    645     next_sibling_ = new_interval;
    646     new_interval->parent_ = parent_;
    647 
    648     new_interval->first_use_ = first_use_;
    649     new_interval->first_env_use_ = first_env_use_;
    650     LiveRange* current = first_range_;
    651     LiveRange* previous = nullptr;
    652     // Iterate over the ranges, and either find a range that covers this position, or
    653     // two ranges in between this position (that is, the position is in a lifetime hole).
    654     do {
    655       if (position >= current->GetEnd()) {
    656         // Move to next range.
    657         previous = current;
    658         current = current->next_;
    659       } else if (position <= current->GetStart()) {
    660         // If the previous range did not cover this position, we know position is in
    661         // a lifetime hole. We can just break the first_range_ and last_range_ links
    662         // and return the new interval.
    663         DCHECK(previous != nullptr);
    664         DCHECK(current != first_range_);
    665         new_interval->last_range_ = last_range_;
    666         last_range_ = previous;
    667         previous->next_ = nullptr;
    668         new_interval->first_range_ = current;
    669         if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
    670           // Search start point is inside `new_interval`. Change it to null
    671           // (i.e. the end of the interval) in the original interval.
    672           range_search_start_ = nullptr;
    673         }
    674         new_interval->range_search_start_ = new_interval->first_range_;
    675         return new_interval;
    676       } else {
    677         // This range covers position. We create a new last_range_ for this interval
    678         // that covers last_range_->Start() and position. We also shorten the current
    679         // range and make it the first range of the new interval.
    680         DCHECK(position < current->GetEnd() && position > current->GetStart());
    681         new_interval->last_range_ = last_range_;
    682         last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
    683         if (previous != nullptr) {
    684           previous->next_ = last_range_;
    685         } else {
    686           first_range_ = last_range_;
    687         }
    688         new_interval->first_range_ = current;
    689         current->start_ = position;
    690         if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
    691           // Search start point is inside `new_interval`. Change it to `last_range`
    692           // in the original interval. This is conservative but always correct.
    693           range_search_start_ = last_range_;
    694         }
    695         new_interval->range_search_start_ = new_interval->first_range_;
    696         return new_interval;
    697       }
    698     } while (current != nullptr);
    699 
    700     LOG(FATAL) << "Unreachable";
    701     return nullptr;
    702   }
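           // Illustrative sketch (hypothetical positions): with ranges { [4,10) [20,30) },
           //
           //   LiveInterval* tail = interval->SplitAt(24);
           //   // `interval` now covers [4,10) [20,24) and `tail` covers [24,30).
           //
           // Splitting inside the hole instead, e.g. SplitAt(14), keeps [4,10) here and
           // moves the whole [20,30) range into the new sibling.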
    703 
    704   bool StartsBeforeOrAt(LiveInterval* other) const {
    705     return GetStart() <= other->GetStart();
    706   }
    707 
    708   bool StartsAfter(LiveInterval* other) const {
    709     return GetStart() > other->GetStart();
    710   }
    711 
    712   void Dump(std::ostream& stream) const {
    713     stream << "ranges: { ";
    714     LiveRange* current = first_range_;
    715     while (current != nullptr) {
    716       current->Dump(stream);
    717       stream << " ";
    718       current = current->GetNext();
    719     }
    720     stream << "}, uses: { ";
    721     const UsePosition* use = first_use_;
    722     if (use != nullptr) {
    723       do {
    724         use->Dump(stream);
    725         stream << " ";
    726       } while ((use = use->GetNext()) != nullptr);
    727     }
    728     stream << "}, { ";
    729     const EnvUsePosition* env_use = first_env_use_;
    730     if (env_use != nullptr) {
    731       do {
    732         env_use->Dump(stream);
    733         stream << " ";
    734       } while ((env_use = env_use->GetNext()) != nullptr);
    735     }
    736     stream << "}";
    737     stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
    738     stream << " is_low: " << IsLowInterval();
    739     stream << " is_high: " << IsHighInterval();
    740   }
    741 
    742   // Same as Dump, but adds context such as the instruction defining this interval, and
    743   // the register currently assigned to this interval.
    744   void DumpWithContext(std::ostream& stream, const CodeGenerator& codegen) const;
    745 
    746   LiveInterval* GetNextSibling() const { return next_sibling_; }
    747   LiveInterval* GetLastSibling() {
    748     LiveInterval* result = this;
    749     while (result->next_sibling_ != nullptr) {
    750       result = result->next_sibling_;
    751     }
    752     return result;
    753   }
    754 
    755   // Returns the first register hint that is at least free before
    756   // the value contained in `free_until`. If none is found, returns
    757   // `kNoRegister`.
    758   int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;
    759 
     760   // If there is enough information at the definition site to pick a register (for
     761   // example the output must use the same register as its first input), returns that
     762   // register as a hint. Returns kNoRegister otherwise.
    763   int FindHintAtDefinition() const;
    764 
    765   // Returns the number of required spilling slots (measured as a multiple of the
    766   // Dex virtual register size `kVRegSize`).
    767   size_t NumberOfSpillSlotsNeeded() const;
    768 
    769   bool IsFloatingPoint() const {
    770     return type_ == Primitive::kPrimFloat || type_ == Primitive::kPrimDouble;
    771   }
    772 
    773   // Converts the location of the interval to a `Location` object.
    774   Location ToLocation() const;
    775 
    776   // Returns the location of the interval following its siblings at `position`.
    777   Location GetLocationAt(size_t position);
    778 
    779   // Finds the sibling that is defined at `position`.
    780   LiveInterval* GetSiblingAt(size_t position);
    781 
    782   // Returns whether `other` and `this` share the same kind of register.
    783   bool SameRegisterKind(Location other) const;
    784   bool SameRegisterKind(const LiveInterval& other) const {
    785     return IsFloatingPoint() == other.IsFloatingPoint();
    786   }
    787 
    788   bool HasHighInterval() const {
    789     return IsLowInterval();
    790   }
    791 
    792   bool HasLowInterval() const {
    793     return IsHighInterval();
    794   }
    795 
    796   LiveInterval* GetLowInterval() const {
    797     DCHECK(HasLowInterval());
    798     return high_or_low_interval_;
    799   }
    800 
    801   LiveInterval* GetHighInterval() const {
    802     DCHECK(HasHighInterval());
    803     return high_or_low_interval_;
    804   }
    805 
    806   bool IsHighInterval() const {
    807     return GetParent()->is_high_interval_;
    808   }
    809 
    810   bool IsLowInterval() const {
    811     return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
    812   }
    813 
    814   void SetLowInterval(LiveInterval* low) {
    815     DCHECK(IsHighInterval());
    816     high_or_low_interval_ = low;
    817   }
    818 
    819   void SetHighInterval(LiveInterval* high) {
    820     DCHECK(IsLowInterval());
    821     high_or_low_interval_ = high;
    822   }
    823 
    824   void AddHighInterval(bool is_temp = false) {
    825     DCHECK(IsParent());
    826     DCHECK(!HasHighInterval());
    827     DCHECK(!HasLowInterval());
    828     high_or_low_interval_ = new (allocator_) LiveInterval(
    829         allocator_, type_, defined_by_, false, kNoRegister, is_temp, true);
    830     high_or_low_interval_->high_or_low_interval_ = this;
    831     if (first_range_ != nullptr) {
    832       high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
    833       high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
    834       high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
    835     }
    836     if (first_use_ != nullptr) {
    837       high_or_low_interval_->first_use_ = first_use_->Dup(allocator_);
    838     }
    839 
    840     if (first_env_use_ != nullptr) {
    841       high_or_low_interval_->first_env_use_ = first_env_use_->Dup(allocator_);
    842     }
    843   }
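           // Illustrative sketch (assuming an `ArenaAllocator* allocator`): on a 32-bit
           // target, a wide value (long or double) gets a synthesized high sibling that
           // mirrors the ranges and uses of the low half:
           //
           //   LiveInterval* low = LiveInterval::MakeInterval(allocator, Primitive::kPrimLong);
           //   low->AddRange(10, 20);
           //   low->AddHighInterval();
           //   DCHECK(low->IsLowInterval());
           //   DCHECK(low->GetHighInterval()->IsHighInterval());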
    844 
     845   // Returns whether an interval, when it is non-split, is using
     846   // the same register as one of its inputs.
    847   bool IsUsingInputRegister() const {
    848     CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    849     if (defined_by_ != nullptr && !IsSplit()) {
    850       for (const HInstruction* input : defined_by_->GetInputs()) {
    851         LiveInterval* interval = input->GetLiveInterval();
    852 
     853         // Find the interval that covers `defined_by_`. Calls to this function
    854         // are made outside the linear scan, hence we need to use CoversSlow.
    855         while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
    856           interval = interval->GetNextSibling();
    857         }
    858 
    859         // Check if both intervals have the same register of the same kind.
    860         if (interval != nullptr
    861             && interval->SameRegisterKind(*this)
    862             && interval->GetRegister() == GetRegister()) {
    863           return true;
    864         }
    865       }
    866     }
    867     return false;
    868   }
    869 
     870   // Returns whether an interval, when it is non-split, can safely use
     871   // the same register as one of its inputs. Note that this method requires
    872   // IsUsingInputRegister() to be true.
    873   bool CanUseInputRegister() const {
    874     CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    875     DCHECK(IsUsingInputRegister());
    876     if (defined_by_ != nullptr && !IsSplit()) {
    877       LocationSummary* locations = defined_by_->GetLocations();
    878       if (locations->OutputCanOverlapWithInputs()) {
    879         return false;
    880       }
    881       for (const HInstruction* input : defined_by_->GetInputs()) {
    882         LiveInterval* interval = input->GetLiveInterval();
    883 
     884         // Find the interval that covers `defined_by_`. Calls to this function
    885         // are made outside the linear scan, hence we need to use CoversSlow.
    886         while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
    887           interval = interval->GetNextSibling();
    888         }
    889 
    890         if (interval != nullptr
    891             && interval->SameRegisterKind(*this)
    892             && interval->GetRegister() == GetRegister()) {
    893           // We found the input that has the same register. Check if it is live after
     894           // `defined_by_`.
    895           return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
    896         }
    897       }
    898     }
    899     LOG(FATAL) << "Unreachable";
    900     UNREACHABLE();
    901   }
    902 
    903   void AddSafepoint(HInstruction* instruction) {
    904     SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
    905     if (first_safepoint_ == nullptr) {
    906       first_safepoint_ = last_safepoint_ = safepoint;
    907     } else {
    908       DCHECK_LT(last_safepoint_->GetPosition(), safepoint->GetPosition());
    909       last_safepoint_->SetNext(safepoint);
    910       last_safepoint_ = safepoint;
    911     }
    912   }
    913 
    914   SafepointPosition* GetFirstSafepoint() const {
    915     return first_safepoint_;
    916   }
    917 
    918   // Resets the starting point for range-searching queries to the first range.
    919   // Intervals must be reset prior to starting a new linear scan over them.
    920   void ResetSearchCache() {
    921     range_search_start_ = first_range_;
    922   }
    923 
    924   bool DefinitionRequiresRegister() const {
    925     DCHECK(IsParent());
    926     LocationSummary* locations = defined_by_->GetLocations();
    927     Location location = locations->Out();
    928     // This interval is the first interval of the instruction. If the output
    929     // of the instruction requires a register, we return the position of that instruction
    930     // as the first register use.
    931     if (location.IsUnallocated()) {
    932       if ((location.GetPolicy() == Location::kRequiresRegister)
    933            || (location.GetPolicy() == Location::kSameAsFirstInput
    934                && (locations->InAt(0).IsRegister()
    935                    || locations->InAt(0).IsRegisterPair()
    936                    || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
    937         return true;
    938       } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
    939                  || (location.GetPolicy() == Location::kSameAsFirstInput
    940                      && (locations->InAt(0).IsFpuRegister()
    941                          || locations->InAt(0).IsFpuRegisterPair()
    942                          || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
    943         return true;
    944       }
    945     } else if (location.IsRegister() || location.IsRegisterPair()) {
    946       return true;
    947     }
    948     return false;
    949   }
    950 
    951  private:
    952   LiveInterval(ArenaAllocator* allocator,
    953                Primitive::Type type,
    954                HInstruction* defined_by = nullptr,
    955                bool is_fixed = false,
    956                int reg = kNoRegister,
    957                bool is_temp = false,
    958                bool is_high_interval = false)
    959       : allocator_(allocator),
    960         first_range_(nullptr),
    961         last_range_(nullptr),
    962         range_search_start_(nullptr),
    963         first_safepoint_(nullptr),
    964         last_safepoint_(nullptr),
    965         first_use_(nullptr),
    966         first_env_use_(nullptr),
    967         type_(type),
    968         next_sibling_(nullptr),
    969         parent_(this),
    970         register_(reg),
    971         spill_slot_(kNoSpillSlot),
    972         is_fixed_(is_fixed),
    973         is_temp_(is_temp),
    974         is_high_interval_(is_high_interval),
    975         high_or_low_interval_(nullptr),
    976         defined_by_(defined_by) {}
    977 
     978   // Searches for a LiveRange that either covers the given position or is the
     979   // first LiveRange starting after it. Returns null if no such LiveRange exists.
     980   // Ranges known to end before `position` can be skipped with `search_start`.
    981   LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
    982     if (kIsDebugBuild) {
    983       if (search_start != first_range_) {
    984         // If we are not searching the entire list of ranges, make sure we do
    985         // not skip the range we are searching for.
    986         if (search_start == nullptr) {
    987           DCHECK(IsDeadAt(position));
    988         } else if (search_start->GetStart() > position) {
    989           DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
    990         }
    991       }
    992     }
    993 
    994     LiveRange* range;
    995     for (range = search_start;
    996          range != nullptr && range->GetEnd() <= position;
    997          range = range->GetNext()) {
    998       continue;
    999     }
   1000     return range;
   1001   }
   1002 
   1003   bool IsDefiningPosition(size_t position) const {
   1004     return IsParent() && (position == GetStart());
   1005   }
   1006 
   1007   bool HasSynthesizeUseAt(size_t position) const {
   1008     UsePosition* use = first_use_;
   1009     while (use != nullptr) {
   1010       size_t use_position = use->GetPosition();
   1011       if ((use_position == position) && use->IsSynthesized()) {
   1012         return true;
   1013       }
   1014       if (use_position > position) break;
   1015       use = use->GetNext();
   1016     }
   1017     return false;
   1018   }
   1019 
   1020   void AddBackEdgeUses(const HBasicBlock& block_at_use) {
   1021     DCHECK(block_at_use.IsInLoop());
   1022     if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
   1023       // Linear order may not be well formed when irreducible loops are present,
   1024       // i.e. loop blocks may not be adjacent and a back edge may not be last,
   1025       // which violates assumptions made in this method.
   1026       return;
   1027     }
   1028 
   1029     // Add synthesized uses at the back edge of loops to help the register allocator.
    1030     // Note that this method is called in decreasing liveness order, to facilitate adding
   1031     // uses at the head of the `first_use_` linked list. Because below
   1032     // we iterate from inner-most to outer-most, which is in increasing liveness order,
   1033     // we need to take extra care of how the `first_use_` linked list is being updated.
   1034     UsePosition* first_in_new_list = nullptr;
   1035     UsePosition* last_in_new_list = nullptr;
   1036     for (HLoopInformationOutwardIterator it(block_at_use);
   1037          !it.Done();
   1038          it.Advance()) {
   1039       HLoopInformation* current = it.Current();
   1040       if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
   1041         // This interval is defined in the loop. We can stop going outward.
   1042         break;
   1043       }
   1044 
    1045       // We only add a synthesized use at the last back edge. Adding synthesized uses on
    1046       // all back edges is not necessary: anything used in the loop will have its use at the
    1047       // last back edge. If we want one branch in a loop to get better register allocation
    1048       // than another, it is the linear order that should change.
   1049       size_t back_edge_use_position = current->GetLifetimeEnd();
   1050       if ((first_use_ != nullptr) && (first_use_->GetPosition() <= back_edge_use_position)) {
   1051         // There was a use already seen in this loop. Therefore the previous call to `AddUse`
   1052         // already inserted the backedge use. We can stop going outward.
   1053         DCHECK(HasSynthesizeUseAt(back_edge_use_position));
   1054         break;
   1055       }
   1056 
   1057       DCHECK(last_in_new_list == nullptr ||
   1058              back_edge_use_position > last_in_new_list->GetPosition());
   1059 
   1060       UsePosition* new_use = new (allocator_) UsePosition(back_edge_use_position);
   1061 
   1062       if (last_in_new_list != nullptr) {
   1063         // Going outward. The latest created use needs to point to the new use.
   1064         last_in_new_list->SetNext(new_use);
   1065       } else {
   1066         // This is the inner-most loop.
   1067         DCHECK_EQ(current, block_at_use.GetLoopInformation());
   1068         first_in_new_list = new_use;
   1069       }
   1070       last_in_new_list = new_use;
   1071     }
   1072     // Link the newly created linked list with `first_use_`.
   1073     if (last_in_new_list != nullptr) {
   1074       last_in_new_list->SetNext(first_use_);
   1075       first_use_ = first_in_new_list;
   1076     }
   1077   }
   1078 
   1079   ArenaAllocator* const allocator_;
   1080 
   1081   // Ranges of this interval. We need a quick access to the last range to test
    1082   // Ranges of this interval. We need quick access to the last range to test
   1083   LiveRange* first_range_;
   1084   LiveRange* last_range_;
   1085 
   1086   // The first range at or after the current position of a linear scan. It is
   1087   // used to optimize range-searching queries.
   1088   LiveRange* range_search_start_;
   1089 
   1090   // Safepoints where this interval is live.
   1091   SafepointPosition* first_safepoint_;
   1092   SafepointPosition* last_safepoint_;
   1093 
   1094   // Uses of this interval. Note that this linked list is shared amongst siblings.
   1095   UsePosition* first_use_;
   1096   EnvUsePosition* first_env_use_;
   1097 
   1098   // The instruction type this interval corresponds to.
   1099   const Primitive::Type type_;
   1100 
   1101   // Live interval that is the result of a split.
   1102   LiveInterval* next_sibling_;
   1103 
    1104   // The first interval from which split intervals come.
   1105   LiveInterval* parent_;
   1106 
   1107   // The register allocated to this interval.
   1108   int register_;
   1109 
   1110   // The spill slot allocated to this interval.
   1111   int spill_slot_;
   1112 
   1113   // Whether the interval is for a fixed register.
   1114   const bool is_fixed_;
   1115 
   1116   // Whether the interval is for a temporary.
   1117   const bool is_temp_;
   1118 
    1119   // Whether this interval is a synthesized interval for a register pair.
   1120   const bool is_high_interval_;
   1121 
   1122   // If this interval needs a register pair, the high or low equivalent.
   1123   // `is_high_interval_` tells whether this holds the low or the high.
   1124   LiveInterval* high_or_low_interval_;
   1125 
   1126   // The instruction represented by this interval.
   1127   HInstruction* const defined_by_;
   1128 
   1129   static constexpr int kNoRegister = -1;
   1130   static constexpr int kNoSpillSlot = -1;
   1131 
   1132   ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
   1133 
   1134   DISALLOW_COPY_AND_ASSIGN(LiveInterval);
   1135 };
   1136 
   1137 /**
   1138  * Analysis that computes the liveness of instructions:
   1139  *
   1140  * (a) Non-environment uses of an instruction always make
   1141  *     the instruction live.
   1142  * (b) Environment uses of an instruction whose type is
    1143  *     object (that is, non-primitive) make the instruction live.
    1144  *     This is because objects with finalizers that delete native
    1145  *     objects must be kept alive.
   1146  * (c) When the graph has the debuggable property, environment uses
   1147  *     of an instruction that has a primitive type make the instruction live.
   1148  *     If the graph does not have the debuggable property, the environment
   1149  *     use has no effect, and may get a 'none' value after register allocation.
   1150  *
   1151  * (b) and (c) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
   1152  */
   1153 class SsaLivenessAnalysis : public ValueObject {
   1154  public:
   1155   SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen)
   1156       : graph_(graph),
   1157         codegen_(codegen),
   1158         block_infos_(graph->GetBlocks().size(),
   1159                      nullptr,
   1160                      graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
   1161         instructions_from_ssa_index_(graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
   1162         instructions_from_lifetime_position_(graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
   1163         number_of_ssa_values_(0) {
   1164   }
   1165 
   1166   void Analyze();
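           // Typical driver sketch (assuming `graph` and `codegen` come from the enclosing
           // compiler pass):
           //
           //   SsaLivenessAnalysis liveness(graph, codegen);
           //   liveness.Analyze();
           //   BitVector* live_in = liveness.GetLiveInSet(*graph->GetEntryBlock());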
   1167 
   1168   BitVector* GetLiveInSet(const HBasicBlock& block) const {
   1169     return &block_infos_[block.GetBlockId()]->live_in_;
   1170   }
   1171 
   1172   BitVector* GetLiveOutSet(const HBasicBlock& block) const {
   1173     return &block_infos_[block.GetBlockId()]->live_out_;
   1174   }
   1175 
   1176   BitVector* GetKillSet(const HBasicBlock& block) const {
   1177     return &block_infos_[block.GetBlockId()]->kill_;
   1178   }
   1179 
   1180   HInstruction* GetInstructionFromSsaIndex(size_t index) const {
   1181     return instructions_from_ssa_index_[index];
   1182   }
   1183 
   1184   HInstruction* GetInstructionFromPosition(size_t index) const {
   1185     return instructions_from_lifetime_position_[index];
   1186   }
   1187 
   1188   HBasicBlock* GetBlockFromPosition(size_t index) const {
   1189     HInstruction* instruction = GetInstructionFromPosition(index);
   1190     if (instruction == nullptr) {
    1191       // If we are at a block boundary, get the following block.
   1192       instruction = GetInstructionFromPosition(index + 1);
   1193     }
   1194     return instruction->GetBlock();
   1195   }
   1196 
   1197   bool IsAtBlockBoundary(size_t index) const {
   1198     return GetInstructionFromPosition(index) == nullptr;
   1199   }
   1200 
   1201   HInstruction* GetTempUser(LiveInterval* temp) const {
   1202     // A temporary shares the same lifetime start as the instruction that requires it.
   1203     DCHECK(temp->IsTemp());
   1204     HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
   1205     DCHECK_EQ(user, temp->GetFirstUse()->GetUser());
   1206     return user;
   1207   }
   1208 
   1209   size_t GetTempIndex(LiveInterval* temp) const {
   1210     // We use the input index to store the index of the temporary in the user's temporary list.
   1211     DCHECK(temp->IsTemp());
   1212     return temp->GetFirstUse()->GetInputIndex();
   1213   }
   1214 
   1215   size_t GetMaxLifetimePosition() const {
   1216     return instructions_from_lifetime_position_.size() * 2 - 1;
   1217   }
   1218 
   1219   size_t GetNumberOfSsaValues() const {
   1220     return number_of_ssa_values_;
   1221   }
   1222 
   1223   static constexpr const char* kLivenessPassName = "liveness";
   1224 
   1225  private:
   1226   // Give an SSA number to each instruction that defines a value used by another instruction,
    1227   // and set up the lifetime information of each instruction and block.
   1228   void NumberInstructions();
   1229 
   1230   // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
   1231   void ComputeLiveness();
   1232 
   1233   // Compute the live ranges of instructions, as well as the initial live_in, live_out and
   1234   // kill sets, that do not take into account backward branches.
   1235   void ComputeLiveRanges();
   1236 
   1237   // After computing the initial sets, this method does a fixed point
   1238   // calculation over the live_in and live_out set to take into account
   1239   // backwards branches.
   1240   void ComputeLiveInAndLiveOutSets();
   1241 
    1242   // Updates the live_in set of the block and returns whether it has changed.
   1243   bool UpdateLiveIn(const HBasicBlock& block);
   1244 
    1245   // Updates the live_out set of the block and returns whether it has changed.
   1246   bool UpdateLiveOut(const HBasicBlock& block);
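           // For reference, a sketch of the standard backward liveness relations these
           // updates compute (the actual implementation iterates them to a fixed point):
           //
           //   live_out(B) = union of live_in(S) over all successors S of B
           //   live_in(B)  = uses(B) + (live_out(B) - kill(B))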
   1247 
   1248   // Returns whether `instruction` in an HEnvironment held by `env_holder`
   1249   // should be kept live by the HEnvironment.
   1250   static bool ShouldBeLiveForEnvironment(HInstruction* env_holder, HInstruction* instruction) {
   1251     if (instruction == nullptr) return false;
    1252     // A value that's not live in compiled code may still be needed in the interpreter,
   1253     // due to code motion, etc.
   1254     if (env_holder->IsDeoptimize()) return true;
   1255     // A value live at a throwing instruction in a try block may be copied by
   1256     // the exception handler to its location at the top of the catch block.
   1257     if (env_holder->CanThrowIntoCatchBlock()) return true;
   1258     if (instruction->GetBlock()->GetGraph()->IsDebuggable()) return true;
   1259     return instruction->GetType() == Primitive::kPrimNot;
   1260   }
   1261 
   1262   void CheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const {
   1263     if (!block.IsLoopHeader() || !block.GetLoopInformation()->IsIrreducible()) {
   1264       return;
   1265     }
   1266     BitVector* live_in = GetLiveInSet(block);
   1267     // To satisfy our liveness algorithm, we need to ensure loop headers of
   1268     // irreducible loops do not have any live-in instructions, except constants
   1269     // and the current method, which can be trivially re-materialized.
   1270     for (uint32_t idx : live_in->Indexes()) {
   1271       HInstruction* instruction = GetInstructionFromSsaIndex(idx);
   1272       DCHECK(instruction->GetBlock()->IsEntryBlock()) << instruction->DebugName();
   1273       DCHECK(!instruction->IsParameterValue());
   1274       DCHECK(instruction->IsCurrentMethod() || instruction->IsConstant())
   1275           << instruction->DebugName();
   1276     }
   1277   }
   1278 
   1279   HGraph* const graph_;
   1280   CodeGenerator* const codegen_;
   1281   ArenaVector<BlockInfo*> block_infos_;
   1282 
   1283   // Temporary array used when computing live_in, live_out, and kill sets.
   1284   ArenaVector<HInstruction*> instructions_from_ssa_index_;
   1285 
   1286   // Temporary array used when inserting moves in the graph.
   1287   ArenaVector<HInstruction*> instructions_from_lifetime_position_;
   1288   size_t number_of_ssa_values_;
   1289 
   1290   ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
   1291   ART_FRIEND_TEST(RegisterAllocatorTest, FreeUntil);
   1292 
   1293   DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
   1294 };
   1295 
   1296 }  // namespace art
   1297 
   1298 #endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
   1299