// (code-viewer navigation line removed; file lives under compiler/optimizing)
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
     18 #define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
     19 
     20 #include <iostream>
     21 
     22 #include "base/iteration_range.h"
     23 #include "nodes.h"
     24 #include "utils/intrusive_forward_list.h"
     25 
     26 namespace art {
     27 
     28 class CodeGenerator;
     29 class SsaLivenessAnalysis;
     30 
     31 static constexpr int kNoRegister = -1;
     32 
class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  // Per-block liveness state used by SsaLivenessAnalysis: the classic
  // live-in / live-out / kill bit sets, one bit per SSA value.
  BlockInfo(ArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
      : block_(block),
        live_in_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) {
    UNUSED(block_);  // Only read by the friend SsaLivenessAnalysis; silences unused-member warnings.
    live_in_.ClearAllBits();
    live_out_.ClearAllBits();
    kill_.ClearAllBits();
  }

 private:
  const HBasicBlock& block_;  // Block this info describes.
  ArenaBitVector live_in_;    // Values live on entry to the block.
  ArenaBitVector live_out_;   // Values live on exit of the block.
  ArenaBitVector kill_;       // Values defined (killed) in the block.

  friend class SsaLivenessAnalysis;

  DISALLOW_COPY_AND_ASSIGN(BlockInfo);
};
     56 
     57 /**
     58  * A live range contains the start and end of a range where an instruction or a temporary
     59  * is live.
     60  */
     61 class LiveRange FINAL : public ArenaObject<kArenaAllocSsaLiveness> {
     62  public:
     63   LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
     64     DCHECK_LT(start, end);
     65     DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
     66   }
     67 
     68   size_t GetStart() const { return start_; }
     69   size_t GetEnd() const { return end_; }
     70   LiveRange* GetNext() const { return next_; }
     71 
     72   bool IntersectsWith(const LiveRange& other) const {
     73     return (start_ >= other.start_ && start_ < other.end_)
     74         || (other.start_ >= start_ && other.start_ < end_);
     75   }
     76 
     77   bool IsBefore(const LiveRange& other) const {
     78     return end_ <= other.start_;
     79   }
     80 
     81   void Dump(std::ostream& stream) const {
     82     stream << "[" << start_ << "," << end_ << ")";
     83   }
     84 
     85   LiveRange* Dup(ArenaAllocator* allocator) const {
     86     return new (allocator) LiveRange(
     87         start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
     88   }
     89 
     90   LiveRange* GetLastRange() {
     91     return next_ == nullptr ? this : next_->GetLastRange();
     92   }
     93 
     94  private:
     95   size_t start_;
     96   size_t end_;
     97   LiveRange* next_;
     98 
     99   friend class LiveInterval;
    100 
    101   DISALLOW_COPY_AND_ASSIGN(LiveRange);
    102 };
    103 
    104 /**
    105  * A use position represents a live interval use at a given position.
    106  */
    107 class UsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
    108                     public IntrusiveForwardListNode<UsePosition> {
    109  public:
    110   UsePosition(HInstruction* user, size_t input_index, size_t position)
    111       : user_(user),
    112         input_index_(input_index),
    113         position_(position) {
    114   }
    115 
    116   explicit UsePosition(size_t position)
    117       : user_(nullptr),
    118         input_index_(kNoInput),
    119         position_(dchecked_integral_cast<uint32_t>(position)) {
    120   }
    121 
    122   size_t GetPosition() const { return position_; }
    123 
    124   HInstruction* GetUser() const { return user_; }
    125 
    126   bool IsSynthesized() const { return user_ == nullptr; }
    127 
    128   size_t GetInputIndex() const { return input_index_; }
    129 
    130   void Dump(std::ostream& stream) const {
    131     stream << position_;
    132   }
    133 
    134   HLoopInformation* GetLoopInformation() const {
    135     return user_->GetBlock()->GetLoopInformation();
    136   }
    137 
    138   UsePosition* Clone(ArenaAllocator* allocator) const {
    139     return new (allocator) UsePosition(user_, input_index_, position_);
    140   }
    141 
    142   bool RequiresRegister() const {
    143     if (IsSynthesized()) return false;
    144     Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
    145     return location.IsUnallocated() && location.RequiresRegisterKind();
    146   }
    147 
    148  private:
    149   static constexpr uint32_t kNoInput = static_cast<uint32_t>(-1);
    150 
    151   HInstruction* const user_;
    152   const size_t input_index_;
    153   const size_t position_;
    154 
    155   DISALLOW_COPY_AND_ASSIGN(UsePosition);
    156 };
    157 using UsePositionList = IntrusiveForwardList<UsePosition>;
    158 
    159 /**
    160  * An environment use position represents a live interval for environment use at a given position.
    161  */
    162 class EnvUsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
    163                        public IntrusiveForwardListNode<EnvUsePosition> {
    164  public:
    165   EnvUsePosition(HEnvironment* environment,
    166                  size_t input_index,
    167                  size_t position)
    168       : environment_(environment),
    169         input_index_(input_index),
    170         position_(position) {
    171     DCHECK(environment != nullptr);
    172   }
    173 
    174   size_t GetPosition() const { return position_; }
    175 
    176   HEnvironment* GetEnvironment() const { return environment_; }
    177   size_t GetInputIndex() const { return input_index_; }
    178 
    179   void Dump(std::ostream& stream) const {
    180     stream << position_;
    181   }
    182 
    183   EnvUsePosition* Clone(ArenaAllocator* allocator) const {
    184     return new (allocator) EnvUsePosition(environment_, input_index_, position_);
    185   }
    186 
    187  private:
    188   HEnvironment* const environment_;
    189   const size_t input_index_;
    190   const size_t position_;
    191 
    192   DISALLOW_COPY_AND_ASSIGN(EnvUsePosition);
    193 };
    194 using EnvUsePositionList = IntrusiveForwardList<EnvUsePosition>;
    195 
    196 template <typename Iterator>
    197 inline Iterator FindUseAtOrAfterPosition(Iterator first, Iterator last, size_t position) {
    198   using value_type = const typename Iterator::value_type;
    199   static_assert(std::is_same<value_type, const UsePosition>::value ||
    200                     std::is_same<value_type, const EnvUsePosition>::value,
    201                 "Expecting value type UsePosition or EnvUsePosition.");
    202   Iterator ret = std::find_if(
    203       first, last, [position](const value_type& use) { return use.GetPosition() >= position; });
    204   // Check that the processed range is sorted. Do not check the rest of the range to avoid
    205   // increasing the complexity of callers from O(n) to O(n^2).
    206   DCHECK(std::is_sorted(
    207       first,
    208       ret,
    209       [](const value_type& lhs, const value_type& rhs) {
    210           return lhs.GetPosition() < rhs.GetPosition();
    211       }));
    212   return ret;
    213 }
    214 
    215 template <typename Iterator>
    216 inline IterationRange<Iterator> FindMatchingUseRange(Iterator first,
    217                                                      Iterator last,
    218                                                      size_t position_begin,
    219                                                      size_t position_end) {
    220   Iterator begin = FindUseAtOrAfterPosition(first, last, position_begin);
    221   Iterator end = FindUseAtOrAfterPosition(begin, last, position_end);
    222   return MakeIterationRange(begin, end);
    223 }
    224 
class SafepointPosition : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  // A safepoint instruction occurring inside a live interval. The safepoints
  // of an interval form a singly-linked list ordered by lifetime position.
  explicit SafepointPosition(HInstruction* instruction)
      : instruction_(instruction),
        next_(nullptr) {}

  void SetNext(SafepointPosition* next) {
    next_ = next;
  }

  // Lifetime position of the safepoint instruction.
  size_t GetPosition() const {
    return instruction_->GetLifetimePosition();
  }

  SafepointPosition* GetNext() const {
    return next_;
  }

  LocationSummary* GetLocations() const {
    return instruction_->GetLocations();
  }

  HInstruction* GetInstruction() const {
    return instruction_;
  }

 private:
  HInstruction* const instruction_;
  SafepointPosition* next_;

  DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
};
    257 
    258 /**
    259  * An interval is a list of disjoint live ranges where an instruction is live.
    260  * Each instruction that has uses gets an interval.
    261  */
    262 class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
    263  public:
  // Creates the interval of an instruction (or an anonymous interval when
  // `instruction` is null).
  static LiveInterval* MakeInterval(ArenaAllocator* allocator,
                                    Primitive::Type type,
                                    HInstruction* instruction = nullptr) {
    return new (allocator) LiveInterval(allocator, type, instruction);
  }

  // Creates an interval pinned to physical register `reg` (used to block a
  // register over some lifetime positions).
  static LiveInterval* MakeFixedInterval(ArenaAllocator* allocator, int reg, Primitive::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, true, reg, false);
  }

  // Creates an interval for a temporary location required by an instruction.
  static LiveInterval* MakeTempInterval(ArenaAllocator* allocator, Primitive::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, false, kNoRegister, true);
  }

  bool IsFixed() const { return is_fixed_; }
  bool IsTemp() const { return is_temp_; }
  // This interval is the result of a split.
  bool IsSplit() const { return parent_ != this; }
    282 
  // Records the single use of a temporary and gives it a minimal range at the
  // owning instruction's lifetime position.
  void AddTempUse(HInstruction* instruction, size_t temp_index) {
    DCHECK(IsTemp());
    DCHECK(GetUses().empty()) << "A temporary can only have one user";
    DCHECK(GetEnvironmentUses().empty()) << "A temporary cannot have environment user";
    size_t position = instruction->GetLifetimePosition();
    UsePosition* new_use = new (allocator_) UsePosition(instruction, temp_index, position);
    uses_.push_front(*new_use);
    AddRange(position, position + 1);
  }
    292 
  // Record use of an input. The use will be recorded as an environment use if
  // `environment` is not null and as register use otherwise. If `actual_user`
  // is specified, the use will be recorded at `actual_user`'s lifetime position.
  // Called while liveness analysis walks blocks (and instructions) backwards,
  // so uses are prepended and lists stay sorted by position.
  void AddUse(HInstruction* instruction,
              HEnvironment* environment,
              size_t input_index,
              HInstruction* actual_user = nullptr,
              bool keep_alive = false) {
    bool is_environment = (environment != nullptr);
    LocationSummary* locations = instruction->GetLocations();
    if (actual_user == nullptr) {
      actual_user = instruction;
    }

    // Set the use within the instruction.
    size_t position = actual_user->GetLifetimePosition() + 1;
    if (!is_environment) {
      if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
        // For fixed inputs and output same as input, the register allocator
        // requires to have inputs die at the instruction, so that input moves use the
        // location of the input just before that instruction (and not potential moves due
        // to splitting).
        DCHECK_EQ(instruction, actual_user);
        position = actual_user->GetLifetimePosition();
      } else if (!locations->InAt(input_index).IsValid()) {
        // Input is unused by codegen: no register use to record.
        return;
      }
    }

    if (!is_environment && instruction->IsInLoop()) {
      // Keep the value live until the loop back edges (see AddBackEdgeUses).
      AddBackEdgeUses(*instruction->GetBlock());
    }

    if ((!uses_.empty()) &&
        (uses_.front().GetUser() == actual_user) &&
        (uses_.front().GetPosition() < position)) {
      // The user uses the instruction multiple times, and one use dies before the other.
      // We update the use list so that the latter is first.
      DCHECK(!is_environment);
      DCHECK(uses_.front().GetPosition() + 1 == position);
      // Walk past all uses at the earlier position to find the insertion point.
      UsePositionList::iterator next_pos = uses_.begin();
      UsePositionList::iterator insert_pos;
      do {
        insert_pos = next_pos;
        ++next_pos;
      } while (next_pos != uses_.end() && next_pos->GetPosition() < position);
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.insert_after(insert_pos, *new_use);
      // Extend the first range if the later use now outlives it.
      if (first_range_->GetEnd() == uses_.front().GetPosition()) {
        first_range_->end_ = position;
      }
      return;
    }

    if (is_environment) {
      DCHECK(env_uses_.empty() || position <= env_uses_.front().GetPosition());
      EnvUsePosition* new_env_use =
          new (allocator_) EnvUsePosition(environment, input_index, position);
      env_uses_.push_front(*new_env_use);
    } else {
      DCHECK(uses_.empty() || position <= uses_.front().GetPosition());
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.push_front(*new_use);
    }

    if (is_environment && !keep_alive) {
      // If this environment use does not keep the instruction live, it does not
      // affect the live range of that instruction.
      return;
    }

    size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    if (first_range_ == nullptr) {
      // First time we see a use of that interval.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, nullptr);
    } else if (first_range_->GetStart() == start_block_position) {
      // There is a use later in the same block or in a following block.
      // Note that in such a case, `AddRange` for the whole blocks has been called
      // before arriving in this method, and this is the reason the start of
      // `first_range_` is before the given `position`.
      DCHECK_LE(position, first_range_->GetEnd());
    } else {
      DCHECK(first_range_->GetStart() > position);
      // There is a hole in the interval. Create a new range.
      // Note that the start of `first_range_` can be equal to `end`: two blocks
      // having adjacent lifetime positions are not necessarily
      // predecessor/successor. When two blocks are predecessor/successor, the
      // liveness algorithm has called `AddRange` before arriving in this method,
      // and the `GetStart() == start_block_position` branch above would have
      // been taken instead.
      first_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, first_range_);
    }
  }
    387 
  // Records a phi input use at the end of predecessor `block`: phi inputs are
  // live out of the corresponding predecessor, not at the phi itself.
  void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    DCHECK(instruction->IsPhi());
    if (block->IsInLoop()) {
      // Keep the value live until the loop back edges (see AddBackEdgeUses).
      AddBackEdgeUses(*block);
    }
    UsePosition* new_use =
        new (allocator_) UsePosition(instruction, input_index, block->GetLifetimeEnd());
    uses_.push_front(*new_use);
  }
    397 
  // Prepends [start, end) to the range list. Liveness analysis walks blocks in
  // reverse order, so new ranges always start at or before `first_range_`.
  ALWAYS_INLINE void AddRange(size_t start, size_t end) {
    if (first_range_ == nullptr) {
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, first_range_);
    } else if (first_range_->GetStart() == end) {
      // There is a use in the following block.
      first_range_->start_ = start;
    } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
      // Exact duplicate range: only fixed intervals may add the same range twice.
      DCHECK(is_fixed_);
    } else {
      DCHECK_GT(first_range_->GetStart(), end);
      // There is a hole in the interval. Create a new range.
      first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
    }
  }
    413 
  // Extends the interval to cover the whole loop span [start, end), merging
  // any existing ranges that the loop subsumes. `start` must be at or before
  // the current first range.
  void AddLoopRange(size_t start, size_t end) {
    DCHECK(first_range_ != nullptr);
    DCHECK_LE(start, first_range_->GetStart());
    // Find the range that covers the positions after the loop.
    LiveRange* after_loop = first_range_;
    LiveRange* last_in_loop = nullptr;
    while (after_loop != nullptr && after_loop->GetEnd() < end) {
      DCHECK_LE(start, after_loop->GetStart());
      last_in_loop = after_loop;
      after_loop = after_loop->GetNext();
    }
    if (after_loop == nullptr) {
      // Uses are only in the loop.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, nullptr);
    } else if (after_loop->GetStart() <= end) {
      first_range_ = range_search_start_ = after_loop;
      // There are uses after the loop.
      first_range_->start_ = start;
    } else {
      // The use after the loop is after a lifetime hole.
      DCHECK(last_in_loop != nullptr);
      first_range_ = range_search_start_ = last_in_loop;
      first_range_->start_ = start;
      first_range_->end_ = end;
    }
  }
    441 
  // Spill slot management. `kNoSpillSlot` means no stack slot is assigned.
  bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
  void SetSpillSlot(int slot) {
    // Fixed and temp intervals are never spilled.
    DCHECK(!is_fixed_);
    DCHECK(!is_temp_);
    spill_slot_ = slot;
  }
  int GetSpillSlot() const { return spill_slot_; }
    449 
  // Sets the start of the interval to the definition position `from`.
  void SetFrom(size_t from) {
    if (first_range_ != nullptr) {
      first_range_->start_ = from;
    } else {
      // Instruction without uses.
      DCHECK(uses_.empty());
      DCHECK(from == defined_by_->GetLifetimePosition());
      // Give the definition a minimal two-position range so it still has a
      // location for its output.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(from, from + 2, nullptr);
    }
  }
    461 
  // The parent interval (itself when this interval is not a split sibling).
  LiveInterval* GetParent() const { return parent_; }

  // Returns whether this interval is the parent interval, that is, the interval
  // that starts where the HInstruction is defined.
  bool IsParent() const { return parent_ == this; }

  LiveRange* GetFirstRange() const { return first_range_; }
  LiveRange* GetLastRange() const { return last_range_; }

  // Register assigned by the allocator; kNoRegister while unassigned.
  int GetRegister() const { return register_; }
  void SetRegister(int reg) { register_ = reg; }
  void ClearRegister() { register_ = kNoRegister; }
  bool HasRegister() const { return register_ != kNoRegister; }
    475 
    476   bool IsDeadAt(size_t position) const {
    477     return GetEnd() <= position;
    478   }
    479 
    480   bool IsDefinedAt(size_t position) const {
    481     return GetStart() <= position && !IsDeadAt(position);
    482   }
    483 
  // Returns true if the interval contains a LiveRange covering `position`.
  // The range at or immediately after the current position of linear scan
  // is cached for better performance. If `position` can be smaller than
  // that, CoversSlow should be used instead.
  bool Covers(size_t position) {
    LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
    range_search_start_ = candidate;  // Cache for the next (monotonically later) query.
    return (candidate != nullptr && candidate->GetStart() <= position);
  }

  // Same as Covers but always tests all ranges.
  bool CoversSlow(size_t position) const {
    LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
    return candidate != nullptr && candidate->GetStart() <= position;
  }
    499 
  // Returns the first intersection of this interval with `current`, which
  // must be the interval currently being allocated by linear scan.
  // Returns kNoLifetime when the two intervals never intersect.
  size_t FirstIntersectionWith(LiveInterval* current) const {
    // Find the first range after the start of `current`. We use the search
    // cache to improve performance.
    DCHECK(GetStart() <= current->GetStart() || IsFixed());
    LiveRange* other_range = current->first_range_;
    LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
    if (my_range == nullptr) {
      return kNoLifetime;
    }

    // Advance both intervals and find the first matching range start in
    // this interval. Classic sorted-list merge walk.
    do {
      if (my_range->IsBefore(*other_range)) {
        my_range = my_range->GetNext();
        if (my_range == nullptr) {
          return kNoLifetime;
        }
      } else if (other_range->IsBefore(*my_range)) {
        other_range = other_range->GetNext();
        if (other_range == nullptr) {
          return kNoLifetime;
        }
      } else {
        DCHECK(my_range->IntersectsWith(*other_range));
        // The intersection begins where the later of the two ranges starts.
        return std::max(my_range->GetStart(), other_range->GetStart());
      }
    } while (true);
  }
    531 
  // Start of the first range. The interval must have at least one range.
  size_t GetStart() const {
    return first_range_->GetStart();
  }

  // End (exclusive) of the last range.
  size_t GetEnd() const {
    return last_range_->GetEnd();
  }

  size_t GetLength() const {
    return GetEnd() - GetStart();
  }
    543 
  // Returns the position of the first use at or after `position` that needs a
  // register, or kNoLifetime if none occurs before the interval ends.
  size_t FirstRegisterUseAfter(size_t position) const {
    if (is_temp_) {
      // A temp needs its register exactly at its start position.
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        // Uses are sorted by position; past this sibling's end, stop.
        break;
      }
      if (use_position > position) {
        if (use.RequiresRegister()) {
          return use_position;
        }
      }
    }
    return kNoLifetime;
  }
    567 
  // Returns the location of the first register use for this live interval,
  // including a register definition if applicable.
  size_t FirstRegisterUse() const {
    return FirstRegisterUseAfter(GetStart());
  }

  // Whether the interval requires a register rather than a stack location.
  // If needed for performance, this could be cached.
  bool RequiresRegister() const {
    // Only meaningful while no register has been assigned yet.
    return !HasRegister() && FirstRegisterUse() != kNoLifetime;
  }
    579 
    580   size_t FirstUseAfter(size_t position) const {
    581     if (is_temp_) {
    582       return position == GetStart() ? position : kNoLifetime;
    583     }
    584 
    585     if (IsDefiningPosition(position)) {
    586       DCHECK(defined_by_->GetLocations()->Out().IsValid());
    587       return position;
    588     }
    589 
    590     size_t end = GetEnd();
    591     for (const UsePosition& use : GetUses()) {
    592       size_t use_position = use.GetPosition();
    593       if (use_position > end) {
    594         break;
    595       }
    596       if (use_position > position) {
    597         return use_position;
    598       }
    599     }
    600     return kNoLifetime;
    601   }
    602 
  // Use lists are shared by all split siblings and stored on the parent.
  const UsePositionList& GetUses() const {
    return parent_->uses_;
  }

  const EnvUsePositionList& GetEnvironmentUses() const {
    return parent_->env_uses_;
  }

  Primitive::Type GetType() const {
    return type_;
  }

  // Instruction that defines this interval; may be null (e.g. fixed/temp intervals).
  HInstruction* GetDefinedBy() const {
    return defined_by_;
  }
    618 
    619   bool HasWillCallSafepoint() const {
    620     for (SafepointPosition* safepoint = first_safepoint_;
    621          safepoint != nullptr;
    622          safepoint = safepoint->GetNext()) {
    623       if (safepoint->GetLocations()->WillCall()) return true;
    624     }
    625     return false;
    626   }
    627 
  // Returns the last safepoint whose position is strictly before `position`,
  // null if the very first safepoint is already at or past `position`, and
  // last_safepoint_ when every safepoint is before `position`.
  SafepointPosition* FindSafepointJustBefore(size_t position) const {
    for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
         safepoint != nullptr;
         previous = safepoint, safepoint = safepoint->GetNext()) {
      if (safepoint->GetPosition() >= position) return previous;
    }
    return last_safepoint_;
  }
    636 
  /**
   * Split this interval at `position`. This interval is changed to:
   * [start ... position).
   *
   * The new interval covers:
   * [position ... end)
   *
   * Returns null when the interval already dies before `position`. The new
   * interval becomes the next sibling and shares the same parent; safepoints
   * and ranges are partitioned between the two halves.
   */
  LiveInterval* SplitAt(size_t position) {
    DCHECK(!is_temp_);
    DCHECK(!is_fixed_);
    DCHECK_GT(position, GetStart());

    if (GetEnd() <= position) {
      // This range dies before `position`, no need to split.
      return nullptr;
    }

    LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    // Move the safepoints at or after `position` over to the new interval.
    SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
    if (new_last_safepoint == nullptr) {
      new_interval->first_safepoint_ = first_safepoint_;
      new_interval->last_safepoint_ = last_safepoint_;
      first_safepoint_ = last_safepoint_ = nullptr;
    } else if (last_safepoint_ != new_last_safepoint) {
      new_interval->last_safepoint_ = last_safepoint_;
      new_interval->first_safepoint_ = new_last_safepoint->GetNext();
      DCHECK(new_interval->first_safepoint_ != nullptr);
      last_safepoint_ = new_last_safepoint;
      last_safepoint_->SetNext(nullptr);
    }

    // Link the new interval into the sibling chain, after this one.
    new_interval->next_sibling_ = next_sibling_;
    next_sibling_ = new_interval;
    new_interval->parent_ = parent_;

    LiveRange* current = first_range_;
    LiveRange* previous = nullptr;
    // Iterate over the ranges, and either find a range that covers this position, or
    // two ranges in between this position (that is, the position is in a lifetime hole).
    do {
      if (position >= current->GetEnd()) {
        // Move to next range.
        previous = current;
        current = current->next_;
      } else if (position <= current->GetStart()) {
        // If the previous range did not cover this position, we know position is in
        // a lifetime hole. We can just break the first_range_ and last_range_ links
        // and return the new interval.
        DCHECK(previous != nullptr);
        DCHECK(current != first_range_);
        new_interval->last_range_ = last_range_;
        last_range_ = previous;
        previous->next_ = nullptr;
        new_interval->first_range_ = current;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to null
          // (i.e. the end of the interval) in the original interval.
          range_search_start_ = nullptr;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      } else {
        // This range covers position. We create a new last_range_ for this interval
        // that covers last_range_->Start() and position. We also shorten the current
        // range and make it the first range of the new interval.
        DCHECK(position < current->GetEnd() && position > current->GetStart());
        new_interval->last_range_ = last_range_;
        last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
        if (previous != nullptr) {
          previous->next_ = last_range_;
        } else {
          first_range_ = last_range_;
        }
        new_interval->first_range_ = current;
        current->start_ = position;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to `last_range`
          // in the original interval. This is conservative but always correct.
          range_search_start_ = last_range_;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      }
    } while (current != nullptr);

    // GetEnd() > position guarantees one of the branches above returns.
    LOG(FATAL) << "Unreachable";
    return nullptr;
  }
    725 
    726   bool StartsBeforeOrAt(LiveInterval* other) const {
    727     return GetStart() <= other->GetStart();
    728   }
    729 
    730   bool StartsAfter(LiveInterval* other) const {
    731     return GetStart() > other->GetStart();
    732   }
    733 
    734   void Dump(std::ostream& stream) const {
    735     stream << "ranges: { ";
    736     LiveRange* current = first_range_;
    737     while (current != nullptr) {
    738       current->Dump(stream);
    739       stream << " ";
    740       current = current->GetNext();
    741     }
    742     stream << "}, uses: { ";
    743     for (const UsePosition& use : GetUses()) {
    744       use.Dump(stream);
    745       stream << " ";
    746     }
    747     stream << "}, { ";
    748     for (const EnvUsePosition& env_use : GetEnvironmentUses()) {
    749       env_use.Dump(stream);
    750       stream << " ";
    751     }
    752     stream << "}";
    753     stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
    754     stream << " is_low: " << IsLowInterval();
    755     stream << " is_high: " << IsHighInterval();
    756   }
    757 
    758   // Same as Dump, but adds context such as the instruction defining this interval, and
    759   // the register currently assigned to this interval.
    760   void DumpWithContext(std::ostream& stream, const CodeGenerator& codegen) const;
    761 
  // Returns the interval that results from splitting this one (the next
  // sibling in the split chain), or null if there is none.
  LiveInterval* GetNextSibling() const { return next_sibling_; }
    763   LiveInterval* GetLastSibling() {
    764     LiveInterval* result = this;
    765     while (result->next_sibling_ != nullptr) {
    766       result = result->next_sibling_;
    767     }
    768     return result;
    769   }
    770 
    771   // Returns the first register hint that is at least free before
    772   // the value contained in `free_until`. If none is found, returns
    773   // `kNoRegister`.
    774   int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;
    775 
    776   // If there is enough at the definition site to find a register (for example
    777   // it uses the same input as the first input), returns the register as a hint.
    778   // Returns kNoRegister otherwise.
    779   int FindHintAtDefinition() const;
    780 
    781   // Returns the number of required spilling slots (measured as a multiple of the
    782   // Dex virtual register size `kVRegSize`).
    783   size_t NumberOfSpillSlotsNeeded() const;
    784 
    785   bool IsFloatingPoint() const {
    786     return type_ == Primitive::kPrimFloat || type_ == Primitive::kPrimDouble;
    787   }
    788 
    789   // Converts the location of the interval to a `Location` object.
    790   Location ToLocation() const;
    791 
    792   // Returns the location of the interval following its siblings at `position`.
    793   Location GetLocationAt(size_t position);
    794 
    795   // Finds the sibling that is defined at `position`.
    796   LiveInterval* GetSiblingAt(size_t position);
    797 
    798   // Returns whether `other` and `this` share the same kind of register.
    799   bool SameRegisterKind(Location other) const;
    800   bool SameRegisterKind(const LiveInterval& other) const {
    801     return IsFloatingPoint() == other.IsFloatingPoint();
    802   }
    803 
  // A low (first-half) interval of a register pair has a paired high interval.
  bool HasHighInterval() const {
    return IsLowInterval();
  }

  // A high (second-half) interval of a register pair has a paired low interval.
  bool HasLowInterval() const {
    return IsHighInterval();
  }

  // Returns the paired low interval; only valid on a high interval.
  LiveInterval* GetLowInterval() const {
    DCHECK(HasLowInterval());
    return high_or_low_interval_;
  }

  // Returns the paired high interval; only valid on a low interval.
  LiveInterval* GetHighInterval() const {
    DCHECK(HasHighInterval());
    return high_or_low_interval_;
  }

  // Whether this interval is the synthesized high half of a register pair.
  // The flag lives on the parent, so the query goes through GetParent().
  bool IsHighInterval() const {
    return GetParent()->is_high_interval_;
  }

  // Whether this interval is the low half of a register pair: it has a paired
  // interval but is not itself the high one.
  bool IsLowInterval() const {
    return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
  }

  // Links this (high) interval to its low counterpart.
  void SetLowInterval(LiveInterval* low) {
    DCHECK(IsHighInterval());
    high_or_low_interval_ = low;
  }

  // Links this (low) interval to its high counterpart.
  void SetHighInterval(LiveInterval* high) {
    DCHECK(IsLowInterval());
    high_or_low_interval_ = high;
  }
    839 
  // Creates the synthesized high interval paired with this (low, parent)
  // interval: the new interval mirrors this one's ranges and clones both its
  // use and environment-use lists. `is_temp` is forwarded to the new
  // interval's constructor.
  void AddHighInterval(bool is_temp = false) {
    DCHECK(IsParent());
    DCHECK(!HasHighInterval());
    DCHECK(!HasLowInterval());
    high_or_low_interval_ = new (allocator_) LiveInterval(
        allocator_, type_, defined_by_, false, kNoRegister, is_temp, true);
    // Link back so each half can reach the other.
    high_or_low_interval_->high_or_low_interval_ = this;
    if (first_range_ != nullptr) {
      // Duplicate the whole range list so the high interval can be processed
      // independently of this one.
      high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
      high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
      high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
    }
    // Clone the use list, preserving order by always inserting after the
    // previously inserted element.
    auto pos = high_or_low_interval_->uses_.before_begin();
    for (const UsePosition& use : uses_) {
      UsePosition* new_use = use.Clone(allocator_);
      pos = high_or_low_interval_->uses_.insert_after(pos, *new_use);
    }

    // Same for the environment uses.
    auto env_pos = high_or_low_interval_->env_uses_.before_begin();
    for (const EnvUsePosition& env_use : env_uses_) {
      EnvUsePosition* new_env_use = env_use.Clone(allocator_);
      env_pos = high_or_low_interval_->env_uses_.insert_after(env_pos, *new_env_use);
    }
  }
    864 
  // Returns whether an interval, when it is non-split, uses the same
  // register as one of the inputs of its defining instruction.
  // Debug-only helper (see the CHECK below); intended for DCHECKs.
  bool IsUsingInputRegister() const {
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    if (defined_by_ != nullptr && !IsSplit()) {
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the sibling that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        // Check if both intervals have the same register of the same kind.
        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          return true;
        }
      }
    }
    return false;
  }
    889 
  // Returns whether an interval, when it is non-split, can safely use
  // the same register as one of the inputs of its defining instruction.
  // Note that this method requires IsUsingInputRegister() to be true, and is
  // a debug-only helper like it.
  bool CanUseInputRegister() const {
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    DCHECK(IsUsingInputRegister());
    if (defined_by_ != nullptr && !IsSplit()) {
      LocationSummary* locations = defined_by_->GetLocations();
      if (locations->OutputCanOverlapWithInputs()) {
        // The codegen requires the output not to share a register with any
        // input, so reusing an input register is never safe here.
        return false;
      }
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the sibling that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          // We found the input that has the same register. Check if it is live after
          // `defined_by_`: reuse is only safe if the input dies there.
          return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
        }
      }
    }
    // IsUsingInputRegister() guarantees the loop above finds a match.
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }
    922 
    923   void AddSafepoint(HInstruction* instruction) {
    924     SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
    925     if (first_safepoint_ == nullptr) {
    926       first_safepoint_ = last_safepoint_ = safepoint;
    927     } else {
    928       DCHECK_LT(last_safepoint_->GetPosition(), safepoint->GetPosition());
    929       last_safepoint_->SetNext(safepoint);
    930       last_safepoint_ = safepoint;
    931     }
    932   }
    933 
  // Head of the sorted list of safepoints at which this interval is live.
  SafepointPosition* GetFirstSafepoint() const {
    return first_safepoint_;
  }

  // Resets the starting point for range-searching queries to the first range.
  // Intervals must be reset prior to starting a new linear scan over them.
  void ResetSearchCache() {
    range_search_start_ = first_range_;
  }
    943 
    944   bool DefinitionRequiresRegister() const {
    945     DCHECK(IsParent());
    946     LocationSummary* locations = defined_by_->GetLocations();
    947     Location location = locations->Out();
    948     // This interval is the first interval of the instruction. If the output
    949     // of the instruction requires a register, we return the position of that instruction
    950     // as the first register use.
    951     if (location.IsUnallocated()) {
    952       if ((location.GetPolicy() == Location::kRequiresRegister)
    953            || (location.GetPolicy() == Location::kSameAsFirstInput
    954                && (locations->InAt(0).IsRegister()
    955                    || locations->InAt(0).IsRegisterPair()
    956                    || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
    957         return true;
    958       } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
    959                  || (location.GetPolicy() == Location::kSameAsFirstInput
    960                      && (locations->InAt(0).IsFpuRegister()
    961                          || locations->InAt(0).IsFpuRegisterPair()
    962                          || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
    963         return true;
    964       }
    965     } else if (location.IsRegister() || location.IsRegisterPair()) {
    966       return true;
    967     }
    968     return false;
    969   }
    970 
    971  private:
  // Private constructor: a newly built interval is its own parent
  // (`parent_ = this`) and starts with no ranges, uses, siblings, register
  // or spill slot. `is_high_interval` marks the synthesized high half of a
  // register pair; `is_fixed` pins the interval to `reg`.
  LiveInterval(ArenaAllocator* allocator,
               Primitive::Type type,
               HInstruction* defined_by = nullptr,
               bool is_fixed = false,
               int reg = kNoRegister,
               bool is_temp = false,
               bool is_high_interval = false)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        range_search_start_(nullptr),
        first_safepoint_(nullptr),
        last_safepoint_(nullptr),
        uses_(),
        env_uses_(),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),
        register_(reg),
        spill_slot_(kNoSpillSlot),
        is_fixed_(is_fixed),
        is_temp_(is_temp),
        is_high_interval_(is_high_interval),
        high_or_low_interval_(nullptr),
        defined_by_(defined_by) {}
    997 
    998   // Searches for a LiveRange that either covers the given position or is the
    999   // first next LiveRange. Returns null if no such LiveRange exists. Ranges
   1000   // known to end before `position` can be skipped with `search_start`.
   1001   LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
   1002     if (kIsDebugBuild) {
   1003       if (search_start != first_range_) {
   1004         // If we are not searching the entire list of ranges, make sure we do
   1005         // not skip the range we are searching for.
   1006         if (search_start == nullptr) {
   1007           DCHECK(IsDeadAt(position));
   1008         } else if (search_start->GetStart() > position) {
   1009           DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
   1010         }
   1011       }
   1012     }
   1013 
   1014     LiveRange* range;
   1015     for (range = search_start;
   1016          range != nullptr && range->GetEnd() <= position;
   1017          range = range->GetNext()) {
   1018       continue;
   1019     }
   1020     return range;
   1021   }
   1022 
  // Returns whether `position` is where this interval defines its value:
  // only the parent (unsplit) interval starts at the defining position.
  bool IsDefiningPosition(size_t position) const {
    return IsParent() && (position == GetStart());
  }
   1026 
   1027   bool HasSynthesizeUseAt(size_t position) const {
   1028     for (const UsePosition& use : GetUses()) {
   1029       size_t use_position = use.GetPosition();
   1030       if ((use_position == position) && use.IsSynthesized()) {
   1031         return true;
   1032       }
   1033       if (use_position > position) break;
   1034     }
   1035     return false;
   1036   }
   1037 
  // Inserts synthesized uses at the back edges of the loops containing
  // `block_at_use`, walking from the inner-most loop outward, so the register
  // allocator keeps the value live across whole loops. No-op when the graph
  // has irreducible loops.
  void AddBackEdgeUses(const HBasicBlock& block_at_use) {
    DCHECK(block_at_use.IsInLoop());
    if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
      // Linear order may not be well formed when irreducible loops are present,
      // i.e. loop blocks may not be adjacent and a back edge may not be last,
      // which violates assumptions made in this method.
      return;
    }

    // Add synthesized uses at the back edge of loops to help the register allocator.
    // Note that this method is called in decreasing liveness order, to facilitate adding
    // uses at the head of the `uses_` list. Because below
    // we iterate from inner-most to outer-most, which is in increasing liveness order,
    // we need to add subsequent entries after the last inserted entry.
    const UsePositionList::iterator old_begin = uses_.begin();
    UsePositionList::iterator insert_pos = uses_.before_begin();
    for (HLoopInformationOutwardIterator it(block_at_use);
         !it.Done();
         it.Advance()) {
      HLoopInformation* current = it.Current();
      if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
        // This interval is defined in the loop. We can stop going outward.
        break;
      }

      // We're only adding a synthesized use at the last back edge. Adding synthesized uses on
      // all back edges is not necessary: anything used in the loop will have its use at the
      // last back edge. If we want branches in a loop to have better register allocation than
      // another branch, then it is the linear order we should change.
      size_t back_edge_use_position = current->GetLifetimeEnd();
      if ((old_begin != uses_.end()) && (old_begin->GetPosition() <= back_edge_use_position)) {
        // There was a use already seen in this loop. Therefore the previous call to `AddUse`
        // already inserted the backedge use. We can stop going outward.
        DCHECK(HasSynthesizeUseAt(back_edge_use_position));
        break;
      }

      DCHECK(insert_pos != uses_.before_begin()
             ? back_edge_use_position > insert_pos->GetPosition()
             : current == block_at_use.GetLoopInformation())
          << std::distance(uses_.before_begin(), insert_pos);

      UsePosition* new_use = new (allocator_) UsePosition(back_edge_use_position);
      insert_pos = uses_.insert_after(insert_pos, *new_use);
    }
  }
   1084 
  // Arena in which this interval and all its auxiliary objects are allocated.
  ArenaAllocator* const allocator_;

  // Ranges of this interval. We need a quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // The first range at or after the current position of a linear scan. It is
  // used to optimize range-searching queries.
  LiveRange* range_search_start_;

  // Safepoints where this interval is live, kept sorted by position.
  SafepointPosition* first_safepoint_;
  SafepointPosition* last_safepoint_;

  // Uses of this interval. Only the parent interval keeps these lists.
  UsePositionList uses_;
  EnvUsePositionList env_uses_;

  // The instruction type this interval corresponds to.
  const Primitive::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

  // The first interval from which split intervals come from.
  LiveInterval* parent_;

  // The register allocated to this interval.
  int register_;

  // The spill slot allocated to this interval.
  int spill_slot_;

  // Whether the interval is for a fixed register.
  const bool is_fixed_;

  // Whether the interval is for a temporary.
  const bool is_temp_;

  // Whether this interval is a synthesized interval for register pair.
  const bool is_high_interval_;

  // If this interval needs a register pair, the high or low equivalent.
  // `is_high_interval_` tells whether this holds the low or the high.
  LiveInterval* high_or_low_interval_;

  // The instruction represented by this interval.
  HInstruction* const defined_by_;

  // Sentinel values for "no register assigned" and "no spill slot assigned".
  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
   1141 };
   1142 
   1143 /**
   1144  * Analysis that computes the liveness of instructions:
   1145  *
   1146  * (a) Non-environment uses of an instruction always make
   1147  *     the instruction live.
   1148  * (b) Environment uses of an instruction whose type is
   1149  *     object (that is, non-primitive), make the instruction live.
   1150  *     This is due to having to keep alive objects that have
   1151  *     finalizers deleting native objects.
   1152  * (c) When the graph has the debuggable property, environment uses
   1153  *     of an instruction that has a primitive type make the instruction live.
   1154  *     If the graph does not have the debuggable property, the environment
   1155  *     use has no effect, and may get a 'none' value after register allocation.
   1156  *
   1157  * (b) and (c) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
   1158  */
class SsaLivenessAnalysis : public ValueObject {
 public:
  SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen)
      : graph_(graph),
        codegen_(codegen),
        block_infos_(graph->GetBlocks().size(),
                     nullptr,
                     graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
        instructions_from_ssa_index_(graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
        instructions_from_lifetime_position_(graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
        number_of_ssa_values_(0) {
  }

  // Runs the full analysis: numbers the instructions, then computes live
  // ranges and the per-block live_in/live_out/kill sets.
  void Analyze();

  // SSA values live on entry to `block`.
  BitVector* GetLiveInSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->live_in_;
  }

  // SSA values live on exit from `block`.
  BitVector* GetLiveOutSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->live_out_;
  }

  // SSA values defined (killed) in `block`.
  BitVector* GetKillSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->kill_;
  }

  // Maps an SSA index back to the instruction that defines that value.
  HInstruction* GetInstructionFromSsaIndex(size_t index) const {
    return instructions_from_ssa_index_[index];
  }

  // Instruction at position index `index`, or null when the index falls on a
  // block boundary.
  HInstruction* GetInstructionFromPosition(size_t index) const {
    return instructions_from_lifetime_position_[index];
  }

  // Returns the basic block covering position index `index`.
  HBasicBlock* GetBlockFromPosition(size_t index) const {
    HInstruction* instruction = GetInstructionFromPosition(index);
    if (instruction == nullptr) {
      // If we are at a block boundary, get the block following.
      instruction = GetInstructionFromPosition(index + 1);
    }
    return instruction->GetBlock();
  }

  // A position index with no instruction marks a block boundary.
  bool IsAtBlockBoundary(size_t index) const {
    return GetInstructionFromPosition(index) == nullptr;
  }

  // Returns the instruction that requested the temporary `temp`.
  HInstruction* GetTempUser(LiveInterval* temp) const {
    // A temporary shares the same lifetime start as the instruction that requires it.
    DCHECK(temp->IsTemp());
    HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
    DCHECK_EQ(user, temp->GetUses().front().GetUser());
    return user;
  }

  // Returns the index of `temp` within its user's temporary list.
  size_t GetTempIndex(LiveInterval* temp) const {
    // We use the input index to store the index of the temporary in the user's temporary list.
    DCHECK(temp->IsTemp());
    return temp->GetUses().front().GetInputIndex();
  }

  // Last valid lifetime position; each instruction accounts for two positions.
  size_t GetMaxLifetimePosition() const {
    return instructions_from_lifetime_position_.size() * 2 - 1;
  }

  size_t GetNumberOfSsaValues() const {
    return number_of_ssa_values_;
  }

  static constexpr const char* kLivenessPassName = "liveness";

 private:
  // Give an SSA number to each instruction that defines a value used by another instruction,
  // and setup the lifetime information of each instruction and block.
  void NumberInstructions();

  // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
  void ComputeLiveness();

  // Compute the live ranges of instructions, as well as the initial live_in, live_out and
  // kill sets, that do not take into account backward branches.
  void ComputeLiveRanges();

  // After computing the initial sets, this method does a fixed point
  // calculation over the live_in and live_out set to take into account
  // backwards branches.
  void ComputeLiveInAndLiveOutSets();

  // Update the live_in set of the block and returns whether it has changed.
  bool UpdateLiveIn(const HBasicBlock& block);

  // Update the live_out set of the block and returns whether it has changed.
  bool UpdateLiveOut(const HBasicBlock& block);

  // Returns whether `instruction` in an HEnvironment held by `env_holder`
  // should be kept live by the HEnvironment.
  static bool ShouldBeLiveForEnvironment(HInstruction* env_holder, HInstruction* instruction) {
    if (instruction == nullptr) return false;
    // A value that's not live in compiled code may still be needed in interpreter,
    // due to code motion, etc.
    if (env_holder->IsDeoptimize()) return true;
    // A value live at a throwing instruction in a try block may be copied by
    // the exception handler to its location at the top of the catch block.
    if (env_holder->CanThrowIntoCatchBlock()) return true;
    if (instruction->GetBlock()->GetGraph()->IsDebuggable()) return true;
    return instruction->GetType() == Primitive::kPrimNot;
  }

  // Debug check: irreducible loop headers may only have trivially
  // re-materializable values (constants, the current method) live-in.
  void CheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const {
    if (!block.IsLoopHeader() || !block.GetLoopInformation()->IsIrreducible()) {
      return;
    }
    BitVector* live_in = GetLiveInSet(block);
    // To satisfy our liveness algorithm, we need to ensure loop headers of
    // irreducible loops do not have any live-in instructions, except constants
    // and the current method, which can be trivially re-materialized.
    for (uint32_t idx : live_in->Indexes()) {
      HInstruction* instruction = GetInstructionFromSsaIndex(idx);
      DCHECK(instruction->GetBlock()->IsEntryBlock()) << instruction->DebugName();
      DCHECK(!instruction->IsParameterValue());
      DCHECK(instruction->IsCurrentMethod() || instruction->IsConstant())
          << instruction->DebugName();
    }
  }

  HGraph* const graph_;
  CodeGenerator* const codegen_;

  // Per-block liveness sets, indexed by block id.
  ArenaVector<BlockInfo*> block_infos_;

  // Temporary array used when computing live_in, live_out, and kill sets.
  ArenaVector<HInstruction*> instructions_from_ssa_index_;

  // Temporary array used when inserting moves in the graph.
  ArenaVector<HInstruction*> instructions_from_lifetime_position_;
  size_t number_of_ssa_values_;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
  ART_FRIEND_TEST(RegisterAllocatorTest, FreeUntil);

  DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
};
   1301 
   1302 }  // namespace art
   1303 
   1304 #endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
   1305