/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_

#include <iostream>

#include "base/iteration_range.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "nodes.h"
#include "utils/intrusive_forward_list.h"

namespace art {

class CodeGenerator;
class SsaLivenessAnalysis;

static constexpr int kNoRegister = -1;

class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  BlockInfo(ScopedArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
      : block_(block),
        live_in_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) {
    UNUSED(block_);
    live_in_.ClearAllBits();
    live_out_.ClearAllBits();
    kill_.ClearAllBits();
  }

 private:
  const HBasicBlock& block_;
  ArenaBitVector live_in_;
  ArenaBitVector live_out_;
  ArenaBitVector kill_;

  friend class SsaLivenessAnalysis;

  DISALLOW_COPY_AND_ASSIGN(BlockInfo);
};

/**
 * A live range contains the start and end of a range where an instruction or a temporary
 * is live.
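 *
 * Ranges are half-open: a range [start, end) covers `start` but not `end`.
 * For example (illustrative), an instruction live from position 4 up to,
 * but not including, position 10 is described by the range [4, 10).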
 */
class LiveRange final : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
    DCHECK_LT(start, end);
    DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
  }

  size_t GetStart() const { return start_; }
  size_t GetEnd() const { return end_; }
  LiveRange* GetNext() const { return next_; }

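  // Two half-open ranges intersect when each one starts before the other
  // ends. Example (illustrative): [2, 6) and [4, 8) intersect, while the
  // adjacent ranges [2, 6) and [6, 8) do not.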
  bool IntersectsWith(const LiveRange& other) const {
    return (start_ >= other.start_ && start_ < other.end_)
        || (other.start_ >= start_ && other.start_ < end_);
  }

  bool IsBefore(const LiveRange& other) const {
    return end_ <= other.start_;
  }

  void Dump(std::ostream& stream) const {
    stream << "[" << start_ << "," << end_ << ")";
  }

  LiveRange* Dup(ScopedArenaAllocator* allocator) const {
    return new (allocator) LiveRange(
        start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
  }

  LiveRange* GetLastRange() {
    return next_ == nullptr ? this : next_->GetLastRange();
  }

 private:
  size_t start_;
  size_t end_;
  LiveRange* next_;

  friend class LiveInterval;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};

/**
 * A use position represents a live interval use at a given position.
 */
class UsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
                    public IntrusiveForwardListNode<UsePosition> {
 public:
  UsePosition(HInstruction* user, size_t input_index, size_t position)
      : user_(user),
        input_index_(input_index),
        position_(position) {
  }

  explicit UsePosition(size_t position)
      : user_(nullptr),
        input_index_(kNoInput),
        position_(dchecked_integral_cast<uint32_t>(position)) {
  }

  size_t GetPosition() const { return position_; }

  HInstruction* GetUser() const { return user_; }

  bool IsSynthesized() const { return user_ == nullptr; }

  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  HLoopInformation* GetLoopInformation() const {
    return user_->GetBlock()->GetLoopInformation();
  }

  UsePosition* Clone(ScopedArenaAllocator* allocator) const {
    return new (allocator) UsePosition(user_, input_index_, position_);
  }

  bool RequiresRegister() const {
    if (IsSynthesized()) return false;
    Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
    return location.IsUnallocated() && location.RequiresRegisterKind();
  }

 private:
  static constexpr uint32_t kNoInput = static_cast<uint32_t>(-1);

  HInstruction* const user_;
  const size_t input_index_;
  const size_t position_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
using UsePositionList = IntrusiveForwardList<UsePosition>;

/**
 * An environment use position represents an environment use of a live interval at a given
 * position.
 */
class EnvUsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
                       public IntrusiveForwardListNode<EnvUsePosition> {
 public:
  EnvUsePosition(HEnvironment* environment,
                 size_t input_index,
                 size_t position)
      : environment_(environment),
        input_index_(input_index),
        position_(position) {
    DCHECK(environment != nullptr);
  }

  size_t GetPosition() const { return position_; }

  HEnvironment* GetEnvironment() const { return environment_; }
  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  EnvUsePosition* Clone(ScopedArenaAllocator* allocator) const {
    return new (allocator) EnvUsePosition(environment_, input_index_, position_);
  }

 private:
  HEnvironment* const environment_;
  const size_t input_index_;
  const size_t position_;

  DISALLOW_COPY_AND_ASSIGN(EnvUsePosition);
};
using EnvUsePositionList = IntrusiveForwardList<EnvUsePosition>;

template <typename Iterator>
inline Iterator FindUseAtOrAfterPosition(Iterator first, Iterator last, size_t position) {
  using value_type = const typename Iterator::value_type;
  static_assert(std::is_same<value_type, const UsePosition>::value ||
                    std::is_same<value_type, const EnvUsePosition>::value,
                "Expecting value type UsePosition or EnvUsePosition.");
  Iterator ret = std::find_if(
      first, last, [position](const value_type& use) { return use.GetPosition() >= position; });
  // Check that the processed range is sorted. Do not check the rest of the range to avoid
  // increasing the complexity of callers from O(n) to O(n^2).
  DCHECK(std::is_sorted(
      first,
      ret,
      [](const value_type& lhs, const value_type& rhs) {
          return lhs.GetPosition() < rhs.GetPosition();
      }));
  return ret;
}

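// Returns the sub-range of uses in [first, last) whose positions fall within
// [position_begin, position_end). Example (illustrative positions): with uses
// at positions { 8, 12, 20 }, FindMatchingUseRange(first, last, 10, 21)
// yields the uses at positions 12 and 20.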
template <typename Iterator>
inline IterationRange<Iterator> FindMatchingUseRange(Iterator first,
                                                     Iterator last,
                                                     size_t position_begin,
                                                     size_t position_end) {
  Iterator begin = FindUseAtOrAfterPosition(first, last, position_begin);
  Iterator end = FindUseAtOrAfterPosition(begin, last, position_end);
  return MakeIterationRange(begin, end);
}

class SafepointPosition : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  explicit SafepointPosition(HInstruction* instruction)
      : instruction_(instruction),
        next_(nullptr) {}

  static size_t ComputePosition(HInstruction* instruction) {
    // We special case instructions emitted at use site, as their
    // safepoint position needs to be at their use.
    if (instruction->IsEmittedAtUseSite()) {
      // Currently only applies to implicit null checks, which are emitted
      // at the next instruction.
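      // Lifetime positions are spaced two apart, hence the `+ 2` below.
      // Example (illustrative): an implicit null check at lifetime
      // position 12 gets its safepoint at position 14, the position of
      // the instruction emitted just after it.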
      DCHECK(instruction->IsNullCheck()) << instruction->DebugName();
      return instruction->GetLifetimePosition() + 2;
    } else {
      return instruction->GetLifetimePosition();
    }
  }

  void SetNext(SafepointPosition* next) {
    next_ = next;
  }

  size_t GetPosition() const {
    return ComputePosition(instruction_);
  }

  SafepointPosition* GetNext() const {
    return next_;
  }

  LocationSummary* GetLocations() const {
    return instruction_->GetLocations();
  }

  HInstruction* GetInstruction() const {
    return instruction_;
  }

 private:
  HInstruction* const instruction_;
  SafepointPosition* next_;

  DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
};

/**
 * An interval is a list of disjoint live ranges where an instruction is live.
 * Each instruction that has uses gets an interval.
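 *
 * For example (illustrative), an interval with ranges { [10, 16) [24, 26) }
 * describes a value that is live over two disjoint spans, with a lifetime
 * hole in between where the value needs neither register nor stack location.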
 */
class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  static LiveInterval* MakeInterval(ScopedArenaAllocator* allocator,
                                    DataType::Type type,
                                    HInstruction* instruction = nullptr) {
    return new (allocator) LiveInterval(allocator, type, instruction);
  }

  static LiveInterval* MakeFixedInterval(ScopedArenaAllocator* allocator,
                                         int reg,
                                         DataType::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, true, reg, false);
  }

  static LiveInterval* MakeTempInterval(ScopedArenaAllocator* allocator, DataType::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, false, kNoRegister, true);
  }

  bool IsFixed() const { return is_fixed_; }
  bool IsTemp() const { return is_temp_; }
  // This interval is the result of a split.
  bool IsSplit() const { return parent_ != this; }

  void AddTempUse(HInstruction* instruction, size_t temp_index) {
    DCHECK(IsTemp());
    DCHECK(GetUses().empty()) << "A temporary can only have one user";
    DCHECK(GetEnvironmentUses().empty()) << "A temporary cannot have an environment user";
    size_t position = instruction->GetLifetimePosition();
    UsePosition* new_use = new (allocator_) UsePosition(instruction, temp_index, position);
    uses_.push_front(*new_use);
    AddRange(position, position + 1);
  }

  // Records the use of an input. The use will be recorded as an environment use if
  // `environment` is not null, and as a register use otherwise. If `actual_user`
  // is specified, the use will be recorded at `actual_user`'s lifetime position.
  void AddUse(HInstruction* instruction,
              HEnvironment* environment,
              size_t input_index,
              HInstruction* actual_user = nullptr) {
    bool is_environment = (environment != nullptr);
    LocationSummary* locations = instruction->GetLocations();
    if (actual_user == nullptr) {
      actual_user = instruction;
    }

    // Set the use within the instruction.
    size_t position = actual_user->GetLifetimePosition() + 1;
    if (!is_environment) {
      if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
        // For fixed inputs, and when the output uses the same location as an input,
        // the register allocator requires inputs to die at the instruction, so that
        // input moves use the location of the input just before that instruction
        // (and not potential moves due to splitting).
        DCHECK_EQ(instruction, actual_user);
        position = actual_user->GetLifetimePosition();
      } else if (!locations->InAt(input_index).IsValid()) {
        return;
      }
    }

    if (!is_environment && instruction->IsInLoop()) {
      AddBackEdgeUses(*instruction->GetBlock());
    }

    if ((!uses_.empty()) &&
        (uses_.front().GetUser() == actual_user) &&
        (uses_.front().GetPosition() < position)) {
      // The user uses the instruction multiple times, and one use dies before the other.
      // We update the use list so that the latter is first.
      DCHECK(!is_environment);
      DCHECK(uses_.front().GetPosition() + 1 == position);
      UsePositionList::iterator next_pos = uses_.begin();
      UsePositionList::iterator insert_pos;
      do {
        insert_pos = next_pos;
        ++next_pos;
      } while (next_pos != uses_.end() && next_pos->GetPosition() < position);
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.insert_after(insert_pos, *new_use);
      if (first_range_->GetEnd() == uses_.front().GetPosition()) {
        first_range_->end_ = position;
      }
      return;
    }

    if (is_environment) {
      DCHECK(env_uses_.empty() || position <= env_uses_.front().GetPosition());
      EnvUsePosition* new_env_use =
          new (allocator_) EnvUsePosition(environment, input_index, position);
      env_uses_.push_front(*new_env_use);
    } else {
      DCHECK(uses_.empty() || position <= uses_.front().GetPosition());
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.push_front(*new_use);
    }

    size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    if (first_range_ == nullptr) {
      // First time we see a use of that interval.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, nullptr);
    } else if (first_range_->GetStart() == start_block_position) {
      // There is a use later in the same block or in a following block.
      // Note that in such a case, `AddRange` for the whole blocks has been called
      // before arriving in this method, and this is the reason the start of
      // `first_range_` is before the given `position`.
      DCHECK_LE(position, first_range_->GetEnd());
    } else {
      DCHECK(first_range_->GetStart() > position);
      // There is a hole in the interval. Create a new range.
      // Note that we deliberately do not merge adjacent ranges here: two
      // blocks having adjacent lifetime positions are not necessarily
      // predecessor/successor. When two blocks are predecessor/successor,
      // the liveness algorithm has called `AddRange` before arriving in this
      // method, and the `first_range_->GetStart() == start_block_position`
      // check above would have matched.
      first_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, first_range_);
    }
  }

  void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    DCHECK(instruction->IsPhi());
    if (block->IsInLoop()) {
      AddBackEdgeUses(*block);
    }
    UsePosition* new_use =
        new (allocator_) UsePosition(instruction, input_index, block->GetLifetimeEnd());
    uses_.push_front(*new_use);
  }

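  // Ranges are added in reverse linear order, so a new range either extends
  // `first_range_` or is prepended before it. Example (illustrative): after
  // AddRange(20, 24), calling AddRange(10, 20) merges both into the single
  // range [10, 24), whereas AddRange(10, 16) prepends a new range, leaving
  // { [10, 16) [20, 24) } with a lifetime hole in between.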
  ALWAYS_INLINE void AddRange(size_t start, size_t end) {
    if (first_range_ == nullptr) {
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, first_range_);
    } else if (first_range_->GetStart() == end) {
      // There is a use in the following block.
      first_range_->start_ = start;
    } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
      DCHECK(is_fixed_);
    } else {
      DCHECK_GT(first_range_->GetStart(), end);
      // There is a hole in the interval. Create a new range.
      first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
    }
  }

  void AddLoopRange(size_t start, size_t end) {
    DCHECK(first_range_ != nullptr);
    DCHECK_LE(start, first_range_->GetStart());
    // Find the range that covers the positions after the loop.
    LiveRange* after_loop = first_range_;
    LiveRange* last_in_loop = nullptr;
    while (after_loop != nullptr && after_loop->GetEnd() < end) {
      DCHECK_LE(start, after_loop->GetStart());
      last_in_loop = after_loop;
      after_loop = after_loop->GetNext();
    }
    if (after_loop == nullptr) {
      // Uses are only in the loop.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, nullptr);
    } else if (after_loop->GetStart() <= end) {
      first_range_ = range_search_start_ = after_loop;
      // There are uses after the loop.
      first_range_->start_ = start;
    } else {
      // The use after the loop is after a lifetime hole.
      DCHECK(last_in_loop != nullptr);
      first_range_ = range_search_start_ = last_in_loop;
      first_range_->start_ = start;
      first_range_->end_ = end;
    }
  }

  bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
  void SetSpillSlot(int slot) {
    DCHECK(!is_fixed_);
    DCHECK(!is_temp_);
    spill_slot_ = slot;
  }
  int GetSpillSlot() const { return spill_slot_; }

  void SetFrom(size_t from) {
    if (first_range_ != nullptr) {
      first_range_->start_ = from;
    } else {
      // Instruction without uses.
      DCHECK(uses_.empty());
      DCHECK(from == defined_by_->GetLifetimePosition());
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(from, from + 2, nullptr);
    }
  }

  LiveInterval* GetParent() const { return parent_; }

  // Returns whether this interval is the parent interval, that is, the interval
  // that starts where the HInstruction is defined.
  bool IsParent() const { return parent_ == this; }

  LiveRange* GetFirstRange() const { return first_range_; }
  LiveRange* GetLastRange() const { return last_range_; }

  int GetRegister() const { return register_; }
  void SetRegister(int reg) { register_ = reg; }
  void ClearRegister() { register_ = kNoRegister; }
  bool HasRegister() const { return register_ != kNoRegister; }

  bool IsDeadAt(size_t position) const {
    return GetEnd() <= position;
  }

  bool IsDefinedAt(size_t position) const {
    return GetStart() <= position && !IsDeadAt(position);
  }

  // Returns true if the interval contains a LiveRange covering `position`.
  // The range at or immediately after the current position of linear scan
  // is cached for better performance. If `position` can be smaller than
  // that, CoversSlow should be used instead.
  bool Covers(size_t position) {
    LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
    range_search_start_ = candidate;
    return (candidate != nullptr && candidate->GetStart() <= position);
  }

  // Same as Covers but always tests all ranges.
  bool CoversSlow(size_t position) const {
    LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
    return candidate != nullptr && candidate->GetStart() <= position;
  }

  // Returns the first intersection of this interval with `current`, which
  // must be the interval currently being allocated by linear scan.
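  // Both range lists are walked in lockstep, advancing whichever range ends
  // first. Example (illustrative): { [2, 6) [10, 14) } and { [4, 8) } first
  // intersect at position 4.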
  size_t FirstIntersectionWith(LiveInterval* current) const {
    // Find the first range after the start of `current`. We use the search
    // cache to improve performance.
    DCHECK(GetStart() <= current->GetStart() || IsFixed());
    LiveRange* other_range = current->first_range_;
    LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
    if (my_range == nullptr) {
      return kNoLifetime;
    }

    // Advance both intervals and find the first matching range start in
    // this interval.
    do {
      if (my_range->IsBefore(*other_range)) {
        my_range = my_range->GetNext();
        if (my_range == nullptr) {
          return kNoLifetime;
        }
      } else if (other_range->IsBefore(*my_range)) {
        other_range = other_range->GetNext();
        if (other_range == nullptr) {
          return kNoLifetime;
        }
      } else {
        DCHECK(my_range->IntersectsWith(*other_range));
        return std::max(my_range->GetStart(), other_range->GetStart());
      }
    } while (true);
  }

  size_t GetStart() const {
    return first_range_->GetStart();
  }

  size_t GetEnd() const {
    return last_range_->GetEnd();
  }

  size_t GetLength() const {
    return GetEnd() - GetStart();
  }

  size_t FirstRegisterUseAfter(size_t position) const {
    if (is_temp_) {
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        break;
      }
      if (use_position > position) {
        if (use.RequiresRegister()) {
          return use_position;
        }
      }
    }
    return kNoLifetime;
  }

  // Returns the position of the first register use for this live interval,
  // including the defining position if the definition requires a register.
  size_t FirstRegisterUse() const {
    return FirstRegisterUseAfter(GetStart());
  }

  // Whether the interval requires a register rather than a stack location.
  // If needed for performance, this could be cached.
  bool RequiresRegister() const {
    return !HasRegister() && FirstRegisterUse() != kNoLifetime;
  }

  size_t FirstUseAfter(size_t position) const {
    if (is_temp_) {
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position)) {
      DCHECK(defined_by_->GetLocations()->Out().IsValid());
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        break;
      }
      if (use_position > position) {
        return use_position;
      }
    }
    return kNoLifetime;
  }

  const UsePositionList& GetUses() const {
    return parent_->uses_;
  }

  const EnvUsePositionList& GetEnvironmentUses() const {
    return parent_->env_uses_;
  }

  DataType::Type GetType() const {
    return type_;
  }

  HInstruction* GetDefinedBy() const {
    return defined_by_;
  }

  bool HasWillCallSafepoint() const {
    for (SafepointPosition* safepoint = first_safepoint_;
         safepoint != nullptr;
         safepoint = safepoint->GetNext()) {
      if (safepoint->GetLocations()->WillCall()) return true;
    }
    return false;
  }

  SafepointPosition* FindSafepointJustBefore(size_t position) const {
    for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
         safepoint != nullptr;
         previous = safepoint, safepoint = safepoint->GetNext()) {
      if (safepoint->GetPosition() >= position) return previous;
    }
    return last_safepoint_;
  }

  /**
   * Split this interval at `position`. This interval is changed to:
   * [start ... position).
   *
   * The new interval covers:
   * [position ... end)
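   *
   * Example (illustrative): splitting an interval with ranges
   * { [4, 10) [14, 20) } at position 16 leaves this interval with
   * { [4, 10) [14, 16) } and returns a new sibling interval covering
   * { [16, 20) }.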
   */
  LiveInterval* SplitAt(size_t position) {
    DCHECK(!is_temp_);
    DCHECK(!is_fixed_);
    DCHECK_GT(position, GetStart());

    if (GetEnd() <= position) {
      // This range dies before `position`, no need to split.
      return nullptr;
    }

    LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
    if (new_last_safepoint == nullptr) {
      new_interval->first_safepoint_ = first_safepoint_;
      new_interval->last_safepoint_ = last_safepoint_;
      first_safepoint_ = last_safepoint_ = nullptr;
    } else if (last_safepoint_ != new_last_safepoint) {
      new_interval->last_safepoint_ = last_safepoint_;
      new_interval->first_safepoint_ = new_last_safepoint->GetNext();
      DCHECK(new_interval->first_safepoint_ != nullptr);
      last_safepoint_ = new_last_safepoint;
      last_safepoint_->SetNext(nullptr);
    }

    new_interval->next_sibling_ = next_sibling_;
    next_sibling_ = new_interval;
    new_interval->parent_ = parent_;

    LiveRange* current = first_range_;
    LiveRange* previous = nullptr;
    // Iterate over the ranges, and either find a range that covers this position, or
    // two consecutive ranges with this position in between (that is, the position is
    // in a lifetime hole).
    do {
      if (position >= current->GetEnd()) {
        // Move to next range.
        previous = current;
        current = current->next_;
      } else if (position <= current->GetStart()) {
        // If the previous range did not cover this position, we know position is in
        // a lifetime hole. We can just break the first_range_ and last_range_ links
        // and return the new interval.
        DCHECK(previous != nullptr);
        DCHECK(current != first_range_);
        new_interval->last_range_ = last_range_;
        last_range_ = previous;
        previous->next_ = nullptr;
        new_interval->first_range_ = current;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to null
          // (i.e. the end of the interval) in the original interval.
          range_search_start_ = nullptr;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      } else {
        // This range covers position. We create a new last_range_ for this interval
        // that covers [current->GetStart(), position). We also shorten the current
        // range and make it the first range of the new interval.
        DCHECK(position < current->GetEnd() && position > current->GetStart());
        new_interval->last_range_ = last_range_;
        last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
        if (previous != nullptr) {
          previous->next_ = last_range_;
        } else {
          first_range_ = last_range_;
        }
        new_interval->first_range_ = current;
        current->start_ = position;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to `last_range_`
          // in the original interval. This is conservative but always correct.
          range_search_start_ = last_range_;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      }
    } while (current != nullptr);

    LOG(FATAL) << "Unreachable";
    return nullptr;
  }

  bool StartsBeforeOrAt(LiveInterval* other) const {
    return GetStart() <= other->GetStart();
  }

  bool StartsAfter(LiveInterval* other) const {
    return GetStart() > other->GetStart();
  }

  void Dump(std::ostream& stream) const {
    stream << "ranges: { ";
    LiveRange* current = first_range_;
    while (current != nullptr) {
      current->Dump(stream);
      stream << " ";
      current = current->GetNext();
    }
    stream << "}, uses: { ";
    for (const UsePosition& use : GetUses()) {
      use.Dump(stream);
      stream << " ";
    }
    stream << "}, { ";
    for (const EnvUsePosition& env_use : GetEnvironmentUses()) {
      env_use.Dump(stream);
      stream << " ";
    }
    stream << "}";
    stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
    stream << " is_low: " << IsLowInterval();
    stream << " is_high: " << IsHighInterval();
  }

  // Same as Dump, but adds context such as the instruction defining this interval, and
  // the register currently assigned to this interval.
  void DumpWithContext(std::ostream& stream, const CodeGenerator& codegen) const;

  LiveInterval* GetNextSibling() const { return next_sibling_; }
  LiveInterval* GetLastSibling() {
    LiveInterval* result = this;
    while (result->next_sibling_ != nullptr) {
      result = result->next_sibling_;
    }
    return result;
  }

  // Returns the first register hint that is free at least until the position
  // contained in `free_until` for that register. If none is found, returns
  // `kNoRegister`.
  int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;

  // If there is enough information at the definition site to pick a register (for
  // example, the output is required to use the same register as the first input),
  // returns that register as a hint. Returns kNoRegister otherwise.
  int FindHintAtDefinition() const;

  // Returns the number of required spilling slots (measured as a multiple of the
  // Dex virtual register size `kVRegSize`).
  size_t NumberOfSpillSlotsNeeded() const;

  bool IsFloatingPoint() const {
    return type_ == DataType::Type::kFloat32 || type_ == DataType::Type::kFloat64;
  }

  // Converts the location of the interval to a `Location` object.
  Location ToLocation() const;

  // Returns the location of the sibling of this interval that covers `position`.
  Location GetLocationAt(size_t position);

  // Finds the sibling that is defined at `position`.
  LiveInterval* GetSiblingAt(size_t position);

  // Returns whether `other` and `this` share the same kind of register.
  bool SameRegisterKind(Location other) const;
  bool SameRegisterKind(const LiveInterval& other) const {
    return IsFloatingPoint() == other.IsFloatingPoint();
  }

  bool HasHighInterval() const {
    return IsLowInterval();
  }

  bool HasLowInterval() const {
    return IsHighInterval();
  }

  LiveInterval* GetLowInterval() const {
    DCHECK(HasLowInterval());
    return high_or_low_interval_;
  }

  LiveInterval* GetHighInterval() const {
    DCHECK(HasHighInterval());
    return high_or_low_interval_;
  }

  bool IsHighInterval() const {
    return GetParent()->is_high_interval_;
  }

  bool IsLowInterval() const {
    return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
  }

  void SetLowInterval(LiveInterval* low) {
    DCHECK(IsHighInterval());
    high_or_low_interval_ = low;
  }

  void SetHighInterval(LiveInterval* high) {
    DCHECK(IsLowInterval());
    high_or_low_interval_ = high;
  }

  void AddHighInterval(bool is_temp = false) {
    DCHECK(IsParent());
    DCHECK(!HasHighInterval());
    DCHECK(!HasLowInterval());
    high_or_low_interval_ = new (allocator_) LiveInterval(
        allocator_, type_, defined_by_, false, kNoRegister, is_temp, true);
    high_or_low_interval_->high_or_low_interval_ = this;
    if (first_range_ != nullptr) {
      high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
      high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
      high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
    }
    auto pos = high_or_low_interval_->uses_.before_begin();
    for (const UsePosition& use : uses_) {
      UsePosition* new_use = use.Clone(allocator_);
      pos = high_or_low_interval_->uses_.insert_after(pos, *new_use);
    }

    auto env_pos = high_or_low_interval_->env_uses_.before_begin();
    for (const EnvUsePosition& env_use : env_uses_) {
      EnvUsePosition* new_env_use = env_use.Clone(allocator_);
      env_pos = high_or_low_interval_->env_uses_.insert_after(env_pos, *new_env_use);
    }
  }

  // Returns whether a non-split interval is using the same register as one
  // of its inputs.
  bool IsUsingInputRegister() const {
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    if (defined_by_ != nullptr && !IsSplit()) {
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the interval that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        // Check if both intervals have the same register of the same kind.
        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          return true;
        }
      }
    }
    return false;
  }

  // Returns whether a non-split interval can safely use the same register
  // as one of its inputs. Note that this method requires
  // IsUsingInputRegister() to be true.
  bool CanUseInputRegister() const {
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    DCHECK(IsUsingInputRegister());
    if (defined_by_ != nullptr && !IsSplit()) {
      LocationSummary* locations = defined_by_->GetLocations();
      if (locations->OutputCanOverlapWithInputs()) {
        return false;
      }
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the interval that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          // We found the input that has the same register. Check if it is live after
          // `defined_by_`.
          return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
        }
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void AddSafepoint(HInstruction* instruction) {
    SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
    if (first_safepoint_ == nullptr) {
      first_safepoint_ = last_safepoint_ = safepoint;
    } else {
      DCHECK_LE(last_safepoint_->GetPosition(), safepoint->GetPosition());
      last_safepoint_->SetNext(safepoint);
      last_safepoint_ = safepoint;
    }
  }

  SafepointPosition* GetFirstSafepoint() const {
    return first_safepoint_;
  }

  // Resets the starting point for range-searching queries to the first range.
  // Intervals must be reset prior to starting a new linear scan over them.
  void ResetSearchCache() {
    range_search_start_ = first_range_;
  }

  bool DefinitionRequiresRegister() const {
    DCHECK(IsParent());
    LocationSummary* locations = defined_by_->GetLocations();
    Location location = locations->Out();
    // This interval is the first interval of the instruction. If the output
    // of the instruction requires a register, the position of the instruction
    // counts as the first register use.
    if (location.IsUnallocated()) {
      if ((location.GetPolicy() == Location::kRequiresRegister)
           || (location.GetPolicy() == Location::kSameAsFirstInput
               && (locations->InAt(0).IsRegister()
                   || locations->InAt(0).IsRegisterPair()
                   || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
        return true;
      } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
                 || (location.GetPolicy() == Location::kSameAsFirstInput
                     && (locations->InAt(0).IsFpuRegister()
                         || locations->InAt(0).IsFpuRegisterPair()
                         || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
        return true;
      }
    } else if (location.IsRegister() || location.IsRegisterPair()) {
      return true;
    }
    return false;
  }

 private:
  LiveInterval(ScopedArenaAllocator* allocator,
               DataType::Type type,
               HInstruction* defined_by = nullptr,
               bool is_fixed = false,
               int reg = kNoRegister,
               bool is_temp = false,
               bool is_high_interval = false)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        range_search_start_(nullptr),
        first_safepoint_(nullptr),
        last_safepoint_(nullptr),
        uses_(),
        env_uses_(),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),
        register_(reg),
        spill_slot_(kNoSpillSlot),
        is_fixed_(is_fixed),
        is_temp_(is_temp),
        is_high_interval_(is_high_interval),
        high_or_low_interval_(nullptr),
        defined_by_(defined_by) {}

  // Searches for a LiveRange that either covers the given position or is the
  // first one starting after it. Returns null if no such LiveRange exists.
  // Ranges known to end before `position` can be skipped with `search_start`.
  LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
    if (kIsDebugBuild) {
      if (search_start != first_range_) {
        // If we are not searching the entire list of ranges, make sure we do
        // not skip the range we are searching for.
        if (search_start == nullptr) {
          DCHECK(IsDeadAt(position));
        } else if (search_start->GetStart() > position) {
          DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
        }
      }
    }

    LiveRange* range;
    for (range = search_start;
         range != nullptr && range->GetEnd() <= position;
         range = range->GetNext()) {
      continue;
    }
    return range;
  }

  bool IsDefiningPosition(size_t position) const {
    return IsParent() && (position == GetStart());
  }

  bool HasSynthesizeUseAt(size_t position) const {
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if ((use_position == position) && use.IsSynthesized()) {
        return true;
      }
      if (use_position > position) break;
    }
    return false;
  }

  void AddBackEdgeUses(const HBasicBlock& block_at_use) {
    DCHECK(block_at_use.IsInLoop());
    if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
      // Linear order may not be well formed when irreducible loops are present,
      // i.e. loop blocks may not be adjacent and a back edge may not be last,
      // which violates assumptions made in this method.
      return;
    }

    // Add synthesized uses at the back edge of loops to help the register allocator.
    // Note that this method is called in decreasing liveness order, to facilitate
    // adding uses at the head of the `uses_` list. Because the loop below iterates
    // from inner-most to outer-most, which is in increasing liveness order, we need
    // to add subsequent entries after the last inserted entry.
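    // Example (illustrative): for a use in the body of a doubly nested loop
    // whose defining instruction lives outside both loops, a synthesized use
    // is added at the inner loop's back edge and then at the outer loop's
    // back edge, keeping the value live across both loops.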
    const UsePositionList::iterator old_begin = uses_.begin();
    UsePositionList::iterator insert_pos = uses_.before_begin();
    for (HLoopInformationOutwardIterator it(block_at_use);
         !it.Done();
         it.Advance()) {
      HLoopInformation* current = it.Current();
      if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
        // This interval is defined in the loop. We can stop going outward.
        break;
      }

      // We're only adding a synthesized use at the last back edge. Adding synthesized
      // uses on all back edges is not necessary: anything used in the loop will have
      // its use at the last back edge. If we wanted one branch in a loop to get better
      // register allocation than another, it is the linear order we should change.
      size_t back_edge_use_position = current->GetLifetimeEnd();
      if ((old_begin != uses_.end()) && (old_begin->GetPosition() <= back_edge_use_position)) {
        // There was a use already seen in this loop. Therefore the previous call to `AddUse`
        // already inserted the backedge use. We can stop going outward.
        DCHECK(HasSynthesizeUseAt(back_edge_use_position));
        break;
      }

      DCHECK(insert_pos != uses_.before_begin()
             ? back_edge_use_position > insert_pos->GetPosition()
             : current == block_at_use.GetLoopInformation())
          << std::distance(uses_.before_begin(), insert_pos);

      UsePosition* new_use = new (allocator_) UsePosition(back_edge_use_position);
      insert_pos = uses_.insert_after(insert_pos, *new_use);
    }
  }

  ScopedArenaAllocator* const allocator_;

  // Ranges of this interval. We need quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // The first range at or after the current position of a linear scan. It is
  // used to optimize range-searching queries.
  LiveRange* range_search_start_;

  // Safepoints where this interval is live.
  SafepointPosition* first_safepoint_;
  SafepointPosition* last_safepoint_;

  // Uses of this interval. Only the parent interval keeps these lists.
  UsePositionList uses_;
  EnvUsePositionList env_uses_;

  // The instruction type this interval corresponds to.
  const DataType::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

   1121   LiveInterval* parent_;
   1122 
   1123   // The register allocated to this interval.
   1124   int register_;
   1125 
   1126   // The spill slot allocated to this interval.
   1127   int spill_slot_;
   1128 
   1129   // Whether the interval is for a fixed register.
   1130   const bool is_fixed_;
   1131 
   1132   // Whether the interval is for a temporary.
   1133   const bool is_temp_;
   1134 
  // Whether this interval is a synthesized interval for a register pair.
  const bool is_high_interval_;

  // If this interval needs a register pair, the high or low equivalent.
  // `is_high_interval_` tells whether this holds the low or the high.
  LiveInterval* high_or_low_interval_;

  // The instruction represented by this interval.
  HInstruction* const defined_by_;

  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
};

/**
 * Analysis that computes the liveness of instructions:
 *
 * (a) Non-environment uses of an instruction always make
 *     the instruction live.
 * (b) Environment uses of an instruction whose type is object (that is, non-primitive)
 *     make the instruction live, unless the class has a @DeadReferenceSafe annotation.
 *     This avoids unexpected premature reference enqueuing or finalization, which could
 *     result in premature deletion of native objects. In the presence of @DeadReferenceSafe,
 *     object references are treated like primitive types.
 * (c) When the graph has the debuggable property, environment uses
 *     of an instruction that has a primitive type make the instruction live.
 *     If the graph does not have the debuggable property, the environment
 *     use has no effect, and may get a 'none' value after register allocation.
 * (d) When compiling in OSR mode, all loops in the compiled method may be entered
 *     from the interpreter via SuspendCheck; such a use at a SuspendCheck makes the
 *     instruction live.
 *
 * (b), (c) and (d) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
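 *
 * The live-in and live-out sets follow the standard backward dataflow
 * equations, sketched informally as:
 *
 *   live_out(B) = union of live_in(S) over all successors S of B
 *   live_in(B)  = (live_out(B) - kill(B)) union uses(B)
 *
 * which ComputeLiveInAndLiveOutSets() iterates to a fixed point to account
 * for loops.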
 */
class SsaLivenessAnalysis : public ValueObject {
 public:
  SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen, ScopedArenaAllocator* allocator)
      : graph_(graph),
        codegen_(codegen),
        allocator_(allocator),
        block_infos_(graph->GetBlocks().size(),
                     nullptr,
                     allocator_->Adapter(kArenaAllocSsaLiveness)),
        instructions_from_ssa_index_(allocator_->Adapter(kArenaAllocSsaLiveness)),
        instructions_from_lifetime_position_(allocator_->Adapter(kArenaAllocSsaLiveness)),
        number_of_ssa_values_(0) {
  }

  void Analyze();

  BitVector* GetLiveInSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->live_in_;
  }

  BitVector* GetLiveOutSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->live_out_;
  }

  BitVector* GetKillSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->kill_;
  }

  HInstruction* GetInstructionFromSsaIndex(size_t index) const {
    return instructions_from_ssa_index_[index];
  }

  HInstruction* GetInstructionFromPosition(size_t index) const {
    return instructions_from_lifetime_position_[index];
  }

  HBasicBlock* GetBlockFromPosition(size_t index) const {
    HInstruction* instruction = GetInstructionFromPosition(index);
    if (instruction == nullptr) {
      // If we are at a block boundary, get the block following.
      instruction = GetInstructionFromPosition(index + 1);
    }
    return instruction->GetBlock();
  }

  bool IsAtBlockBoundary(size_t index) const {
    return GetInstructionFromPosition(index) == nullptr;
  }

  HInstruction* GetTempUser(LiveInterval* temp) const {
    // A temporary shares the same lifetime start as the instruction that requires it.
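    // Lifetime positions are spaced two apart, with instructions at even
    // positions, so dividing the start position by two recovers the index
    // into `instructions_from_lifetime_position_`.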
    DCHECK(temp->IsTemp());
    HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
    DCHECK_EQ(user, temp->GetUses().front().GetUser());
    return user;
  }

  size_t GetTempIndex(LiveInterval* temp) const {
    // We use the input index to store the index of the temporary in the user's temporary list.
    DCHECK(temp->IsTemp());
    return temp->GetUses().front().GetInputIndex();
  }

  size_t GetMaxLifetimePosition() const {
    return instructions_from_lifetime_position_.size() * 2 - 1;
  }

  size_t GetNumberOfSsaValues() const {
    return number_of_ssa_values_;
  }

  static constexpr const char* kLivenessPassName = "liveness";

 private:
  // Give an SSA number to each instruction that defines a value used by another instruction,
  // and set up the lifetime information of each instruction and block.
  void NumberInstructions();

  // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
  void ComputeLiveness();

  // Compute the live ranges of instructions, as well as the initial live_in, live_out and
  // kill sets, that do not take into account backward branches.
  void ComputeLiveRanges();

  // After computing the initial sets, this method does a fixed point
  // calculation over the live_in and live_out set to take into account
  // backwards branches.
  void ComputeLiveInAndLiveOutSets();

  // Updates the live_in set of the block and returns whether it has changed.
  bool UpdateLiveIn(const HBasicBlock& block);

  // Updates the live_out set of the block and returns whether it has changed.
  bool UpdateLiveOut(const HBasicBlock& block);

  static void ProcessEnvironment(HInstruction* instruction,
                                 HInstruction* actual_user,
                                 BitVector* live_in);
  static void RecursivelyProcessInputs(HInstruction* instruction,
                                       HInstruction* actual_user,
                                       BitVector* live_in);

  // Returns whether `instruction` in an HEnvironment held by `env_holder`
  // should be kept live by the HEnvironment.
  static bool ShouldBeLiveForEnvironment(HInstruction* env_holder, HInstruction* instruction) {
    DCHECK(instruction != nullptr);
    // A value that's not live in compiled code may still be needed in the interpreter,
    // due to code motion, etc.
    if (env_holder->IsDeoptimize()) return true;
    // A value live at a throwing instruction in a try block may be copied by
    // the exception handler to its location at the top of the catch block.
    if (env_holder->CanThrowIntoCatchBlock()) return true;
    HGraph* graph = instruction->GetBlock()->GetGraph();
    if (graph->IsDebuggable()) return true;
    // When compiling in OSR mode, all loops in the compiled method may be entered
    // from the interpreter via SuspendCheck; thus we need to preserve the environment.
    if (env_holder->IsSuspendCheck() && graph->IsCompilingOsr()) return true;
    if (graph->IsDeadReferenceSafe()) return false;
    return instruction->GetType() == DataType::Type::kReference;
  }

  void CheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const {
    if (!block.IsLoopHeader() || !block.GetLoopInformation()->IsIrreducible()) {
      return;
    }
    BitVector* live_in = GetLiveInSet(block);
    // To satisfy our liveness algorithm, we need to ensure loop headers of
    // irreducible loops do not have any live-in instructions, except constants
    // and the current method, which can be trivially re-materialized.
    for (uint32_t idx : live_in->Indexes()) {
      HInstruction* instruction = GetInstructionFromSsaIndex(idx);
      DCHECK(instruction->GetBlock()->IsEntryBlock()) << instruction->DebugName();
      DCHECK(!instruction->IsParameterValue());
      DCHECK(instruction->IsCurrentMethod() || instruction->IsConstant())
          << instruction->DebugName();
    }
  }

  HGraph* const graph_;
  CodeGenerator* const codegen_;

  // Use a local ScopedArenaAllocator for allocating memory.
  // This allocator must remain alive while doing register allocation.
  ScopedArenaAllocator* const allocator_;

  ScopedArenaVector<BlockInfo*> block_infos_;

  // Temporary array used when computing live_in, live_out, and kill sets.
  ScopedArenaVector<HInstruction*> instructions_from_ssa_index_;

  // Temporary array used when inserting moves in the graph.
  ScopedArenaVector<HInstruction*> instructions_from_lifetime_position_;
  size_t number_of_ssa_values_;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
  ART_FRIEND_TEST(RegisterAllocatorTest, FreeUntil);

  DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_