Home | History | Annotate | Download | only in optimizing
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
     18 #define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
     19 
     20 #include "nodes.h"
     21 #include <iostream>
     22 
     23 namespace art {
     24 
     25 class CodeGenerator;
     26 class SsaLivenessAnalysis;
     27 
     28 static constexpr int kNoRegister = -1;
     29 
// Per-block liveness data computed by SsaLivenessAnalysis: the live-in,
// live-out and kill sets of one HBasicBlock, each a bit vector indexed by
// SSA value number.
class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  // `number_of_ssa_values` sizes the three bit vectors; all sets start empty.
  BlockInfo(ArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
      : block_(block),
        live_in_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) {
    // block_ is only read by the friend SsaLivenessAnalysis; this silences
    // unused-member warnings in this translation unit.
    UNUSED(block_);
    live_in_.ClearAllBits();
    live_out_.ClearAllBits();
    kill_.ClearAllBits();
  }

 private:
  const HBasicBlock& block_;
  ArenaBitVector live_in_;   // Values live at entry of block_.
  ArenaBitVector live_out_;  // Values live at exit of block_.
  ArenaBitVector kill_;      // Values killed in block_ (maintained by the friend class).

  friend class SsaLivenessAnalysis;

  DISALLOW_COPY_AND_ASSIGN(BlockInfo);
};
     53 
     54 /**
     55  * A live range contains the start and end of a range where an instruction or a temporary
     56  * is live.
     57  */
     58 class LiveRange FINAL : public ArenaObject<kArenaAllocSsaLiveness> {
     59  public:
     60   LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
     61     DCHECK_LT(start, end);
     62     DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
     63   }
     64 
     65   size_t GetStart() const { return start_; }
     66   size_t GetEnd() const { return end_; }
     67   LiveRange* GetNext() const { return next_; }
     68 
     69   bool IntersectsWith(const LiveRange& other) const {
     70     return (start_ >= other.start_ && start_ < other.end_)
     71         || (other.start_ >= start_ && other.start_ < end_);
     72   }
     73 
     74   bool IsBefore(const LiveRange& other) const {
     75     return end_ <= other.start_;
     76   }
     77 
     78   void Dump(std::ostream& stream) const {
     79     stream << "[" << start_ << "," << end_ << ")";
     80   }
     81 
     82   LiveRange* Dup(ArenaAllocator* allocator) const {
     83     return new (allocator) LiveRange(
     84         start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
     85   }
     86 
     87   LiveRange* GetLastRange() {
     88     return next_ == nullptr ? this : next_->GetLastRange();
     89   }
     90 
     91  private:
     92   size_t start_;
     93   size_t end_;
     94   LiveRange* next_;
     95 
     96   friend class LiveInterval;
     97 
     98   DISALLOW_COPY_AND_ASSIGN(LiveRange);
     99 };
    100 
/**
 * A use position represents a live interval use at a given position.
 * A use is either a register use (`user` non-null) or an environment use
 * (`environment` non-null); the constructor enforces that at most one is set.
 */
class UsePosition : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  // `next` must be null or at a position >= `position`: use lists are kept
  // sorted by non-decreasing position.
  UsePosition(HInstruction* user,
              HEnvironment* environment,
              size_t input_index,
              size_t position,
              UsePosition* next)
      : user_(user),
        environment_(environment),
        input_index_(input_index),
        position_(position),
        next_(next) {
    DCHECK(environment == nullptr || user == nullptr);
    DCHECK(next_ == nullptr || next->GetPosition() >= GetPosition());
  }

  // Sentinel input index (wraps to the maximum size_t value).
  static constexpr size_t kNoInput = -1;

  size_t GetPosition() const { return position_; }

  UsePosition* GetNext() const { return next_; }
  void SetNext(UsePosition* next) { next_ = next; }

  HInstruction* GetUser() const { return user_; }
  HEnvironment* GetEnvironment() const { return environment_; }

  bool GetIsEnvironment() const { return environment_ != nullptr; }
  // No user instruction recorded. Note that environment uses also store a
  // null user, so this is true for them as well.
  bool IsSynthesized() const { return user_ == nullptr; }

  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  // Only valid when user_ is non-null (i.e. a register use).
  HLoopInformation* GetLoopInformation() const {
    return user_->GetBlock()->GetLoopInformation();
  }

  // Deep-copies this use and every use linked after it.
  UsePosition* Dup(ArenaAllocator* allocator) const {
    return new (allocator) UsePosition(
        user_, environment_, input_index_, position_,
        next_ == nullptr ? nullptr : next_->Dup(allocator));
  }

  // Whether this use needs the value in a (core or fpu) register, as declared
  // by the user's location summary. Environment and synthesized uses never do.
  bool RequiresRegister() const {
    if (GetIsEnvironment()) return false;
    if (IsSynthesized()) return false;
    Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
    return location.IsUnallocated()
        && (location.GetPolicy() == Location::kRequiresRegister
            || location.GetPolicy() == Location::kRequiresFpuRegister);
  }

 private:
  HInstruction* const user_;
  HEnvironment* const environment_;
  const size_t input_index_;
  const size_t position_;
  UsePosition* next_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
    167 
// Wraps an instruction that is a safepoint within a live interval. Safepoints
// are chained in a singly linked list in increasing position order
// (LiveInterval::FindSafepointJustBefore relies on this ordering).
class SafepointPosition : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  explicit SafepointPosition(HInstruction* instruction)
      : instruction_(instruction),
        next_(nullptr) {}

  void SetNext(SafepointPosition* next) {
    next_ = next;
  }

  // Lifetime position of the safepoint instruction itself.
  size_t GetPosition() const {
    return instruction_->GetLifetimePosition();
  }

  SafepointPosition* GetNext() const {
    return next_;
  }

  LocationSummary* GetLocations() const {
    return instruction_->GetLocations();
  }

  HInstruction* GetInstruction() const {
    return instruction_;
  }

 private:
  HInstruction* const instruction_;
  SafepointPosition* next_;

  DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
};
    200 
/**
 * An interval is a list of disjoint live ranges where an instruction is live.
 * Each instruction that has uses gets an interval.
 */
class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  // Interval for a regular SSA value of the given `type`, optionally linked
  // to the instruction defining it.
  static LiveInterval* MakeInterval(ArenaAllocator* allocator,
                                    Primitive::Type type,
                                    HInstruction* instruction = nullptr) {
    return new (allocator) LiveInterval(allocator, type, instruction);
  }

  // Interval attached to `instruction` marking a slow-path safepoint
  // (IsSlowPathSafepoint() will be true).
  static LiveInterval* MakeSlowPathInterval(ArenaAllocator* allocator, HInstruction* instruction) {
    return new (allocator) LiveInterval(
        allocator, Primitive::kPrimVoid, instruction,
        /* is_fixed */ false, kNoRegister, /* is_temp */ false,
        /* is_slow_path_safepoint */ true);
  }

  // Interval pre-assigned to physical register `reg` (IsFixed() will be true).
  static LiveInterval* MakeFixedInterval(ArenaAllocator* allocator, int reg, Primitive::Type type) {
    return new (allocator) LiveInterval(
        allocator, type, nullptr, /* is_fixed */ true, reg, /* is_temp */ false);
  }

  // Interval for a temporary required by an instruction (IsTemp() will be true).
  static LiveInterval* MakeTempInterval(ArenaAllocator* allocator, Primitive::Type type) {
    return new (allocator) LiveInterval(
        allocator, type, nullptr, /* is_fixed */ false, kNoRegister, /* is_temp */ true);
  }
    225 
  // Whether this interval is pinned to a specific register (see MakeFixedInterval).
  bool IsFixed() const { return is_fixed_; }
  // Whether this interval models a temporary (see MakeTempInterval).
  bool IsTemp() const { return is_temp_; }
  // Whether this interval marks a slow-path safepoint (see MakeSlowPathInterval).
  bool IsSlowPathSafepoint() const { return is_slow_path_safepoint_; }
  // This interval is the result of a split.
  bool IsSplit() const { return parent_ != this; }

  // Registers the single use of a temporary: the temp lives exactly at
  // `instruction`'s lifetime position, i.e. the range [position, position + 1).
  void AddTempUse(HInstruction* instruction, size_t temp_index) {
    DCHECK(IsTemp());
    DCHECK(first_use_ == nullptr) << "A temporary can only have one user";
    DCHECK(first_env_use_ == nullptr) << "A temporary cannot have environment user";
    size_t position = instruction->GetLifetimePosition();
    first_use_ = new (allocator_) UsePosition(
        instruction, /* environment */ nullptr, temp_index, position, first_use_);
    AddRange(position, position + 1);
  }
    241 
  // Record use of an input. The use will be recorded as an environment use if
  // `environment` is not null and as register use otherwise. If `actual_user`
  // is specified, the use will be recorded at `actual_user`'s lifetime position.
  // `keep_alive` controls whether an environment use extends the live range.
  void AddUse(HInstruction* instruction,
              HEnvironment* environment,
              size_t input_index,
              HInstruction* actual_user = nullptr,
              bool keep_alive = false) {
    bool is_environment = (environment != nullptr);
    LocationSummary* locations = instruction->GetLocations();
    if (actual_user == nullptr) {
      actual_user = instruction;
    }

    // Set the use within the instruction.
    size_t position = actual_user->GetLifetimePosition() + 1;
    if (!is_environment) {
      if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
        // For fixed inputs and output same as input, the register allocator
        // requires to have inputs die at the instruction, so that input moves use the
        // location of the input just before that instruction (and not potential moves due
        // to splitting).
        DCHECK_EQ(instruction, actual_user);
        position = actual_user->GetLifetimePosition();
      } else if (!locations->InAt(input_index).IsValid()) {
        // The input has no location: nothing to record.
        return;
      }
    }

    if (!is_environment && instruction->IsInLoop()) {
      AddBackEdgeUses(*instruction->GetBlock());
    }

    if ((first_use_ != nullptr)
        && (first_use_->GetUser() == actual_user)
        && (first_use_->GetPosition() < position)) {
      // The user uses the instruction multiple times, and one use dies before the other.
      // We update the use list so that the latter is first.
      DCHECK(!is_environment);
      UsePosition* cursor = first_use_;
      while ((cursor->GetNext() != nullptr) && (cursor->GetNext()->GetPosition() < position)) {
        cursor = cursor->GetNext();
      }
      DCHECK(first_use_->GetPosition() + 1 == position);
      UsePosition* new_use = new (allocator_) UsePosition(
          instruction, nullptr /* environment */, input_index, position, cursor->GetNext());
      cursor->SetNext(new_use);
      // Extend the first range if the previous first use ended it.
      if (first_range_->GetEnd() == first_use_->GetPosition()) {
        first_range_->end_ = position;
      }
      return;
    }

    if (is_environment) {
      first_env_use_ = new (allocator_) UsePosition(
          nullptr /* instruction */, environment, input_index, position, first_env_use_);
    } else {
      first_use_ = new (allocator_) UsePosition(
          instruction, nullptr /* environment */, input_index, position, first_use_);
    }

    if (is_environment && !keep_alive) {
      // If this environment use does not keep the instruction live, it does not
      // affect the live range of that instruction.
      return;
    }

    size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    if (first_range_ == nullptr) {
      // First time we see a use of that interval.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, nullptr);
    } else if (first_range_->GetStart() == start_block_position) {
      // There is a use later in the same block or in a following block.
      // Note that in such a case, `AddRange` for the whole blocks has been called
      // before arriving in this method, and this is the reason the start of
      // `first_range_` is before the given `position`.
      DCHECK_LE(position, first_range_->GetEnd());
    } else {
      DCHECK(first_range_->GetStart() > position);
      // There is a hole in the interval. Create a new range.
      // Note that the start of `first_range_` can be equal to `end`: two blocks
      // having adjacent lifetime positions are not necessarily
      // predecessor/successor. When two blocks are predecessor/successor, the
      // liveness algorithm has called `AddRange` before arriving in this method,
      // and the `first_range_->GetStart() == start_block_position` check above
      // would have been taken instead.
      first_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, first_range_);
    }
  }
    332 
  // Records that phi `instruction` uses this value as its `input_index`-th
  // input; the use is placed at the lifetime end of the predecessor `block`.
  void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    DCHECK(instruction->IsPhi());
    if (block->IsInLoop()) {
      AddBackEdgeUses(*block);
    }
    first_use_ = new (allocator_) UsePosition(
        instruction, /* environment */ nullptr, input_index, block->GetLifetimeEnd(), first_use_);
  }
    341 
  // Prepends the range [start, end) to the interval. Ranges must be added in
  // decreasing position order (see the DCHECK_GT below); a range abutting the
  // current first range is merged into it, and re-adding an identical first
  // range is only tolerated for fixed intervals.
  void AddRange(size_t start, size_t end) {
    if (first_range_ == nullptr) {
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, first_range_);
    } else if (first_range_->GetStart() == end) {
      // There is a use in the following block.
      first_range_->start_ = start;
    } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
      DCHECK(is_fixed_);
    } else {
      DCHECK_GT(first_range_->GetStart(), end);
      // There is a hole in the interval. Create a new range.
      first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
    }
  }
    357 
  // Extends the interval to cover the whole loop span [start, end). Ranges
  // fully inside the loop are subsumed; a range extending past `end` is kept
  // and stretched back to `start`.
  void AddLoopRange(size_t start, size_t end) {
    DCHECK(first_range_ != nullptr);
    DCHECK_LE(start, first_range_->GetStart());
    // Find the range that covers the positions after the loop.
    LiveRange* after_loop = first_range_;
    LiveRange* last_in_loop = nullptr;
    while (after_loop != nullptr && after_loop->GetEnd() < end) {
      DCHECK_LE(start, after_loop->GetStart());
      last_in_loop = after_loop;
      after_loop = after_loop->GetNext();
    }
    if (after_loop == nullptr) {
      // Uses are only in the loop.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, nullptr);
    } else if (after_loop->GetStart() <= end) {
      // The range crossing the loop end also covers positions after the loop:
      // reuse it, stretched back to the loop start.
      first_range_ = range_search_start_ = after_loop;
      // There are uses after the loop.
      first_range_->start_ = start;
    } else {
      // The use after the loop is after a lifetime hole.
      DCHECK(last_in_loop != nullptr);
      first_range_ = range_search_start_ = last_in_loop;
      first_range_->start_ = start;
      first_range_->end_ = end;
    }
  }
    385 
  bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
  void SetSpillSlot(int slot) {
    // Fixed and temp intervals are never spilled.
    DCHECK(!is_fixed_);
    DCHECK(!is_temp_);
    spill_slot_ = slot;
  }
  int GetSpillSlot() const { return spill_slot_; }

  // Moves the start (definition point) of the interval to `from`. An
  // instruction without uses still gets a minimal [from, from + 2) range.
  void SetFrom(size_t from) {
    if (first_range_ != nullptr) {
      first_range_->start_ = from;
    } else {
      // Instruction without uses.
      DCHECK(first_use_ == nullptr);
      DCHECK(from == defined_by_->GetLifetimePosition());
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(from, from + 2, nullptr);
    }
  }
    405 
  // The interval starting at the definition; `this` when not a split sibling.
  LiveInterval* GetParent() const { return parent_; }

  // Returns whether this interval is the parent interval, that is, the interval
  // that starts where the HInstruction is defined.
  bool IsParent() const { return parent_ == this; }

  LiveRange* GetFirstRange() const { return first_range_; }
  LiveRange* GetLastRange() const { return last_range_; }

  int GetRegister() const { return register_; }
  void SetRegister(int reg) { register_ = reg; }
  void ClearRegister() { register_ = kNoRegister; }
  bool HasRegister() const { return register_ != kNoRegister; }

  // Whether the interval has ended at `position` (ends are exclusive).
  bool IsDeadAt(size_t position) const {
    return GetEnd() <= position;
  }

  // Whether `position` lies between start and end. Note that `position` may
  // still fall in a lifetime hole; use Covers to check actual coverage.
  bool IsDefinedAt(size_t position) const {
    return GetStart() <= position && !IsDeadAt(position);
  }
    427 
  // Returns true if the interval contains a LiveRange covering `position`.
  // The range at or immediately after the current position of linear scan
  // is cached for better performance. If `position` can be smaller than
  // that, CoversSlow should be used instead.
  bool Covers(size_t position) {
    LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
    range_search_start_ = candidate;  // Cache for the next (non-decreasing) query.
    return (candidate != nullptr && candidate->GetStart() <= position);
  }

  // Same as Covers but always tests all ranges.
  bool CoversSlow(size_t position) const {
    LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
    return candidate != nullptr && candidate->GetStart() <= position;
  }
    443 
  // Returns the first intersection of this interval with `current`, which
  // must be the interval currently being allocated by linear scan.
  // Returns kNoLifetime if the two intervals never overlap.
  size_t FirstIntersectionWith(LiveInterval* current) const {
    // Find the first range after the start of `current`. We use the search
    // cache to improve performance.
    DCHECK(GetStart() <= current->GetStart() || IsFixed());
    LiveRange* other_range = current->first_range_;
    LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
    if (my_range == nullptr) {
      return kNoLifetime;
    }

    // Advance both intervals and find the first matching range start in
    // this interval.
    do {
      if (my_range->IsBefore(*other_range)) {
        my_range = my_range->GetNext();
        if (my_range == nullptr) {
          return kNoLifetime;
        }
      } else if (other_range->IsBefore(*my_range)) {
        other_range = other_range->GetNext();
        if (other_range == nullptr) {
          return kNoLifetime;
        }
      } else {
        // Neither range is entirely before the other: they overlap.
        DCHECK(my_range->IntersectsWith(*other_range));
        return std::max(my_range->GetStart(), other_range->GetStart());
      }
    } while (true);
  }

  // Start of the first range.
  size_t GetStart() const {
    return first_range_->GetStart();
  }

  // End (exclusive) of the last range.
  size_t GetEnd() const {
    return last_range_->GetEnd();
  }
    483 
    484   size_t FirstRegisterUseAfter(size_t position) const {
    485     if (is_temp_) {
    486       return position == GetStart() ? position : kNoLifetime;
    487     }
    488 
    489     if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
    490       return position;
    491     }
    492 
    493     UsePosition* use = first_use_;
    494     size_t end = GetEnd();
    495     while (use != nullptr && use->GetPosition() <= end) {
    496       size_t use_position = use->GetPosition();
    497       if (use_position > position) {
    498         if (use->RequiresRegister()) {
    499           return use_position;
    500         }
    501       }
    502       use = use->GetNext();
    503     }
    504     return kNoLifetime;
    505   }
    506 
    507   size_t FirstRegisterUse() const {
    508     return FirstRegisterUseAfter(GetStart());
    509   }
    510 
    511   size_t FirstUseAfter(size_t position) const {
    512     if (is_temp_) {
    513       return position == GetStart() ? position : kNoLifetime;
    514     }
    515 
    516     if (IsDefiningPosition(position)) {
    517       DCHECK(defined_by_->GetLocations()->Out().IsValid());
    518       return position;
    519     }
    520 
    521     UsePosition* use = first_use_;
    522     size_t end = GetEnd();
    523     while (use != nullptr && use->GetPosition() <= end) {
    524       size_t use_position = use->GetPosition();
    525       if (use_position > position) {
    526         return use_position;
    527       }
    528       use = use->GetNext();
    529     }
    530     return kNoLifetime;
    531   }
    532 
  // Head of the position-sorted list of register uses.
  UsePosition* GetFirstUse() const {
    return first_use_;
  }

  // Head of the list of environment uses.
  UsePosition* GetFirstEnvironmentUse() const {
    return first_env_use_;
  }

  Primitive::Type GetType() const {
    return type_;
  }

  // The defining instruction, or null (e.g. for fixed or temp intervals).
  HInstruction* GetDefinedBy() const {
    return defined_by_;
  }
    548 
    549   bool HasWillCallSafepoint() const {
    550     for (SafepointPosition* safepoint = first_safepoint_;
    551          safepoint != nullptr;
    552          safepoint = safepoint->GetNext()) {
    553       if (safepoint->GetLocations()->WillCall()) return true;
    554     }
    555     return false;
    556   }
    557 
    558   SafepointPosition* FindSafepointJustBefore(size_t position) const {
    559     for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
    560          safepoint != nullptr;
    561          previous = safepoint, safepoint = safepoint->GetNext()) {
    562       if (safepoint->GetPosition() >= position) return previous;
    563     }
    564     return last_safepoint_;
    565   }
    566 
  /**
   * Split this interval at `position`. This interval is changed to:
   * [start ... position).
   *
   * The new interval covers:
   * [position ... end)
   *
   * Returns the new interval (inserted as the next sibling, sharing this
   * interval's parent), or nullptr if this interval dies before `position`.
   */
  LiveInterval* SplitAt(size_t position) {
    DCHECK(!is_temp_);
    DCHECK(!is_fixed_);
    DCHECK_GT(position, GetStart());

    if (GetEnd() <= position) {
      // This range dies before `position`, no need to split.
      return nullptr;
    }

    LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    // Move the safepoints at or after `position` into the new interval.
    SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
    if (new_last_safepoint == nullptr) {
      // Every safepoint (if any) is at or after `position`: hand them all over.
      new_interval->first_safepoint_ = first_safepoint_;
      new_interval->last_safepoint_ = last_safepoint_;
      first_safepoint_ = last_safepoint_ = nullptr;
    } else if (last_safepoint_ != new_last_safepoint) {
      // Split the safepoint list after `new_last_safepoint`.
      new_interval->last_safepoint_ = last_safepoint_;
      new_interval->first_safepoint_ = new_last_safepoint->GetNext();
      DCHECK(new_interval->first_safepoint_ != nullptr);
      last_safepoint_ = new_last_safepoint;
      last_safepoint_->SetNext(nullptr);
    }

    new_interval->next_sibling_ = next_sibling_;
    next_sibling_ = new_interval;
    new_interval->parent_ = parent_;

    // Both siblings share the full use lists.
    new_interval->first_use_ = first_use_;
    new_interval->first_env_use_ = first_env_use_;
    LiveRange* current = first_range_;
    LiveRange* previous = nullptr;
    // Iterate over the ranges, and either find a range that covers this position, or
    // two ranges in between this position (that is, the position is in a lifetime hole).
    do {
      if (position >= current->GetEnd()) {
        // Move to next range.
        previous = current;
        current = current->next_;
      } else if (position <= current->GetStart()) {
        // If the previous range did not cover this position, we know position is in
        // a lifetime hole. We can just break the first_range_ and last_range_ links
        // and return the new interval.
        DCHECK(previous != nullptr);
        DCHECK(current != first_range_);
        new_interval->last_range_ = last_range_;
        last_range_ = previous;
        previous->next_ = nullptr;
        new_interval->first_range_ = current;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to null
          // (i.e. the end of the interval) in the original interval.
          range_search_start_ = nullptr;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      } else {
        // This range covers position. We create a new last_range_ for this interval
        // that covers last_range_->Start() and position. We also shorten the current
        // range and make it the first range of the new interval.
        DCHECK(position < current->GetEnd() && position > current->GetStart());
        new_interval->last_range_ = last_range_;
        last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
        if (previous != nullptr) {
          previous->next_ = last_range_;
        } else {
          first_range_ = last_range_;
        }
        new_interval->first_range_ = current;
        current->start_ = position;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to `last_range`
          // in the original interval. This is conservative but always correct.
          range_search_start_ = last_range_;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      }
    } while (current != nullptr);

    LOG(FATAL) << "Unreachable";
    return nullptr;
  }
    657 
    658   bool StartsBeforeOrAt(LiveInterval* other) const {
    659     return GetStart() <= other->GetStart();
    660   }
    661 
    662   bool StartsAfter(LiveInterval* other) const {
    663     return GetStart() > other->GetStart();
    664   }
    665 
  // Prints the ranges, uses, environment uses and a few flags, for debugging.
  void Dump(std::ostream& stream) const {
    stream << "ranges: { ";
    LiveRange* current = first_range_;
    while (current != nullptr) {
      current->Dump(stream);
      stream << " ";
      current = current->GetNext();
    }
    stream << "}, uses: { ";
    UsePosition* use = first_use_;
    if (use != nullptr) {
      do {
        use->Dump(stream);
        stream << " ";
      } while ((use = use->GetNext()) != nullptr);
    }
    stream << "}, { ";
    use = first_env_use_;
    if (use != nullptr) {
      do {
        use->Dump(stream);
        stream << " ";
      } while ((use = use->GetNext()) != nullptr);
    }
    stream << "}";
    stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
    stream << " is_low: " << IsLowInterval();
    stream << " is_high: " << IsHighInterval();
  }

  LiveInterval* GetNextSibling() const { return next_sibling_; }
  // Walks the sibling chain and returns its final interval.
  LiveInterval* GetLastSibling() {
    LiveInterval* result = this;
    while (result->next_sibling_ != nullptr) {
      result = result->next_sibling_;
    }
    return result;
  }
    704 
  // Returns the first register hint that is at least free before
  // the value contained in `free_until`. If none is found, returns
  // `kNoRegister`.
  int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;

  // If there is enough at the definition site to find a register (for example
  // it uses the same input as the first input), returns the register as a hint.
  // Returns kNoRegister otherwise.
  int FindHintAtDefinition() const;

  // Returns whether the interval needs two (Dex virtual register size `kVRegSize`)
  // slots for spilling.
  bool NeedsTwoSpillSlots() const;

  bool IsFloatingPoint() const {
    return type_ == Primitive::kPrimFloat || type_ == Primitive::kPrimDouble;
  }

  // Converts the location of the interval to a `Location` object.
  Location ToLocation() const;

  // Returns the location of the interval following its siblings at `position`.
  Location GetLocationAt(size_t position);

  // Finds the sibling that is defined at `position`.
  LiveInterval* GetSiblingAt(size_t position);

  // Returns whether `other` and `this` share the same kind of register.
  bool SameRegisterKind(Location other) const;
  bool SameRegisterKind(const LiveInterval& other) const {
    // Two kinds: core registers and floating-point registers.
    return IsFloatingPoint() == other.IsFloatingPoint();
  }
    737 
  // Low/high interval pairing: a "low" interval and its "high" counterpart
  // are linked through `high_or_low_interval_` on the parent (see
  // AddHighInterval). The `is_high_interval_` flag is only set on the parent.

  // A low interval has a high counterpart.
  bool HasHighInterval() const {
    return IsLowInterval();
  }

  // A high interval has a low counterpart.
  bool HasLowInterval() const {
    return IsHighInterval();
  }

  // Only valid on a high interval: its paired low interval.
  LiveInterval* GetLowInterval() const {
    DCHECK(HasLowInterval());
    return high_or_low_interval_;
  }

  // Only valid on a low interval: its paired high interval.
  LiveInterval* GetHighInterval() const {
    DCHECK(HasHighInterval());
    return high_or_low_interval_;
  }

  bool IsHighInterval() const {
    // Splits inherit the flag from their parent.
    return GetParent()->is_high_interval_;
  }

  bool IsLowInterval() const {
    return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
  }

  void SetLowInterval(LiveInterval* low) {
    DCHECK(IsHighInterval());
    high_or_low_interval_ = low;
  }

  void SetHighInterval(LiveInterval* high) {
    DCHECK(IsLowInterval());
    high_or_low_interval_ = high;
  }
    773 
  // Creates and attaches the synthesized high interval of a register pair.
  // The high interval duplicates this interval's ranges and uses so that both
  // halves cover the same lifetime. `is_temp` is forwarded to the new
  // interval's constructor.
  void AddHighInterval(bool is_temp = false) {
    DCHECK(IsParent());
    DCHECK(!HasHighInterval());
    DCHECK(!HasLowInterval());
    high_or_low_interval_ = new (allocator_) LiveInterval(
        allocator_, type_, defined_by_, false, kNoRegister, is_temp, false, true);
    // Back-link so the high interval can find its low half.
    high_or_low_interval_->high_or_low_interval_ = this;
    if (first_range_ != nullptr) {
      // Duplicate (rather than share) the range list for the high interval.
      high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
      high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
      high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
    }
    if (first_use_ != nullptr) {
      high_or_low_interval_->first_use_ = first_use_->Dup(allocator_);
    }

    if (first_env_use_ != nullptr) {
      high_or_low_interval_->first_env_use_ = first_env_use_->Dup(allocator_);
    }
  }
    794 
  // Returns whether an interval, when it is non-split, is using
  // the same register as one of its inputs.
  bool IsUsingInputRegister() const {
    // Debug-only helper; must never be called in release logic.
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    if (defined_by_ != nullptr && !IsSplit()) {
      for (HInputIterator it(defined_by_); !it.Done(); it.Advance()) {
        LiveInterval* interval = it.Current()->GetLiveInterval();

        // Find the sibling that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        // Check if both intervals have the same register of the same kind.
        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          return true;
        }
      }
    }
    return false;
  }
    819 
  // Returns whether an interval, when it is non-split, can safely use
  // the same register as one of its inputs. Note that this method requires
  // IsUsingInputRegister() to be true.
  bool CanUseInputRegister() const {
    // Debug-only helper; must never be called in release logic.
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    DCHECK(IsUsingInputRegister());
    if (defined_by_ != nullptr && !IsSplit()) {
      LocationSummary* locations = defined_by_->GetLocations();
      if (locations->OutputCanOverlapWithInputs()) {
        // The code generator may clobber inputs before writing the output.
        return false;
      }
      for (HInputIterator it(defined_by_); !it.Done(); it.Advance()) {
        LiveInterval* interval = it.Current()->GetLiveInterval();

        // Find the sibling that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          // We found the input that has the same register. Sharing is safe
          // only if that input is not live after `defined_by_`.
          return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
        }
      }
    }
    // IsUsingInputRegister() guarantees the matching input exists, so the
    // loop above must have returned.
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }
    852 
    853   void AddSafepoint(HInstruction* instruction) {
    854     SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
    855     if (first_safepoint_ == nullptr) {
    856       first_safepoint_ = last_safepoint_ = safepoint;
    857     } else {
    858       DCHECK_LT(last_safepoint_->GetPosition(), safepoint->GetPosition());
    859       last_safepoint_->SetNext(safepoint);
    860       last_safepoint_ = safepoint;
    861     }
    862   }
    863 
  // Returns the head of the safepoint list, or null if none were added.
  SafepointPosition* GetFirstSafepoint() const {
    return first_safepoint_;
  }

  // Resets the starting point for range-searching queries to the first range.
  // Intervals must be reset prior to starting a new linear scan over them.
  void ResetSearchCache() {
    range_search_start_ = first_range_;
  }
    873 
    874  private:
  // Private constructor: all intervals are arena-allocated and created by
  // this class (e.g. splitting, `AddHighInterval`) or its friends.
  LiveInterval(ArenaAllocator* allocator,
               Primitive::Type type,
               HInstruction* defined_by = nullptr,
               bool is_fixed = false,
               int reg = kNoRegister,
               bool is_temp = false,
               bool is_slow_path_safepoint = false,
               bool is_high_interval = false)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        range_search_start_(nullptr),
        first_safepoint_(nullptr),
        last_safepoint_(nullptr),
        first_use_(nullptr),
        first_env_use_(nullptr),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),  // A fresh interval is its own parent.
        register_(reg),
        spill_slot_(kNoSpillSlot),
        is_fixed_(is_fixed),
        is_temp_(is_temp),
        is_slow_path_safepoint_(is_slow_path_safepoint),
        is_high_interval_(is_high_interval),
        high_or_low_interval_(nullptr),
        defined_by_(defined_by) {}
    902 
    903   // Searches for a LiveRange that either covers the given position or is the
    904   // first next LiveRange. Returns null if no such LiveRange exists. Ranges
    905   // known to end before `position` can be skipped with `search_start`.
    906   LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
    907     if (kIsDebugBuild) {
    908       if (search_start != first_range_) {
    909         // If we are not searching the entire list of ranges, make sure we do
    910         // not skip the range we are searching for.
    911         if (search_start == nullptr) {
    912           DCHECK(IsDeadAt(position));
    913         } else if (search_start->GetStart() > position) {
    914           DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
    915         }
    916       }
    917     }
    918 
    919     LiveRange* range;
    920     for (range = search_start;
    921          range != nullptr && range->GetEnd() <= position;
    922          range = range->GetNext()) {
    923       continue;
    924     }
    925     return range;
    926   }
    927 
  // Returns whether the output location of `defined_by_` requires a core or
  // floating-point register (or register pair) at the definition point.
  // May only be called on the parent interval.
  bool DefinitionRequiresRegister() const {
    DCHECK(IsParent());
    LocationSummary* locations = defined_by_->GetLocations();
    Location location = locations->Out();
    // This interval is the first interval of the instruction. If the output
    // of the instruction requires a register, we return the position of that instruction
    // as the first register use.
    if (location.IsUnallocated()) {
      // kSameAsFirstInput requires a register only when the first input does.
      if ((location.GetPolicy() == Location::kRequiresRegister)
           || (location.GetPolicy() == Location::kSameAsFirstInput
               && (locations->InAt(0).IsRegister()
                   || locations->InAt(0).IsRegisterPair()
                   || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
        return true;
      } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
                 || (location.GetPolicy() == Location::kSameAsFirstInput
                     && (locations->InAt(0).IsFpuRegister()
                         || locations->InAt(0).IsFpuRegisterPair()
                         || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
        return true;
      }
    } else if (location.IsRegister() || location.IsRegisterPair()) {
      return true;
    }
    return false;
  }
    954 
    955   bool IsDefiningPosition(size_t position) const {
    956     return IsParent() && (position == GetStart());
    957   }
    958 
    959   bool HasSynthesizeUseAt(size_t position) const {
    960     UsePosition* use = first_use_;
    961     while (use != nullptr) {
    962       size_t use_position = use->GetPosition();
    963       if ((use_position == position) && use->IsSynthesized()) {
    964         return true;
    965       }
    966       if (use_position > position) break;
    967       use = use->GetNext();
    968     }
    969     return false;
    970   }
    971 
  // Debug check: verifies that in the graph's linear order every loop's
  // blocks are contiguous, begin with the loop header and end with a back
  // edge. `AddBackEdgeUses` relies on these properties.
  bool IsLinearOrderWellFormed(const HGraph& graph) {
    for (HBasicBlock* header : graph.GetBlocks()) {
      if (header == nullptr || !header->IsLoopHeader()) {
        continue;
      }

      HLoopInformation* loop = header->GetLoopInformation();
      size_t num_blocks = loop->GetBlocks().NumSetBits();
      size_t found_blocks = 0u;

      // Walk the linear order and count the loop's blocks as they appear.
      for (HLinearOrderIterator it(graph); !it.Done(); it.Advance()) {
        HBasicBlock* current = it.Current();
        if (loop->Contains(*current)) {
          found_blocks++;
          if (found_blocks == 1u && current != header) {
            // First block is not the header.
            return false;
          } else if (found_blocks == num_blocks && !loop->IsBackEdge(*current)) {
            // Last block is not a back edge.
            return false;
          }
        } else if (found_blocks != 0u && found_blocks != num_blocks) {
          // Blocks are not adjacent.
          return false;
        }
      }
      DCHECK_EQ(found_blocks, num_blocks);
    }

    return true;
  }
   1003 
  void AddBackEdgeUses(const HBasicBlock& block_at_use) {
    DCHECK(block_at_use.IsInLoop());
    if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
      // Linear order may not be well formed when irreducible loops are present,
      // i.e. loop blocks may not be adjacent and a back edge may not be last,
      // which violates assumptions made in this method.
      return;
    }

    DCHECK(IsLinearOrderWellFormed(*block_at_use.GetGraph()));

    // Add synthesized uses at the back edge of loops to help the register allocator.
    // Note that this method is called in decreasing liveness order, to facilitate adding
    // uses at the head of the `first_use_` linked list. Because below
    // we iterate from inner-most to outer-most, which is in increasing liveness order,
    // we need to take extra care of how the `first_use_` linked list is being updated.
    UsePosition* first_in_new_list = nullptr;
    UsePosition* last_in_new_list = nullptr;
    for (HLoopInformationOutwardIterator it(block_at_use);
         !it.Done();
         it.Advance()) {
      HLoopInformation* current = it.Current();
      if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
        // This interval is defined in the loop. We can stop going outward.
        break;
      }

      // We're only adding a synthesized use at the last back edge. Adding synthesized uses on
      // all back edges is not necessary: anything used in the loop will have its use at the
      // last back edge. If we want branches in a loop to have better register allocation than
      // another branch, then it is the linear order we should change.
      size_t back_edge_use_position = current->GetLifetimeEnd();
      if ((first_use_ != nullptr) && (first_use_->GetPosition() <= back_edge_use_position)) {
        // There was a use already seen in this loop. Therefore the previous call to `AddUse`
        // already inserted the backedge use. We can stop going outward.
        DCHECK(HasSynthesizeUseAt(back_edge_use_position));
        break;
      }

      DCHECK(last_in_new_list == nullptr ||
             back_edge_use_position > last_in_new_list->GetPosition());

      // Synthesized use: no user instruction, no environment, no input index.
      UsePosition* new_use = new (allocator_) UsePosition(
          /* user */ nullptr,
          /* environment */ nullptr,
          UsePosition::kNoInput,
          back_edge_use_position,
          /* next */ nullptr);

      if (last_in_new_list != nullptr) {
        // Going outward. The latest created use needs to point to the new use.
        last_in_new_list->SetNext(new_use);
      } else {
        // This is the inner-most loop.
        DCHECK_EQ(current, block_at_use.GetLoopInformation());
        first_in_new_list = new_use;
      }
      last_in_new_list = new_use;
    }
    // Link the newly created linked list with `first_use_`.
    if (last_in_new_list != nullptr) {
      last_in_new_list->SetNext(first_use_);
      first_use_ = first_in_new_list;
    }
  }
   1069 
  ArenaAllocator* const allocator_;

  // Ranges of this interval. We need a quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // The first range at or after the current position of a linear scan. It is
  // used to optimize range-searching queries.
  LiveRange* range_search_start_;

  // Safepoints where this interval is live.
  SafepointPosition* first_safepoint_;
  SafepointPosition* last_safepoint_;

  // Uses of this interval. Note that this linked list is shared amongst siblings.
  UsePosition* first_use_;
  UsePosition* first_env_use_;

  // The instruction type this interval corresponds to.
  const Primitive::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

  // The first interval from which split intervals come from.
  LiveInterval* parent_;

  // The register allocated to this interval, or `kNoRegister` if none.
  int register_;

  // The spill slot allocated to this interval, or `kNoSpillSlot` if none.
  int spill_slot_;

  // Whether the interval is for a fixed register.
  const bool is_fixed_;

  // Whether the interval is for a temporary.
  const bool is_temp_;

  // Whether the interval is for a safepoint that calls on slow path.
  const bool is_slow_path_safepoint_;

  // Whether this interval is a synthesized interval for register pair.
  const bool is_high_interval_;

  // If this interval needs a register pair, the high or low equivalent.
  // `is_high_interval_` tells whether this holds the low or the high.
  LiveInterval* high_or_low_interval_;

  // The instruction represented by this interval.
  HInstruction* const defined_by_;

  // Sentinel values for `register_` and `spill_slot_`.
  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
   1129 };
   1130 
   1131 /**
   1132  * Analysis that computes the liveness of instructions:
   1133  *
   1134  * (a) Non-environment uses of an instruction always make
   1135  *     the instruction live.
   1136  * (b) Environment uses of an instruction whose type is
   1137  *     object (that is, non-primitive), make the instruction live.
   1138  *     This is due to having to keep alive objects that have
   1139  *     finalizers deleting native objects.
   1140  * (c) When the graph has the debuggable property, environment uses
   1141  *     of an instruction that has a primitive type make the instruction live.
   1142  *     If the graph does not have the debuggable property, the environment
   1143  *     use has no effect, and may get a 'none' value after register allocation.
   1144  *
   1145  * (b) and (c) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
   1146  */
   1147 class SsaLivenessAnalysis : public ValueObject {
   1148  public:
   1149   SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen)
   1150       : graph_(graph),
   1151         codegen_(codegen),
   1152         block_infos_(graph->GetBlocks().size(),
   1153                      nullptr,
   1154                      graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
   1155         instructions_from_ssa_index_(graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
   1156         instructions_from_lifetime_position_(graph->GetArena()->Adapter(kArenaAllocSsaLiveness)),
   1157         number_of_ssa_values_(0) {
   1158   }
   1159 
   1160   void Analyze();
   1161 
   1162   BitVector* GetLiveInSet(const HBasicBlock& block) const {
   1163     return &block_infos_[block.GetBlockId()]->live_in_;
   1164   }
   1165 
   1166   BitVector* GetLiveOutSet(const HBasicBlock& block) const {
   1167     return &block_infos_[block.GetBlockId()]->live_out_;
   1168   }
   1169 
   1170   BitVector* GetKillSet(const HBasicBlock& block) const {
   1171     return &block_infos_[block.GetBlockId()]->kill_;
   1172   }
   1173 
   1174   HInstruction* GetInstructionFromSsaIndex(size_t index) const {
   1175     return instructions_from_ssa_index_[index];
   1176   }
   1177 
   1178   HInstruction* GetInstructionFromPosition(size_t index) const {
   1179     return instructions_from_lifetime_position_[index];
   1180   }
   1181 
   1182   HBasicBlock* GetBlockFromPosition(size_t index) const {
   1183     HInstruction* instruction = GetInstructionFromPosition(index);
   1184     if (instruction == nullptr) {
   1185       // If we are at a block boundary, get the block following.
   1186       instruction = GetInstructionFromPosition(index + 1);
   1187     }
   1188     return instruction->GetBlock();
   1189   }
   1190 
   1191   bool IsAtBlockBoundary(size_t index) const {
   1192     return GetInstructionFromPosition(index) == nullptr;
   1193   }
   1194 
   1195   HInstruction* GetTempUser(LiveInterval* temp) const {
   1196     // A temporary shares the same lifetime start as the instruction that requires it.
   1197     DCHECK(temp->IsTemp());
   1198     HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
   1199     DCHECK_EQ(user, temp->GetFirstUse()->GetUser());
   1200     return user;
   1201   }
   1202 
   1203   size_t GetTempIndex(LiveInterval* temp) const {
   1204     // We use the input index to store the index of the temporary in the user's temporary list.
   1205     DCHECK(temp->IsTemp());
   1206     return temp->GetFirstUse()->GetInputIndex();
   1207   }
   1208 
   1209   size_t GetMaxLifetimePosition() const {
   1210     return instructions_from_lifetime_position_.size() * 2 - 1;
   1211   }
   1212 
   1213   size_t GetNumberOfSsaValues() const {
   1214     return number_of_ssa_values_;
   1215   }
   1216 
   1217   static constexpr const char* kLivenessPassName = "liveness";
   1218 
   1219  private:
   1220   // Linearize the graph so that:
   1221   // (1): a block is always after its dominator,
   1222   // (2): blocks of loops are contiguous.
   1223   // This creates a natural and efficient ordering when visualizing live ranges.
   1224   void LinearizeGraph();
   1225 
   1226   // Give an SSA number to each instruction that defines a value used by another instruction,
   1227   // and setup the lifetime information of each instruction and block.
   1228   void NumberInstructions();
   1229 
   1230   // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
   1231   void ComputeLiveness();
   1232 
   1233   // Compute the live ranges of instructions, as well as the initial live_in, live_out and
   1234   // kill sets, that do not take into account backward branches.
   1235   void ComputeLiveRanges();
   1236 
   1237   // After computing the initial sets, this method does a fixed point
   1238   // calculation over the live_in and live_out set to take into account
   1239   // backwards branches.
   1240   void ComputeLiveInAndLiveOutSets();
   1241 
   1242   // Update the live_in set of the block and returns whether it has changed.
   1243   bool UpdateLiveIn(const HBasicBlock& block);
   1244 
   1245   // Update the live_out set of the block and returns whether it has changed.
   1246   bool UpdateLiveOut(const HBasicBlock& block);
   1247 
   1248   // Returns whether `instruction` in an HEnvironment held by `env_holder`
   1249   // should be kept live by the HEnvironment.
   1250   static bool ShouldBeLiveForEnvironment(HInstruction* env_holder,
   1251                                          HInstruction* instruction) {
   1252     if (instruction == nullptr) return false;
   1253     // A value that's not live in compiled code may still be needed in interpreter,
   1254     // due to code motion, etc.
   1255     if (env_holder->IsDeoptimize()) return true;
   1256     // A value live at a throwing instruction in a try block may be copied by
   1257     // the exception handler to its location at the top of the catch block.
   1258     if (env_holder->CanThrowIntoCatchBlock()) return true;
   1259     if (instruction->GetBlock()->GetGraph()->IsDebuggable()) return true;
   1260     return instruction->GetType() == Primitive::kPrimNot;
   1261   }
   1262 
   1263   void CheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const {
   1264     if (!block.IsLoopHeader() || !block.GetLoopInformation()->IsIrreducible()) {
   1265       return;
   1266     }
   1267     BitVector* live_in = GetLiveInSet(block);
   1268     // To satisfy our liveness algorithm, we need to ensure loop headers of
   1269     // irreducible loops do not have any live-in instructions, except constants
   1270     // and the current method, which can be trivially re-materialized.
   1271     for (uint32_t idx : live_in->Indexes()) {
   1272       HInstruction* instruction = GetInstructionFromSsaIndex(idx);
   1273       DCHECK(instruction->GetBlock()->IsEntryBlock()) << instruction->DebugName();
   1274       DCHECK(!instruction->IsParameterValue());
   1275       DCHECK(instruction->IsCurrentMethod() || instruction->IsConstant())
   1276           << instruction->DebugName();
   1277     }
   1278   }
   1279 
   1280   HGraph* const graph_;
   1281   CodeGenerator* const codegen_;
   1282   ArenaVector<BlockInfo*> block_infos_;
   1283 
   1284   // Temporary array used when computing live_in, live_out, and kill sets.
   1285   ArenaVector<HInstruction*> instructions_from_ssa_index_;
   1286 
   1287   // Temporary array used when inserting moves in the graph.
   1288   ArenaVector<HInstruction*> instructions_from_lifetime_position_;
   1289   size_t number_of_ssa_values_;
   1290 
   1291   ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
   1292   ART_FRIEND_TEST(RegisterAllocatorTest, FreeUntil);
   1293 
   1294   DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
   1295 };
   1296 
   1297 }  // namespace art
   1298 
   1299 #endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
   1300