/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_

#include <iostream>

#include "base/iteration_range.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "nodes.h"
#include "utils/intrusive_forward_list.h"

namespace art {

class CodeGenerator;
class SsaLivenessAnalysis;

static constexpr int kNoRegister = -1;

class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  BlockInfo(ScopedArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
      : block_(block),
        live_in_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) {
    UNUSED(block_);
    live_in_.ClearAllBits();
    live_out_.ClearAllBits();
    kill_.ClearAllBits();
  }

 private:
  const HBasicBlock& block_;
  ArenaBitVector live_in_;
  ArenaBitVector live_out_;
  ArenaBitVector kill_;

  friend class SsaLivenessAnalysis;

  DISALLOW_COPY_AND_ASSIGN(BlockInfo);
};

/**
 * A live range contains the start and end of a range where an instruction or a temporary
 * is live.
 */
class LiveRange FINAL : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
    DCHECK_LT(start, end);
    DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
  }

  size_t GetStart() const { return start_; }
  size_t GetEnd() const { return end_; }
  LiveRange* GetNext() const { return next_; }

  bool IntersectsWith(const LiveRange& other) const {
    return (start_ >= other.start_ && start_ < other.end_)
        || (other.start_ >= start_ && other.start_ < end_);
  }

  bool IsBefore(const LiveRange& other) const {
    return end_ <= other.start_;
  }

  void Dump(std::ostream& stream) const {
    stream << "[" << start_ << "," << end_ << ")";
  }

  LiveRange* Dup(ScopedArenaAllocator* allocator) const {
    return new (allocator) LiveRange(
        start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
  }

  LiveRange* GetLastRange() {
    return next_ == nullptr ? this : next_->GetLastRange();
  }

 private:
  size_t start_;
  size_t end_;
  LiveRange* next_;

  friend class LiveInterval;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
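
// Illustrative note (not from the original source): ranges are half-open, so
// [2, 6) and [7, 10) are disjoint, and a range ending at a position does not
// intersect one starting there. A minimal sketch, assuming a
// ScopedArenaAllocator* `allocator` is at hand:
//
//   LiveRange* tail = new (allocator) LiveRange(/* start= */ 7, /* end= */ 10, nullptr);
//   LiveRange* head = new (allocator) LiveRange(/* start= */ 2, /* end= */ 6, tail);
//   DCHECK(head->IsBefore(*tail));
//   DCHECK(!head->IntersectsWith(*tail));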

/**
 * A use position represents a live interval use at a given position.
 */
class UsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
                    public IntrusiveForwardListNode<UsePosition> {
 public:
  UsePosition(HInstruction* user, size_t input_index, size_t position)
      : user_(user),
        input_index_(input_index),
        position_(position) {
  }

  explicit UsePosition(size_t position)
      : user_(nullptr),
        input_index_(kNoInput),
        position_(dchecked_integral_cast<uint32_t>(position)) {
  }

  size_t GetPosition() const { return position_; }

  HInstruction* GetUser() const { return user_; }

  bool IsSynthesized() const { return user_ == nullptr; }

  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  HLoopInformation* GetLoopInformation() const {
    return user_->GetBlock()->GetLoopInformation();
  }

  UsePosition* Clone(ScopedArenaAllocator* allocator) const {
    return new (allocator) UsePosition(user_, input_index_, position_);
  }

  bool RequiresRegister() const {
    if (IsSynthesized()) return false;
    Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
    return location.IsUnallocated() && location.RequiresRegisterKind();
  }

 private:
  static constexpr uint32_t kNoInput = static_cast<uint32_t>(-1);

  HInstruction* const user_;
  const size_t input_index_;
  const size_t position_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
using UsePositionList = IntrusiveForwardList<UsePosition>;

/**
 * An environment use position represents an environment use of a live interval
 * at a given position.
 */
class EnvUsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
                       public IntrusiveForwardListNode<EnvUsePosition> {
 public:
  EnvUsePosition(HEnvironment* environment,
                 size_t input_index,
                 size_t position)
      : environment_(environment),
        input_index_(input_index),
        position_(position) {
    DCHECK(environment != nullptr);
  }

  size_t GetPosition() const { return position_; }

  HEnvironment* GetEnvironment() const { return environment_; }
  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  EnvUsePosition* Clone(ScopedArenaAllocator* allocator) const {
    return new (allocator) EnvUsePosition(environment_, input_index_, position_);
  }

 private:
  HEnvironment* const environment_;
  const size_t input_index_;
  const size_t position_;

  DISALLOW_COPY_AND_ASSIGN(EnvUsePosition);
};
using EnvUsePositionList = IntrusiveForwardList<EnvUsePosition>;

template <typename Iterator>
inline Iterator FindUseAtOrAfterPosition(Iterator first, Iterator last, size_t position) {
  using value_type = const typename Iterator::value_type;
  static_assert(std::is_same<value_type, const UsePosition>::value ||
                    std::is_same<value_type, const EnvUsePosition>::value,
                "Expecting value type UsePosition or EnvUsePosition.");
  Iterator ret = std::find_if(
      first, last, [position](const value_type& use) { return use.GetPosition() >= position; });
  // Check that the processed range is sorted. Do not check the rest of the range to avoid
  // increasing the complexity of callers from O(n) to O(n^2).
  DCHECK(std::is_sorted(
      first,
      ret,
      [](const value_type& lhs, const value_type& rhs) {
        return lhs.GetPosition() < rhs.GetPosition();
      }));
  return ret;
}

template <typename Iterator>
inline IterationRange<Iterator> FindMatchingUseRange(Iterator first,
                                                     Iterator last,
                                                     size_t position_begin,
                                                     size_t position_end) {
  Iterator begin = FindUseAtOrAfterPosition(first, last, position_begin);
  Iterator end = FindUseAtOrAfterPosition(begin, last, position_end);
  return MakeIterationRange(begin, end);
}
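
// A usage sketch (illustrative, not from the original source): the helpers
// above select the uses within the half-open window
// [position_begin, position_end), assuming the list is sorted by position
// and `interval` names a hypothetical LiveInterval*:
//
//   const UsePositionList& uses = interval->GetUses();
//   auto matching = FindMatchingUseRange(uses.begin(), uses.end(),
//                                        /* position_begin= */ 10,
//                                        /* position_end= */ 20);
//   for (const UsePosition& use : matching) {
//     // Only uses with 10 <= use.GetPosition() < 20 are visited here.
//   }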

class SafepointPosition : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  explicit SafepointPosition(HInstruction* instruction)
      : instruction_(instruction),
        next_(nullptr) {}

  void SetNext(SafepointPosition* next) {
    next_ = next;
  }

  size_t GetPosition() const {
    return instruction_->GetLifetimePosition();
  }

  SafepointPosition* GetNext() const {
    return next_;
  }

  LocationSummary* GetLocations() const {
    return instruction_->GetLocations();
  }

  HInstruction* GetInstruction() const {
    return instruction_;
  }

 private:
  HInstruction* const instruction_;
  SafepointPosition* next_;

  DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
};

/**
 * An interval is a list of disjoint live ranges where an instruction is live.
 * Each instruction that has uses gets an interval.
 */
class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  static LiveInterval* MakeInterval(ScopedArenaAllocator* allocator,
                                    DataType::Type type,
                                    HInstruction* instruction = nullptr) {
    return new (allocator) LiveInterval(allocator, type, instruction);
  }

  static LiveInterval* MakeFixedInterval(ScopedArenaAllocator* allocator,
                                         int reg,
                                         DataType::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, true, reg, false);
  }

  static LiveInterval* MakeTempInterval(ScopedArenaAllocator* allocator, DataType::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, false, kNoRegister, true);
  }

  bool IsFixed() const { return is_fixed_; }
  bool IsTemp() const { return is_temp_; }
  // This interval is the result of a split.
  bool IsSplit() const { return parent_ != this; }

  void AddTempUse(HInstruction* instruction, size_t temp_index) {
    DCHECK(IsTemp());
    DCHECK(GetUses().empty()) << "A temporary can only have one user";
    DCHECK(GetEnvironmentUses().empty()) << "A temporary cannot have an environment user";
    size_t position = instruction->GetLifetimePosition();
    UsePosition* new_use = new (allocator_) UsePosition(instruction, temp_index, position);
    uses_.push_front(*new_use);
    AddRange(position, position + 1);
  }
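
  // A note on positions (illustrative, summarizing the scheme used below):
  // instructions sit at even lifetime positions, so the odd position
  // `user position + 1` falls between an instruction and the next one.
  // Inputs are therefore recorded as dying at `user position + 1` by default,
  // while fixed inputs and "output same as input" uses die at the user's own
  // position so that input moves can be inserted just before the user.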

  // Record use of an input. The use will be recorded as an environment use if
  // `environment` is not null and as a register use otherwise. If `actual_user`
  // is specified, the use will be recorded at `actual_user`'s lifetime position.
  void AddUse(HInstruction* instruction,
              HEnvironment* environment,
              size_t input_index,
              HInstruction* actual_user = nullptr,
              bool keep_alive = false) {
    bool is_environment = (environment != nullptr);
    LocationSummary* locations = instruction->GetLocations();
    if (actual_user == nullptr) {
      actual_user = instruction;
    }

    // Set the use within the instruction.
    size_t position = actual_user->GetLifetimePosition() + 1;
    if (!is_environment) {
      if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
        // For fixed inputs and output same as input, the register allocator
        // requires inputs to die at the instruction, so that input moves use the
        // location of the input just before that instruction (and not potential moves due
        // to splitting).
        DCHECK_EQ(instruction, actual_user);
        position = actual_user->GetLifetimePosition();
      } else if (!locations->InAt(input_index).IsValid()) {
        return;
      }
    }

    if (!is_environment && instruction->IsInLoop()) {
      AddBackEdgeUses(*instruction->GetBlock());
    }

    if ((!uses_.empty()) &&
        (uses_.front().GetUser() == actual_user) &&
        (uses_.front().GetPosition() < position)) {
      // The user uses the instruction multiple times, and one use dies before the other.
      // We update the use list so that the latter is first.
      DCHECK(!is_environment);
      DCHECK(uses_.front().GetPosition() + 1 == position);
      UsePositionList::iterator next_pos = uses_.begin();
      UsePositionList::iterator insert_pos;
      do {
        insert_pos = next_pos;
        ++next_pos;
      } while (next_pos != uses_.end() && next_pos->GetPosition() < position);
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.insert_after(insert_pos, *new_use);
      if (first_range_->GetEnd() == uses_.front().GetPosition()) {
        first_range_->end_ = position;
      }
      return;
    }

    if (is_environment) {
      DCHECK(env_uses_.empty() || position <= env_uses_.front().GetPosition());
      EnvUsePosition* new_env_use =
          new (allocator_) EnvUsePosition(environment, input_index, position);
      env_uses_.push_front(*new_env_use);
    } else {
      DCHECK(uses_.empty() || position <= uses_.front().GetPosition());
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.push_front(*new_use);
    }

    if (is_environment && !keep_alive) {
      // If this environment use does not keep the instruction live, it does not
      // affect the live range of that instruction.
      return;
    }

    size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    if (first_range_ == nullptr) {
      // First time we see a use of that interval.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, nullptr);
    } else if (first_range_->GetStart() == start_block_position) {
      // There is a use later in the same block or in a following block.
      // Note that in such a case, `AddRange` for the whole block has been called
      // before arriving in this method, and this is the reason the start of
      // `first_range_` is before the given `position`.
      DCHECK_LE(position, first_range_->GetEnd());
    } else {
      DCHECK(first_range_->GetStart() > position);
      // There is a hole in the interval. Create a new range.
      // Note that the start of `first_range_` can be equal to `end`: two blocks
      // having adjacent lifetime positions are not necessarily
      // predecessor/successor. When two blocks are predecessor/successor, the
      // liveness algorithm has called `AddRange` before arriving in this method,
      // and the corresponding check in `AddRange` would succeed.
      first_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, first_range_);
    }
  }
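
  // An illustrative note on construction order (not from the original source):
  // liveness is computed in reverse linear order, so ranges and uses are
  // prepended to the interval. With made-up positions:
  //
  //   interval->AddRange(10, 20);  // Last block first: ranges are { [10,20) }.
  //   interval->AddRange(6, 10);   // Adjacent block: first range becomes [6,20).
  //   interval->AddRange(2, 4);    // Leaves a hole: ranges are { [2,4) [6,20) }.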

  void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    DCHECK(instruction->IsPhi());
    if (block->IsInLoop()) {
      AddBackEdgeUses(*block);
    }
    UsePosition* new_use =
        new (allocator_) UsePosition(instruction, input_index, block->GetLifetimeEnd());
    uses_.push_front(*new_use);
  }

  ALWAYS_INLINE void AddRange(size_t start, size_t end) {
    if (first_range_ == nullptr) {
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, first_range_);
    } else if (first_range_->GetStart() == end) {
      // There is a use in the following block.
      first_range_->start_ = start;
    } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
      DCHECK(is_fixed_);
    } else {
      DCHECK_GT(first_range_->GetStart(), end);
      // There is a hole in the interval. Create a new range.
      first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
    }
  }

  void AddLoopRange(size_t start, size_t end) {
    DCHECK(first_range_ != nullptr);
    DCHECK_LE(start, first_range_->GetStart());
    // Find the range that covers the positions after the loop.
    LiveRange* after_loop = first_range_;
    LiveRange* last_in_loop = nullptr;
    while (after_loop != nullptr && after_loop->GetEnd() < end) {
      DCHECK_LE(start, after_loop->GetStart());
      last_in_loop = after_loop;
      after_loop = after_loop->GetNext();
    }
    if (after_loop == nullptr) {
      // Uses are only in the loop.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, nullptr);
    } else if (after_loop->GetStart() <= end) {
      first_range_ = range_search_start_ = after_loop;
      // There are uses after the loop.
      first_range_->start_ = start;
    } else {
      // The use after the loop is after a lifetime hole.
      DCHECK(last_in_loop != nullptr);
      first_range_ = range_search_start_ = last_in_loop;
      first_range_->start_ = start;
      first_range_->end_ = end;
    }
  }

  bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
  void SetSpillSlot(int slot) {
    DCHECK(!is_fixed_);
    DCHECK(!is_temp_);
    spill_slot_ = slot;
  }
  int GetSpillSlot() const { return spill_slot_; }

  void SetFrom(size_t from) {
    if (first_range_ != nullptr) {
      first_range_->start_ = from;
    } else {
      // Instruction without uses.
      DCHECK(uses_.empty());
      DCHECK(from == defined_by_->GetLifetimePosition());
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(from, from + 2, nullptr);
    }
  }

  LiveInterval* GetParent() const { return parent_; }

  // Returns whether this interval is the parent interval, that is, the interval
  // that starts where the HInstruction is defined.
  bool IsParent() const { return parent_ == this; }
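
  // An illustrative sketch of the parent/sibling structure (positions are
  // hypothetical): splitting keeps the original interval as the parent and
  // chains the tail off it as a sibling.
  //
  //   LiveInterval* parent = instruction->GetLiveInterval();
  //   LiveInterval* tail = parent->SplitAt(/* position= */ 12);
  //   // `parent` now ends at position 12 and `tail` covers [12, end).
  //   DCHECK(parent->IsParent());
  //   DCHECK(tail->IsSplit());
  //   DCHECK_EQ(tail->GetParent(), parent);
  //   DCHECK_EQ(parent->GetNextSibling(), tail);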

  LiveRange* GetFirstRange() const { return first_range_; }
  LiveRange* GetLastRange() const { return last_range_; }

  int GetRegister() const { return register_; }
  void SetRegister(int reg) { register_ = reg; }
  void ClearRegister() { register_ = kNoRegister; }
  bool HasRegister() const { return register_ != kNoRegister; }

  bool IsDeadAt(size_t position) const {
    return GetEnd() <= position;
  }

  bool IsDefinedAt(size_t position) const {
    return GetStart() <= position && !IsDeadAt(position);
  }

  // Returns true if the interval contains a LiveRange covering `position`.
  // The range at or immediately after the current position of linear scan
  // is cached for better performance. If `position` can be smaller than
  // that, CoversSlow should be used instead.
  bool Covers(size_t position) {
    LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
    range_search_start_ = candidate;
    return (candidate != nullptr && candidate->GetStart() <= position);
  }

  // Same as Covers but always tests all ranges.
  bool CoversSlow(size_t position) const {
    LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
    return candidate != nullptr && candidate->GetStart() <= position;
  }
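
  // Illustrative example: for an interval with ranges { [2,6) [10,14) },
  // CoversSlow(4) and CoversSlow(11) are true, while CoversSlow(8) is false
  // because position 8 falls in the lifetime hole [6,10). Covers() gives the
  // same answers but advances the search cache, so after Covers(11) a query
  // at a smaller position such as 4 must go through CoversSlow().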

  // Returns the first intersection of this interval with `current`, which
  // must be the interval currently being allocated by linear scan.
  size_t FirstIntersectionWith(LiveInterval* current) const {
    // Find the first range after the start of `current`. We use the search
    // cache to improve performance.
    DCHECK(GetStart() <= current->GetStart() || IsFixed());
    LiveRange* other_range = current->first_range_;
    LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
    if (my_range == nullptr) {
      return kNoLifetime;
    }

    // Advance both intervals and find the first matching range start in
    // this interval.
    do {
      if (my_range->IsBefore(*other_range)) {
        my_range = my_range->GetNext();
        if (my_range == nullptr) {
          return kNoLifetime;
        }
      } else if (other_range->IsBefore(*my_range)) {
        other_range = other_range->GetNext();
        if (other_range == nullptr) {
          return kNoLifetime;
        }
      } else {
        DCHECK(my_range->IntersectsWith(*other_range));
        return std::max(my_range->GetStart(), other_range->GetStart());
      }
    } while (true);
  }

  size_t GetStart() const {
    return first_range_->GetStart();
  }

  size_t GetEnd() const {
    return last_range_->GetEnd();
  }

  size_t GetLength() const {
    return GetEnd() - GetStart();
  }

  size_t FirstRegisterUseAfter(size_t position) const {
    if (is_temp_) {
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        break;
      }
      if (use_position > position) {
        if (use.RequiresRegister()) {
          return use_position;
        }
      }
    }
    return kNoLifetime;
  }

  // Returns the position of the first register use for this live interval,
  // including a register definition if applicable.
  size_t FirstRegisterUse() const {
    return FirstRegisterUseAfter(GetStart());
  }

  // Whether the interval requires a register rather than a stack location.
  // If needed for performance, this could be cached.
  bool RequiresRegister() const {
    return !HasRegister() && FirstRegisterUse() != kNoLifetime;
  }

  size_t FirstUseAfter(size_t position) const {
    if (is_temp_) {
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position)) {
      DCHECK(defined_by_->GetLocations()->Out().IsValid());
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        break;
      }
      if (use_position > position) {
        return use_position;
      }
    }
    return kNoLifetime;
  }

  const UsePositionList& GetUses() const {
    return parent_->uses_;
  }

  const EnvUsePositionList& GetEnvironmentUses() const {
    return parent_->env_uses_;
  }

  DataType::Type GetType() const {
    return type_;
  }

  HInstruction* GetDefinedBy() const {
    return defined_by_;
  }

  bool HasWillCallSafepoint() const {
    for (SafepointPosition* safepoint = first_safepoint_;
         safepoint != nullptr;
         safepoint = safepoint->GetNext()) {
      if (safepoint->GetLocations()->WillCall()) return true;
    }
    return false;
  }

  SafepointPosition* FindSafepointJustBefore(size_t position) const {
    for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
         safepoint != nullptr;
         previous = safepoint, safepoint = safepoint->GetNext()) {
      if (safepoint->GetPosition() >= position) return previous;
    }
    return last_safepoint_;
  }

  /**
   * Split this interval at `position`. This interval is changed to:
   * [start ... position).
   *
   * The new interval covers:
   * [position ... end)
   */
  LiveInterval* SplitAt(size_t position) {
    DCHECK(!is_temp_);
    DCHECK(!is_fixed_);
    DCHECK_GT(position, GetStart());

    if (GetEnd() <= position) {
      // This interval dies before `position`, no need to split.
      return nullptr;
    }

    LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
    if (new_last_safepoint == nullptr) {
      new_interval->first_safepoint_ = first_safepoint_;
      new_interval->last_safepoint_ = last_safepoint_;
      first_safepoint_ = last_safepoint_ = nullptr;
    } else if (last_safepoint_ != new_last_safepoint) {
      new_interval->last_safepoint_ = last_safepoint_;
      new_interval->first_safepoint_ = new_last_safepoint->GetNext();
      DCHECK(new_interval->first_safepoint_ != nullptr);
      last_safepoint_ = new_last_safepoint;
      last_safepoint_->SetNext(nullptr);
    }

    new_interval->next_sibling_ = next_sibling_;
    next_sibling_ = new_interval;
    new_interval->parent_ = parent_;

    LiveRange* current = first_range_;
    LiveRange* previous = nullptr;
    // Iterate over the ranges, and either find a range that covers this position, or
    // two ranges between which this position falls (that is, the position is in a
    // lifetime hole).
    do {
      if (position >= current->GetEnd()) {
        // Move to next range.
        previous = current;
        current = current->next_;
      } else if (position <= current->GetStart()) {
        // If the previous range did not cover this position, we know position is in
        // a lifetime hole. We can just break the first_range_ and last_range_ links
        // and return the new interval.
        DCHECK(previous != nullptr);
        DCHECK(current != first_range_);
        new_interval->last_range_ = last_range_;
        last_range_ = previous;
        previous->next_ = nullptr;
        new_interval->first_range_ = current;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to null
          // (i.e. the end of the interval) in the original interval.
          range_search_start_ = nullptr;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      } else {
        // This range covers position. We create a new last_range_ for this interval
        // that covers last_range_->Start() and position. We also shorten the current
        // range and make it the first range of the new interval.
        DCHECK(position < current->GetEnd() && position > current->GetStart());
        new_interval->last_range_ = last_range_;
        last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
        if (previous != nullptr) {
          previous->next_ = last_range_;
        } else {
          first_range_ = last_range_;
        }
        new_interval->first_range_ = current;
        current->start_ = position;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to `last_range_`
          // in the original interval. This is conservative but always correct.
          range_search_start_ = last_range_;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      }
    } while (current != nullptr);

    LOG(FATAL) << "Unreachable";
    return nullptr;
  }

  bool StartsBeforeOrAt(LiveInterval* other) const {
    return GetStart() <= other->GetStart();
  }

  bool StartsAfter(LiveInterval* other) const {
    return GetStart() > other->GetStart();
  }

  void Dump(std::ostream& stream) const {
    stream << "ranges: { ";
    LiveRange* current = first_range_;
    while (current != nullptr) {
      current->Dump(stream);
      stream << " ";
      current = current->GetNext();
    }
    stream << "}, uses: { ";
    for (const UsePosition& use : GetUses()) {
      use.Dump(stream);
      stream << " ";
    }
    stream << "}, { ";
    for (const EnvUsePosition& env_use : GetEnvironmentUses()) {
      env_use.Dump(stream);
      stream << " ";
    }
    stream << "}";
    stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
    stream << " is_low: " << IsLowInterval();
    stream << " is_high: " << IsHighInterval();
  }

  // Same as Dump, but adds context such as the instruction defining this interval, and
  // the register currently assigned to this interval.
  void DumpWithContext(std::ostream& stream, const CodeGenerator& codegen) const;
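
  // For orientation, an example of the output shape of Dump() (values made up):
  //
  //   ranges: { [2,6) [10,14) }, uses: { 5 11 }, { } is_fixed: 0, is_split: 1 is_low: 0 is_high: 0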

  LiveInterval* GetNextSibling() const { return next_sibling_; }
  LiveInterval* GetLastSibling() {
    LiveInterval* result = this;
    while (result->next_sibling_ != nullptr) {
      result = result->next_sibling_;
    }
    return result;
  }

  // Returns the first register hint that is free until at least the position
  // where this interval needs it, as indicated by `free_until`. If none is
  // found, returns `kNoRegister`.
  int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;

  // If there is enough information at the definition site to decide on a register
  // (for example, the output must be the same as the first input), returns that
  // register as a hint. Returns kNoRegister otherwise.
  int FindHintAtDefinition() const;

  // Returns the number of required spilling slots (measured as a multiple of the
  // Dex virtual register size `kVRegSize`).
  size_t NumberOfSpillSlotsNeeded() const;

  bool IsFloatingPoint() const {
    return type_ == DataType::Type::kFloat32 || type_ == DataType::Type::kFloat64;
  }

  // Converts the location of the interval to a `Location` object.
  Location ToLocation() const;

  // Returns the location of the interval following its siblings at `position`.
  Location GetLocationAt(size_t position);

  // Finds the sibling that is defined at `position`.
  LiveInterval* GetSiblingAt(size_t position);

  // Returns whether `other` and `this` share the same kind of register.
  bool SameRegisterKind(Location other) const;
  bool SameRegisterKind(const LiveInterval& other) const {
    return IsFloatingPoint() == other.IsFloatingPoint();
  }

  bool HasHighInterval() const {
    return IsLowInterval();
  }

  bool HasLowInterval() const {
    return IsHighInterval();
  }

  LiveInterval* GetLowInterval() const {
    DCHECK(HasLowInterval());
    return high_or_low_interval_;
  }

  LiveInterval* GetHighInterval() const {
    DCHECK(HasHighInterval());
    return high_or_low_interval_;
  }

  bool IsHighInterval() const {
    return GetParent()->is_high_interval_;
  }

  bool IsLowInterval() const {
    return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
  }

  void SetLowInterval(LiveInterval* low) {
    DCHECK(IsHighInterval());
    high_or_low_interval_ = low;
  }

  void SetHighInterval(LiveInterval* high) {
    DCHECK(IsLowInterval());
    high_or_low_interval_ = high;
  }

  void AddHighInterval(bool is_temp = false) {
    DCHECK(IsParent());
    DCHECK(!HasHighInterval());
    DCHECK(!HasLowInterval());
    high_or_low_interval_ = new (allocator_) LiveInterval(
        allocator_, type_, defined_by_, false, kNoRegister, is_temp, true);
    high_or_low_interval_->high_or_low_interval_ = this;
    if (first_range_ != nullptr) {
      high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
      high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
      high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
    }
    auto pos = high_or_low_interval_->uses_.before_begin();
    for (const UsePosition& use : uses_) {
      UsePosition* new_use = use.Clone(allocator_);
      pos = high_or_low_interval_->uses_.insert_after(pos, *new_use);
    }

    auto env_pos = high_or_low_interval_->env_uses_.before_begin();
    for (const EnvUsePosition& env_use : env_uses_) {
      EnvUsePosition* new_env_use = env_use.Clone(allocator_);
      env_pos = high_or_low_interval_->env_uses_.insert_after(env_pos, *new_env_use);
    }
  }
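
  // Illustrative sketch (not from the original source): on targets where a
  // 64-bit value needs a register pair, the interval created for the value is
  // the low interval, and AddHighInterval() attaches a synthesized high
  // interval mirroring its ranges and uses. With a hypothetical `instruction`:
  //
  //   LiveInterval* low = instruction->GetLiveInterval();
  //   low->AddHighInterval();
  //   DCHECK(low->IsLowInterval());
  //   DCHECK(low->GetHighInterval()->IsHighInterval());
  //   DCHECK_EQ(low->GetHighInterval()->GetLowInterval(), low);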

  // Returns whether an interval, when it is non-split, is using
  // the same register as one of its inputs.
  bool IsUsingInputRegister() const {
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    if (defined_by_ != nullptr && !IsSplit()) {
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the interval that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        // Check if both intervals have the same register of the same kind.
        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          return true;
        }
      }
    }
    return false;
  }

  // Returns whether an interval, when it is non-split, can safely use
  // the same register as one of its inputs. Note that this method requires
  // IsUsingInputRegister() to be true.
  bool CanUseInputRegister() const {
    CHECK(kIsDebugBuild) << "Function should be used only for DCHECKs";
    DCHECK(IsUsingInputRegister());
    if (defined_by_ != nullptr && !IsSplit()) {
      LocationSummary* locations = defined_by_->GetLocations();
      if (locations->OutputCanOverlapWithInputs()) {
        return false;
      }
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the interval that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          // We found the input that has the same register. Check if it is live after
          // `defined_by_`.
          return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
        }
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void AddSafepoint(HInstruction* instruction) {
    SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
    if (first_safepoint_ == nullptr) {
      first_safepoint_ = last_safepoint_ = safepoint;
    } else {
      DCHECK_LT(last_safepoint_->GetPosition(), safepoint->GetPosition());
      last_safepoint_->SetNext(safepoint);
      last_safepoint_ = safepoint;
    }
  }

  SafepointPosition* GetFirstSafepoint() const {
    return first_safepoint_;
  }

  // Resets the starting point for range-searching queries to the first range.
  // Intervals must be reset prior to starting a new linear scan over them.
  void ResetSearchCache() {
    range_search_start_ = first_range_;
  }

  bool DefinitionRequiresRegister() const {
    DCHECK(IsParent());
    LocationSummary* locations = defined_by_->GetLocations();
    Location location = locations->Out();
    // This interval is the first interval of the instruction. If the output
    // of the instruction requires a register, we return the position of that instruction
    // as the first register use.
    if (location.IsUnallocated()) {
      if ((location.GetPolicy() == Location::kRequiresRegister)
          || (location.GetPolicy() == Location::kSameAsFirstInput
              && (locations->InAt(0).IsRegister()
                  || locations->InAt(0).IsRegisterPair()
                  || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
        return true;
      } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
                 || (location.GetPolicy() == Location::kSameAsFirstInput
                     && (locations->InAt(0).IsFpuRegister()
                         || locations->InAt(0).IsFpuRegisterPair()
                         || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
        return true;
      }
    } else if (location.IsRegister() || location.IsRegisterPair()) {
      return true;
    }
    return false;
  }

 private:
  LiveInterval(ScopedArenaAllocator* allocator,
               DataType::Type type,
               HInstruction* defined_by = nullptr,
               bool is_fixed = false,
               int reg = kNoRegister,
               bool is_temp = false,
               bool is_high_interval = false)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        range_search_start_(nullptr),
        first_safepoint_(nullptr),
        last_safepoint_(nullptr),
        uses_(),
        env_uses_(),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),
        register_(reg),
        spill_slot_(kNoSpillSlot),
        is_fixed_(is_fixed),
        is_temp_(is_temp),
        is_high_interval_(is_high_interval),
        high_or_low_interval_(nullptr),
        defined_by_(defined_by) {}

  // Searches for a LiveRange that either covers the given position or is the
  // first LiveRange after it. Returns null if no such LiveRange exists. Ranges
  // known to end before `position` can be skipped with `search_start`.
  LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
    if (kIsDebugBuild) {
      if (search_start != first_range_) {
        // If we are not searching the entire list of ranges, make sure we do
        // not skip the range we are searching for.
        if (search_start == nullptr) {
          DCHECK(IsDeadAt(position));
        } else if (search_start->GetStart() > position) {
          DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
        }
      }
    }

    LiveRange* range;
    for (range = search_start;
         range != nullptr && range->GetEnd() <= position;
         range = range->GetNext()) {
      continue;
    }
    return range;
  }

  bool IsDefiningPosition(size_t position) const {
    return IsParent() && (position == GetStart());
  }

  bool HasSynthesizeUseAt(size_t position) const {
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if ((use_position == position) && use.IsSynthesized()) {
        return true;
      }
      if (use_position > position) break;
    }
    return false;
  }
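
  // Illustrative note (not from the original source): synthesized back-edge
  // uses keep a value live across an entire loop. With a linear order
  // B0 -> B1(header) -> B2(body, back edge), a value defined in B0 and used in
  // B2 would otherwise only be live up to its last real use; the synthesized
  // use at the loop's end position extends the interval over the whole loop,
  // so the value is still available when the back edge is taken.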

  void AddBackEdgeUses(const HBasicBlock& block_at_use) {
    DCHECK(block_at_use.IsInLoop());
    if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
      // Linear order may not be well formed when irreducible loops are present,
      // i.e. loop blocks may not be adjacent and a back edge may not be last,
      // which violates assumptions made in this method.
      return;
    }

    // Add synthesized uses at the back edge of loops to help the register allocator.
    // Note that this method is called in decreasing liveness order, to facilitate
    // adding uses at the head of the `uses_` list. Because below we iterate from
    // inner-most to outer-most, which is in increasing liveness order, we need to
    // add subsequent entries after the last inserted entry.
    const UsePositionList::iterator old_begin = uses_.begin();
    UsePositionList::iterator insert_pos = uses_.before_begin();
    for (HLoopInformationOutwardIterator it(block_at_use);
         !it.Done();
         it.Advance()) {
      HLoopInformation* current = it.Current();
      if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
        // This interval is defined in the loop. We can stop going outward.
        break;
      }

      // We're only adding a synthesized use at the last back edge. Adding synthesized uses on
      // all back edges is not necessary: anything used in the loop will have its use at the
      // last back edge. If we want branches in a loop to have better register allocation than
      // another branch, then it is the linear order we should change.
      size_t back_edge_use_position = current->GetLifetimeEnd();
      if ((old_begin != uses_.end()) && (old_begin->GetPosition() <= back_edge_use_position)) {
        // There was a use already seen in this loop. Therefore the previous call to `AddUse`
        // already inserted the backedge use. We can stop going outward.
        DCHECK(HasSynthesizeUseAt(back_edge_use_position));
        break;
      }

      DCHECK(insert_pos != uses_.before_begin()
                 ? back_edge_use_position > insert_pos->GetPosition()
                 : current == block_at_use.GetLoopInformation())
          << std::distance(uses_.before_begin(), insert_pos);

      UsePosition* new_use = new (allocator_) UsePosition(back_edge_use_position);
      insert_pos = uses_.insert_after(insert_pos, *new_use);
    }
  }

  ScopedArenaAllocator* const allocator_;

  // Ranges of this interval. We need a quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // The first range at or after the current position of a linear scan. It is
  // used to optimize range-searching queries.
  LiveRange* range_search_start_;

  // Safepoints where this interval is live.
  SafepointPosition* first_safepoint_;
  SafepointPosition* last_safepoint_;

  // Uses of this interval. Only the parent interval keeps these lists.
  UsePositionList uses_;
  EnvUsePositionList env_uses_;

  // The instruction type this interval corresponds to.
  const DataType::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

  // The first interval, from which split intervals come.
  LiveInterval* parent_;

  // The register allocated to this interval.
  int register_;

  // The spill slot allocated to this interval.
  int spill_slot_;

  // Whether the interval is for a fixed register.
  const bool is_fixed_;

  // Whether the interval is for a temporary.
  const bool is_temp_;

  // Whether this interval is a synthesized interval for a register pair.
  const bool is_high_interval_;

  // If this interval needs a register pair, the high or low equivalent.
  // `is_high_interval_` tells whether this holds the low or the high.
  LiveInterval* high_or_low_interval_;

  // The instruction represented by this interval.
  HInstruction* const defined_by_;

  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
};

/**
 * Analysis that computes the liveness of instructions:
 *
 * (a) Non-environment uses of an instruction always make
 *     the instruction live.
 * (b) Environment uses of an instruction whose type is object (that is,
 *     non-primitive) make the instruction live, because we have to keep
 *     alive objects that have finalizers deleting native objects.
 * (c) When the graph has the debuggable property, environment uses
 *     of an instruction that has a primitive type make the instruction live.
 *     If the graph does not have the debuggable property, the environment
 *     use has no effect, and may get a 'none' value after register allocation.
 *
 * (b) and (c) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
 */
class SsaLivenessAnalysis : public ValueObject {
 public:
  SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen, ScopedArenaAllocator* allocator)
      : graph_(graph),
        codegen_(codegen),
        allocator_(allocator),
        block_infos_(graph->GetBlocks().size(),
                     nullptr,
                     allocator_->Adapter(kArenaAllocSsaLiveness)),
        instructions_from_ssa_index_(allocator_->Adapter(kArenaAllocSsaLiveness)),
        instructions_from_lifetime_position_(allocator_->Adapter(kArenaAllocSsaLiveness)),
        number_of_ssa_values_(0) {
  }

  void Analyze();

  BitVector* GetLiveInSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->live_in_;
  }

  BitVector* GetLiveOutSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->live_out_;
  }

  BitVector* GetKillSet(const HBasicBlock& block) const {
    return &block_infos_[block.GetBlockId()]->kill_;
  }

  HInstruction* GetInstructionFromSsaIndex(size_t index) const {
    return instructions_from_ssa_index_[index];
  }

  HInstruction* GetInstructionFromPosition(size_t index) const {
    return instructions_from_lifetime_position_[index];
  }

  HBasicBlock* GetBlockFromPosition(size_t index) const {
    HInstruction* instruction = GetInstructionFromPosition(index);
    if (instruction == nullptr) {
      // If we are at a block boundary, get the block following.
      instruction = GetInstructionFromPosition(index + 1);
    }
    return instruction->GetBlock();
  }

  bool IsAtBlockBoundary(size_t index) const {
    return GetInstructionFromPosition(index) == nullptr;
  }

  HInstruction* GetTempUser(LiveInterval* temp) const {
    // A temporary shares the same lifetime start as the instruction that requires it.
    DCHECK(temp->IsTemp());
    HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
    DCHECK_EQ(user, temp->GetUses().front().GetUser());
    return user;
  }
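
  // An illustrative note on the position encoding (summarizing the accessors
  // above): instructions sit at even lifetime positions, and block-boundary
  // positions map to null entries, so `GetInstructionFromPosition(position / 2)`
  // takes a lifetime position back to its instruction. For instance, a temp
  // interval starting at position 14 belongs to the instruction at index 7.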

  size_t GetTempIndex(LiveInterval* temp) const {
    // We use the input index to store the index of the temporary in the user's temporary list.
    DCHECK(temp->IsTemp());
    return temp->GetUses().front().GetInputIndex();
  }

  size_t GetMaxLifetimePosition() const {
    return instructions_from_lifetime_position_.size() * 2 - 1;
  }

  size_t GetNumberOfSsaValues() const {
    return number_of_ssa_values_;
  }

  static constexpr const char* kLivenessPassName = "liveness";

 private:
  // Give an SSA number to each instruction that defines a value used by another instruction,
  // and set up the lifetime information of each instruction and block.
  void NumberInstructions();

  // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
  void ComputeLiveness();

  // Compute the live ranges of instructions, as well as the initial live_in, live_out
  // and kill sets; these initial sets do not take backward branches into account.
  void ComputeLiveRanges();

  // After computing the initial sets, this method does a fixed point
  // calculation over the live_in and live_out sets to take backward
  // branches into account.
  void ComputeLiveInAndLiveOutSets();

  // Updates the live_in set of the block and returns whether it has changed.
  bool UpdateLiveIn(const HBasicBlock& block);

  // Updates the live_out set of the block and returns whether it has changed.
  bool UpdateLiveOut(const HBasicBlock& block);
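
  // For reference, a sketch of the dataflow equations the fixed point solves
  // (standard backward liveness, phrased against the sets above; assumption:
  // uses were already seeded into live_in by ComputeLiveRanges):
  //
  //   live_out(B)  = union of live_in(S) for each successor S of B
  //   live_in(B)  |= live_out(B) - kill(B)
  //
  // Iteration proceeds until neither set changes for any block.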

  // Returns whether `instruction` in an HEnvironment held by `env_holder`
  // should be kept live by the HEnvironment.
  static bool ShouldBeLiveForEnvironment(HInstruction* env_holder, HInstruction* instruction) {
    if (instruction == nullptr) return false;
    // A value that's not live in compiled code may still be needed in the interpreter,
    // due to code motion, etc.
    if (env_holder->IsDeoptimize()) return true;
    // A value live at a throwing instruction in a try block may be copied by
    // the exception handler to its location at the top of the catch block.
    if (env_holder->CanThrowIntoCatchBlock()) return true;
    if (instruction->GetBlock()->GetGraph()->IsDebuggable()) return true;
    return instruction->GetType() == DataType::Type::kReference;
  }

  void CheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const {
    if (!block.IsLoopHeader() || !block.GetLoopInformation()->IsIrreducible()) {
      return;
    }
    BitVector* live_in = GetLiveInSet(block);
    // To satisfy our liveness algorithm, we need to ensure loop headers of
    // irreducible loops do not have any live-in instructions, except constants
    // and the current method, which can be trivially re-materialized.
    for (uint32_t idx : live_in->Indexes()) {
      HInstruction* instruction = GetInstructionFromSsaIndex(idx);
      DCHECK(instruction->GetBlock()->IsEntryBlock()) << instruction->DebugName();
      DCHECK(!instruction->IsParameterValue());
      DCHECK(instruction->IsCurrentMethod() || instruction->IsConstant())
          << instruction->DebugName();
    }
  }

  HGraph* const graph_;
  CodeGenerator* const codegen_;

  // Use a local ScopedArenaAllocator for allocating memory.
  // This allocator must remain alive while doing register allocation.
  ScopedArenaAllocator* const allocator_;

  ScopedArenaVector<BlockInfo*> block_infos_;

  // Temporary array used when computing live_in, live_out, and kill sets.
  ScopedArenaVector<HInstruction*> instructions_from_ssa_index_;

  // Temporary array used when inserting moves in the graph.
  ScopedArenaVector<HInstruction*> instructions_from_lifetime_position_;
  size_t number_of_ssa_values_;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
  ART_FRIEND_TEST(RegisterAllocatorTest, FreeUntil);

  DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_