/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_

#include "nodes.h"

namespace art {

class CodeGenerator;

class BlockInfo : public ArenaObject {
 public:
  BlockInfo(ArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
      : block_(block),
        live_in_(allocator, number_of_ssa_values, false),
        live_out_(allocator, number_of_ssa_values, false),
        kill_(allocator, number_of_ssa_values, false) {
    live_in_.ClearAllBits();
    live_out_.ClearAllBits();
    kill_.ClearAllBits();
  }

 private:
  const HBasicBlock& block_;
  ArenaBitVector live_in_;
  ArenaBitVector live_out_;
  ArenaBitVector kill_;

  friend class SsaLivenessAnalysis;

  DISALLOW_COPY_AND_ASSIGN(BlockInfo);
};

/**
 * A live range contains the start and end of a range where an instruction
 * is live.
 */
class LiveRange : public ArenaObject {
 public:
  LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
    DCHECK_LT(start, end);
    DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
  }

  size_t GetStart() const { return start_; }
  size_t GetEnd() const { return end_; }
  LiveRange* GetNext() const { return next_; }

  bool IntersectsWith(const LiveRange& other) {
    return (start_ >= other.start_ && start_ < other.end_)
        || (other.start_ >= start_ && other.start_ < end_);
  }

  bool IsBefore(const LiveRange& other) {
    return end_ <= other.start_;
  }

  void Dump(std::ostream& stream) {
    stream << "[" << start_ << ", " << end_ << ")";
  }

 private:
  size_t start_;
  size_t end_;
  LiveRange* next_;

  friend class LiveInterval;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};

/**
 * A use position represents a live interval use at a given position.
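 *
 * Use positions of an interval form a singly linked list kept sorted by
 * non-decreasing position (see the DCHECK on `next` in the constructor below).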
 */
class UsePosition : public ArenaObject {
 public:
  UsePosition(HInstruction* user,
              size_t input_index,
              bool is_environment,
              size_t position,
              UsePosition* next)
      : user_(user),
        input_index_(input_index),
        is_environment_(is_environment),
        position_(position),
        next_(next) {
    DCHECK(user->AsPhi() != nullptr || GetPosition() == user->GetLifetimePosition() + 1);
    DCHECK(next_ == nullptr || next->GetPosition() >= GetPosition());
  }

  size_t GetPosition() const { return position_; }

  UsePosition* GetNext() const { return next_; }

  HInstruction* GetUser() const { return user_; }

  bool GetIsEnvironment() const { return is_environment_; }

  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

 private:
  HInstruction* const user_;
  const size_t input_index_;
  const bool is_environment_;
  const size_t position_;
  UsePosition* const next_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};

/**
 * An interval is a list of disjoint live ranges where an instruction is live.
 * Each instruction that has uses gets an interval.
 */
class LiveInterval : public ArenaObject {
 public:
  LiveInterval(ArenaAllocator* allocator, Primitive::Type type, HInstruction* defined_by = nullptr)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        first_use_(nullptr),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),
        register_(kNoRegister),
        spill_slot_(kNoSpillSlot),
        is_fixed_(false),
        defined_by_(defined_by) {}

  static LiveInterval* MakeFixedInterval(ArenaAllocator* allocator, int reg, Primitive::Type type) {
    LiveInterval* interval = new (allocator) LiveInterval(allocator, type);
    interval->SetRegister(reg);
    interval->is_fixed_ = true;
    return interval;
  }

  bool IsFixed() const { return is_fixed_; }

  void AddUse(HInstruction* instruction, size_t input_index, bool is_environment) {
    // Set the use within the instruction.
    // TODO: Use the instruction's location to know whether the instruction can die
    // at entry, or needs to stay alive within the user.
    size_t position = instruction->GetLifetimePosition() + 1;
    size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    size_t end_block_position = instruction->GetBlock()->GetLifetimeEnd();
    if (first_range_ == nullptr) {
      // First time we see a use of that interval.
      first_range_ = last_range_ = new (allocator_) LiveRange(start_block_position, position, nullptr);
    } else if (first_range_->GetStart() == start_block_position) {
      // There is a use later in the same block.
      DCHECK_LE(position, first_range_->GetEnd());
    } else if (first_range_->GetStart() == end_block_position) {
      // Last use is in the following block.
      first_range_->start_ = start_block_position;
    } else {
      DCHECK(first_range_->GetStart() > position);
      // There is a hole in the interval. Create a new range.
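      // The existing first range starts after this use (see the DCHECK above),
      // so prepending keeps the range list sorted by start position.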
      first_range_ = new (allocator_) LiveRange(start_block_position, position, first_range_);
    }
    first_use_ = new (allocator_) UsePosition(
        instruction, input_index, is_environment, position, first_use_);
  }

  void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    DCHECK(instruction->AsPhi() != nullptr);
    first_use_ = new (allocator_) UsePosition(
        instruction, input_index, false, block->GetLifetimeEnd(), first_use_);
  }

  void AddRange(size_t start, size_t end) {
    if (first_range_ == nullptr) {
      first_range_ = last_range_ = new (allocator_) LiveRange(start, end, first_range_);
    } else if (first_range_->GetStart() == end) {
      // There is a use in the following block.
      first_range_->start_ = start;
    } else {
      DCHECK(first_range_->GetStart() > end);
      // There is a hole in the interval. Create a new range.
      first_range_ = new (allocator_) LiveRange(start, end, first_range_);
    }
  }

  void AddLoopRange(size_t start, size_t end) {
    DCHECK(first_range_ != nullptr);
    while (first_range_ != nullptr && first_range_->GetEnd() < end) {
      DCHECK_LE(start, first_range_->GetStart());
      first_range_ = first_range_->GetNext();
    }
    if (first_range_ == nullptr) {
      // Uses are only in the loop.
      first_range_ = last_range_ = new (allocator_) LiveRange(start, end, nullptr);
    } else {
      // There are uses after the loop.
      first_range_->start_ = start;
    }
  }

  bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
  void SetSpillSlot(int slot) { spill_slot_ = slot; }
  int GetSpillSlot() const { return spill_slot_; }

  void SetFrom(size_t from) {
    if (first_range_ != nullptr) {
      first_range_->start_ = from;
    } else {
      // Instruction without uses.
      DCHECK(!defined_by_->HasUses());
      DCHECK(from == defined_by_->GetLifetimePosition());
      first_range_ = last_range_ = new (allocator_) LiveRange(from, from + 2, nullptr);
    }
  }

  LiveInterval* GetParent() const { return parent_; }

  LiveRange* GetFirstRange() const { return first_range_; }

  int GetRegister() const { return register_; }
  void SetRegister(int reg) { register_ = reg; }
  void ClearRegister() { register_ = kNoRegister; }
  bool HasRegister() const { return register_ != kNoRegister; }

  bool IsDeadAt(size_t position) const {
    return last_range_->GetEnd() <= position;
  }

  bool Covers(size_t position) const {
    LiveRange* current = first_range_;
    while (current != nullptr) {
      if (position >= current->GetStart() && position < current->GetEnd()) {
        return true;
      }
      current = current->GetNext();
    }
    return false;
  }

  /**
   * Returns the first intersection of this interval with `other`.
   */
  size_t FirstIntersectionWith(LiveInterval* other) const {
    // Advance both intervals and find the first matching range start in
    // this interval.
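    // Range lists are sorted by start position and the ranges of an interval
    // are disjoint, so a single linear walk over both lists finds the first
    // overlap, if any.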
    LiveRange* my_range = first_range_;
    LiveRange* other_range = other->first_range_;
    do {
      if (my_range->IntersectsWith(*other_range)) {
        return std::max(my_range->GetStart(), other_range->GetStart());
      } else if (my_range->IsBefore(*other_range)) {
        my_range = my_range->GetNext();
        if (my_range == nullptr) {
          return kNoLifetime;
        }
      } else {
        DCHECK(other_range->IsBefore(*my_range));
        other_range = other_range->GetNext();
        if (other_range == nullptr) {
          return kNoLifetime;
        }
      }
    } while (true);
  }

  size_t GetStart() const {
    return first_range_->GetStart();
  }

  size_t GetEnd() const {
    return last_range_->GetEnd();
  }

  size_t FirstRegisterUseAfter(size_t position) const {
    if (position == GetStart() && defined_by_ != nullptr) {
      LocationSummary* locations = defined_by_->GetLocations();
      Location location = locations->Out();
      // This interval is the first interval of the instruction. If the output
      // of the instruction requires a register, we return the position of that
      // instruction as the first register use.
      if (location.IsUnallocated()) {
        if ((location.GetPolicy() == Location::kRequiresRegister)
            || (location.GetPolicy() == Location::kSameAsFirstInput
                && locations->InAt(0).GetPolicy() == Location::kRequiresRegister)) {
          return position;
        }
      }
    }

    UsePosition* use = first_use_;
    size_t end = GetEnd();
    while (use != nullptr && use->GetPosition() <= end) {
      size_t use_position = use->GetPosition();
      if (use_position >= position && !use->GetIsEnvironment()) {
        Location location = use->GetUser()->GetLocations()->InAt(use->GetInputIndex());
        if (location.IsUnallocated() && location.GetPolicy() == Location::kRequiresRegister) {
          return use_position;
        }
      }
      use = use->GetNext();
    }
    return kNoLifetime;
  }

  size_t FirstRegisterUse() const {
    return FirstRegisterUseAfter(GetStart());
  }

  UsePosition* GetFirstUse() const {
    return first_use_;
  }

  Primitive::Type GetType() const {
    return type_;
  }

  HInstruction* GetDefinedBy() const {
    return defined_by_;
  }

  /**
   * Split this interval at `position`. This interval is changed to:
   * [start ... position).
   *
   * The new interval covers:
   * [position ... end)
   */
  LiveInterval* SplitAt(size_t position) {
    DCHECK(!is_fixed_);
    DCHECK_GT(position, GetStart());

    if (last_range_->GetEnd() <= position) {
      // This interval is already dead at `position`, no need to split.
      return nullptr;
    }

    LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    new_interval->next_sibling_ = next_sibling_;
    next_sibling_ = new_interval;
    new_interval->parent_ = parent_;

    new_interval->first_use_ = first_use_;
    LiveRange* current = first_range_;
    LiveRange* previous = nullptr;
    // Iterate over the ranges, and either find a range that covers this position, or
    // the two ranges on either side of it (that is, the position is in a lifetime hole).
    do {
      if (position >= current->GetEnd()) {
        // Move to next range.
        previous = current;
        current = current->next_;
      } else if (position <= current->GetStart()) {
        // If the previous range did not cover this position, we know position is in
        // a lifetime hole. We can just break the first_range_ and last_range_ links
        // and return the new interval.
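        // For example (illustrative values): splitting an interval with ranges
        // [4, 8) and [12, 20) at position 10 keeps [4, 8) in this interval and
        // moves [12, 20) to the new sibling.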
        DCHECK(previous != nullptr);
        DCHECK(current != first_range_);
        new_interval->last_range_ = last_range_;
        last_range_ = previous;
        previous->next_ = nullptr;
        new_interval->first_range_ = current;
        return new_interval;
      } else {
        // This range covers position. We create a new last_range_ for this interval
        // that covers current->GetStart() to position. We also shorten the current
        // range and make it the first range of the new interval.
        DCHECK(position < current->GetEnd() && position > current->GetStart());
        new_interval->last_range_ = last_range_;
        last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
        if (previous != nullptr) {
          previous->next_ = last_range_;
        } else {
          first_range_ = last_range_;
        }
        new_interval->first_range_ = current;
        current->start_ = position;
        return new_interval;
      }
    } while (current != nullptr);

    LOG(FATAL) << "Unreachable";
    return nullptr;
  }

  bool StartsBefore(LiveInterval* other) const {
    return GetStart() <= other->GetStart();
  }

  bool StartsAfter(LiveInterval* other) const {
    return GetStart() >= other->GetStart();
  }

  void Dump(std::ostream& stream) const {
    stream << "ranges: { ";
    LiveRange* current = first_range_;
    do {
      current->Dump(stream);
      stream << " ";
    } while ((current = current->GetNext()) != nullptr);
    stream << "}, uses: { ";
    UsePosition* use = first_use_;
    if (use != nullptr) {
      do {
        use->Dump(stream);
        stream << " ";
      } while ((use = use->GetNext()) != nullptr);
    }
    stream << "}";
  }

  LiveInterval* GetNextSibling() const { return next_sibling_; }

 private:
  ArenaAllocator* const allocator_;

  // Ranges of this interval. We need quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // Uses of this interval. Note that this linked list is shared amongst siblings.
  UsePosition* first_use_;

  // The instruction type this interval corresponds to.
  const Primitive::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

  // The first interval from which split intervals come.
  LiveInterval* parent_;

  // The register allocated to this interval.
  int register_;

  // The spill slot allocated to this interval.
  int spill_slot_;

  // Whether the interval is for a fixed register.
  bool is_fixed_;

  // The instruction represented by this interval.
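  // Null for fixed intervals and for the sibling intervals created by SplitAt.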
  HInstruction* const defined_by_;

  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
};

class SsaLivenessAnalysis : public ValueObject {
 public:
  SsaLivenessAnalysis(const HGraph& graph, CodeGenerator* codegen)
      : graph_(graph),
        codegen_(codegen),
        linear_post_order_(graph.GetArena(), graph.GetBlocks().Size()),
        block_infos_(graph.GetArena(), graph.GetBlocks().Size()),
        instructions_from_ssa_index_(graph.GetArena(), 0),
        instructions_from_lifetime_position_(graph.GetArena(), 0),
        number_of_ssa_values_(0) {
    block_infos_.SetSize(graph.GetBlocks().Size());
  }

  void Analyze();

  BitVector* GetLiveInSet(const HBasicBlock& block) const {
    return &block_infos_.Get(block.GetBlockId())->live_in_;
  }

  BitVector* GetLiveOutSet(const HBasicBlock& block) const {
    return &block_infos_.Get(block.GetBlockId())->live_out_;
  }

  BitVector* GetKillSet(const HBasicBlock& block) const {
    return &block_infos_.Get(block.GetBlockId())->kill_;
  }

  const GrowableArray<HBasicBlock*>& GetLinearPostOrder() const {
    return linear_post_order_;
  }

  HInstruction* GetInstructionFromSsaIndex(size_t index) const {
    return instructions_from_ssa_index_.Get(index);
  }

  HInstruction* GetInstructionFromPosition(size_t index) const {
    return instructions_from_lifetime_position_.Get(index);
  }

  size_t GetMaxLifetimePosition() const {
    return instructions_from_lifetime_position_.Size() * 2 - 1;
  }

  size_t GetNumberOfSsaValues() const {
    return number_of_ssa_values_;
  }

 private:
  // Linearize the graph so that:
  // (1) a block is always after its dominator,
  // (2) blocks of loops are contiguous.
  // This creates a natural and efficient ordering when visualizing live ranges.
  void LinearizeGraph();

  // Give an SSA number to each instruction that defines a value used by another instruction,
  // and set up the lifetime information of each instruction and block.
  void NumberInstructions();

  // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
  void ComputeLiveness();

  // Compute the live ranges of instructions, as well as the initial live_in, live_out and
  // kill sets, that do not take into account backward branches.
  void ComputeLiveRanges();

  // After computing the initial sets, this method does a fixed point
  // calculation over the live_in and live_out sets to take into account
  // backward branches.
  void ComputeLiveInAndLiveOutSets();

  // Update the live_in set of the block and return whether it has changed.
  bool UpdateLiveIn(const HBasicBlock& block);

  // Update the live_out set of the block and return whether it has changed.
  bool UpdateLiveOut(const HBasicBlock& block);

  const HGraph& graph_;
  CodeGenerator* const codegen_;
  GrowableArray<HBasicBlock*> linear_post_order_;
  GrowableArray<BlockInfo*> block_infos_;

  // Temporary array used when computing live_in, live_out, and kill sets.
  GrowableArray<HInstruction*> instructions_from_ssa_index_;

  // Temporary array used when inserting moves in the graph.
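  // Note: GetMaxLifetimePosition above suggests instructions are numbered every
  // two lifetime positions, so entry `i` presumably maps to lifetime position `2 * i`.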
  GrowableArray<HInstruction*> instructions_from_lifetime_position_;
  size_t number_of_ssa_values_;

  DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
};

class HLinearOrderIterator : public ValueObject {
 public:
  explicit HLinearOrderIterator(const SsaLivenessAnalysis& liveness)
      : post_order_(liveness.GetLinearPostOrder()), index_(liveness.GetLinearPostOrder().Size()) {}

  bool Done() const { return index_ == 0; }
  HBasicBlock* Current() const { return post_order_.Get(index_ - 1); }
  void Advance() { --index_; DCHECK_GE(index_, 0U); }

 private:
  const GrowableArray<HBasicBlock*>& post_order_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
};

class HLinearPostOrderIterator : public ValueObject {
 public:
  explicit HLinearPostOrderIterator(const SsaLivenessAnalysis& liveness)
      : post_order_(liveness.GetLinearPostOrder()), index_(0) {}

  bool Done() const { return index_ == post_order_.Size(); }
  HBasicBlock* Current() const { return post_order_.Get(index_); }
  void Advance() { ++index_; }

 private:
  const GrowableArray<HBasicBlock*>& post_order_;
  size_t index_;

  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_