// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_LITHIUM_ALLOCATOR_H_
#define V8_LITHIUM_ALLOCATOR_H_

#include "src/v8.h"

#include "src/allocation.h"
#include "src/lithium.h"
#include "src/zone.h"

namespace v8 {
namespace internal {

// Forward declarations.
class HBasicBlock;
class HGraph;
class HInstruction;
class HPhi;
class HTracer;
class HValue;
class BitVector;
class StringStream;

class LPlatformChunk;
class LOperand;
class LUnallocated;
class LGap;
class LParallelMove;
class LPointerMap;


// This class represents a single point of a LOperand's lifetime.
// For each lithium instruction there are exactly two lifetime positions:
// the beginning and the end of the instruction. Lifetime positions for
// different lithium instructions are disjoint.
class LifetimePosition {
 public:
  // Returns the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition FromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }

  // Returns a numeric representation of this lifetime position.
  int Value() const {
    return value_;
  }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int InstructionIndex() const {
    ASSERT(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to the instruction
  // start.
  bool IsInstructionStart() const {
    return (value_ & (kStep - 1)) == 0;
  }

  // Returns the lifetime position for the start of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionStart() const {
    ASSERT(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the end of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionEnd() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep/2);
  }

  // Returns the lifetime position for the beginning of the next instruction.
  LifetimePosition NextInstruction() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep);
  }

  // Returns the lifetime position for the beginning of the previous
  // instruction.
  LifetimePosition PrevInstruction() const {
    ASSERT(IsValid());
    ASSERT(value_ > 1);
    return LifetimePosition(InstructionStart().Value() - kStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

 private:
  static const int kStep = 2;

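  // With kStep == 2, the instruction with index i is encoded by exactly two
  // positions: 2 * i (its start, see FromInstructionIndex) and 2 * i + 1
  // (its end, see InstructionEnd).
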
  // Code relies on kStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kStep));

  explicit LifetimePosition(int value) : value_(value) { }

  int value_;
};


enum RegisterKind {
  UNALLOCATED_REGISTERS,
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};


// A register-allocator view of a Lithium instruction. It contains the id of
// the output operand and a list of input operand uses.

class LInstruction;
class LEnvironment;

// Iterator for non-null temp operands.
class TempIterator BASE_EMBEDDED {
 public:
  inline explicit TempIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  inline void SkipUninteresting();
  LInstruction* instr_;
  int limit_;
  int current_;
};


// Iterator for non-constant input operands.
class InputIterator BASE_EMBEDDED {
 public:
  inline explicit InputIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  inline void SkipUninteresting();
  LInstruction* instr_;
  int limit_;
  int current_;
};


class UseIterator BASE_EMBEDDED {
 public:
  inline explicit UseIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  InputIterator input_iterator_;
  DeepIterator env_iterator_;
};


// Representation of the non-empty half-open interval [start, end).
class UseInterval: public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(NULL) {
    ASSERT(start.Value() < end.Value());
  }

  LifetimePosition start() const { return start_; }
  LifetimePosition end() const { return end_; }
  UseInterval* next() const { return next_; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  void SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with the other, returns the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start().Value() < start_.Value()) return other->Intersect(this);
    if (other->start().Value() < end_.Value()) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_.Value() <= point.Value() && point.Value() < end_.Value();
  }

 private:
  void set_start(LifetimePosition start) { start_ = start; }
  void set_next(UseInterval* next) { next_ = next; }

  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  friend class LiveRange;  // Assigns to start_.
};

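// Example for UseInterval::Intersect(): the intervals [2, 10) and [6, 14)
// overlap, so Intersect() returns position 6, the smallest position contained
// in both; for disjoint intervals it returns LifetimePosition::Invalid().
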
// Representation of a use position.
class UsePosition: public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, LOperand* operand, LOperand* hint);

  LOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != NULL; }

  LOperand* hint() const { return hint_; }
  bool HasHint() const;
  bool RequiresRegister() const;
  bool RegisterIsBeneficial() const;

  LifetimePosition pos() const { return pos_; }
  UsePosition* next() const { return next_; }

 private:
  void set_next(UsePosition* next) { next_ = next; }

  LOperand* const operand_;
  LOperand* const hint_;
  LifetimePosition const pos_;
  UsePosition* next_;
  bool requires_reg_;
  bool register_beneficial_;

  friend class LiveRange;
};

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange: public ZoneObject {
 public:
  static const int kInvalidAssignment = 0x7fffffff;

  LiveRange(int id, Zone* zone);

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  LiveRange* parent() const { return parent_; }
  LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
  LiveRange* next() const { return next_; }
  bool IsChild() const { return parent() != NULL; }
  int id() const { return id_; }
  bool IsFixed() const { return id_ < 0; }
  bool IsEmpty() const { return first_interval() == NULL; }
  LOperand* CreateAssignedOperand(Zone* zone);
  int assigned_register() const { return assigned_register_; }
  int spill_start_index() const { return spill_start_index_; }
  void set_assigned_register(int reg, Zone* zone);
  void MakeSpilled(Zone* zone);

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextUsePosition(LifetimePosition start);

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextRegisterPosition(LifetimePosition start);

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  // Modifies the internal state of the live range!
  UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos);

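  // A range that has been split forms a chain: the first part is the
  // top-level range (parent() == NULL) and every later part is a child whose
  // parent() points back at the top level; the parts are linked through
  // next().
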
  // Split this live range at the given position, which must follow the start
  // of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone);

  RegisterKind Kind() const { return kind_; }
  bool HasRegisterAssigned() const {
    return assigned_register_ != kInvalidAssignment;
  }
  bool IsSpilled() const { return spilled_; }

  LOperand* current_hint_operand() const {
    ASSERT(current_hint_operand_ == FirstHint());
    return current_hint_operand_;
  }
  LOperand* FirstHint() const {
    UsePosition* pos = first_pos_;
    while (pos != NULL && !pos->HasHint()) pos = pos->next();
    if (pos != NULL) return pos->hint();
    return NULL;
  }

  LifetimePosition Start() const {
    ASSERT(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    ASSERT(!IsEmpty());
    return last_interval_->end();
  }

  bool HasAllocatedSpillOperand() const;
  LOperand* GetSpillOperand() const { return spill_operand_; }
  void SetSpillOperand(LOperand* operand);

  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position);
  LifetimePosition FirstIntersection(LiveRange* other);

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start,
                      LifetimePosition end,
                      Zone* zone);
  void AddUseInterval(LifetimePosition start,
                      LifetimePosition end,
                      Zone* zone);
  void AddUsePosition(LifetimePosition pos,
                      LOperand* operand,
                      LOperand* hint,
                      Zone* zone);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

#ifdef DEBUG
  // True if target overlaps an existing interval.
  bool HasOverlap(UseInterval* target) const;
  void Verify() const;
#endif

 private:
  void ConvertOperands(Zone* zone);
  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  int id_;
  bool spilled_;
  RegisterKind kind_;
  int assigned_register_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  LiveRange* parent_;
  LiveRange* next_;
  // This is used as a cache; it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  UsePosition* last_processed_use_;
  // This is used as a cache; it's invalid outside of BuildLiveRanges.
  LOperand* current_hint_operand_;
  LOperand* spill_operand_;
  int spill_start_index_;

  friend class LAllocator;  // Assigns to kind_.
};


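// The register allocator for Lithium code: a linear scan allocator that
// operates on the live ranges above. Allocate() processes a whole chunk;
// AllocationOk() indicates whether allocation succeeded (it fails, for
// example, when more than LUnallocated::kMaxVirtualRegisters virtual
// registers are needed).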
class LAllocator BASE_EMBEDDED {
 public:
  LAllocator(int first_virtual_register, HGraph* graph);

  static void TraceAlloc(const char* msg, ...);

  // Checks whether the value of a given virtual register is tagged.
  bool HasTaggedValue(int virtual_register) const;

  // Returns the register kind required by the given virtual register.
  RegisterKind RequiredRegisterKind(int virtual_register) const;

  bool Allocate(LChunk* chunk);

  const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
  const Vector<LiveRange*>* fixed_live_ranges() const {
    return &fixed_live_ranges_;
  }
  const Vector<LiveRange*>* fixed_double_live_ranges() const {
    return &fixed_double_live_ranges_;
  }

  LPlatformChunk* chunk() const { return chunk_; }
  HGraph* graph() const { return graph_; }
  Isolate* isolate() const { return graph_->isolate(); }
  Zone* zone() { return &zone_; }

  int GetVirtualRegister() {
    if (next_virtual_register_ >= LUnallocated::kMaxVirtualRegisters) {
      allocation_ok_ = false;
      // Maintain the invariant that we return something below the maximum.
      return 0;
    }
    return next_virtual_register_++;
  }

  bool AllocationOk() { return allocation_ok_; }

  void MarkAsOsrEntry() {
    // There can be only one.
    ASSERT(!has_osr_entry_);
    // Simply set a flag to find and process the instruction later.
    has_osr_entry_ = true;
  }

#ifdef DEBUG
  void Verify() const;
#endif

  BitVector* assigned_registers() {
    return assigned_registers_;
  }
  BitVector* assigned_double_registers() {
    return assigned_double_registers_;
  }

 private:
  void MeetRegisterConstraints();
  void ResolvePhis();
  void BuildLiveRanges();
  void AllocateGeneralRegisters();
  void AllocateDoubleRegisters();
  void ConnectRanges();
  void ResolveControlFlow();
  void PopulatePointerMaps();
  void AllocateRegisters();
  bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
  inline bool SafePointsAreInOrder() const;

  // Liveness analysis support.
  void InitializeLivenessAnalysis();
  BitVector* ComputeLiveOut(HBasicBlock* block);
  void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
  void ProcessInstructions(HBasicBlock* block, BitVector* live);
  void MeetRegisterConstraints(HBasicBlock* block);
  void MeetConstraintsBetween(LInstruction* first,
                              LInstruction* second,
                              int gap_index);
  void ResolvePhis(HBasicBlock* block);

  // Helper methods for building intervals.
  LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
  LiveRange* LiveRangeFor(LOperand* operand);
  void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
  void Use(LifetimePosition block_start,
           LifetimePosition position,
           LOperand* operand,
           LOperand* hint);
  void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);

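  // During allocation each live range is conceptually in one of four states:
  // unhandled (not yet processed; kept sorted, see UnhandledIsSorted),
  // active (live at the current position and assigned a register), inactive
  // (assigned a register but currently in a lifetime hole), or handled
  // (no further processing needed). The helpers below move ranges between
  // the corresponding lists.
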
  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);
  void FreeSpillSlot(LiveRange* range);
  LOperand* TryReuseSpillSlot(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Live range splitting helpers.

  // Split the given range at the given position.
  // If the range starts at or after the given position, then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range,
                          LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range,
                    LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range,
                         LifetimePosition start,
                         LifetimePosition until,
                         LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  // If we are trying to spill a range inside a loop, try to hoist the spill
  // position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  void Spill(LiveRange* range);
  bool IsBlockBoundary(LifetimePosition pos);

  // Helper methods for resolving control flow.
  void ResolveControlFlow(LiveRange* range,
                          HBasicBlock* block,
                          HBasicBlock* pred);

  inline void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Returns the parallel move that should be used to connect ranges split at
  // the given position.
  LParallelMove* GetConnectingParallelMove(LifetimePosition pos);

  // Returns the block which contains the given lifetime position.
  HBasicBlock* GetBlock(LifetimePosition pos);

  // Helper methods for the fixed registers.
  int RegisterCount() const;
  static int FixedLiveRangeID(int index) { return -index - 1; }
  static int FixedDoubleLiveRangeID(int index);
  LiveRange* FixedLiveRangeFor(int index);
  LiveRange* FixedDoubleLiveRangeFor(int index);
  LiveRange* LiveRangeFor(int index);
  HPhi* LookupPhi(LOperand* operand) const;
  LGap* GetLastGap(HBasicBlock* block);

  const char* RegisterName(int allocation_index);

  inline bool IsGapAt(int index);

  inline LInstruction* InstructionAt(int index);

  inline LGap* GapAt(int index);

  Zone zone_;

  LPlatformChunk* chunk_;

  // During liveness analysis, keep a mapping from block id to live_in sets
  // for blocks already analyzed.
  ZoneList<BitVector*> live_in_sets_;

  // Liveness analysis results.
  ZoneList<LiveRange*> live_ranges_;

  // Lists of live ranges.
  EmbeddedVector<LiveRange*, Register::kMaxNumAllocatableRegisters>
      fixed_live_ranges_;
  EmbeddedVector<LiveRange*, DoubleRegister::kMaxNumAllocatableRegisters>
      fixed_double_live_ranges_;
  ZoneList<LiveRange*> unhandled_live_ranges_;
  ZoneList<LiveRange*> active_live_ranges_;
  ZoneList<LiveRange*> inactive_live_ranges_;
  ZoneList<LiveRange*> reusable_slots_;

  // Next virtual register number to be assigned to temporaries.
  int next_virtual_register_;
  int first_artificial_register_;
  GrowableBitVector double_artificial_registers_;

  RegisterKind mode_;
  int num_registers_;

  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;

  HGraph* graph_;

  bool has_osr_entry_;

  // Indicates success or failure during register allocation.
  bool allocation_ok_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LAllocator);
};


class LAllocatorPhase : public CompilationPhase {
 public:
  LAllocatorPhase(const char* name, LAllocator* allocator);
  ~LAllocatorPhase();

 private:
  LAllocator* allocator_;
  unsigned allocator_zone_start_allocation_size_;

  DISALLOW_COPY_AND_ASSIGN(LAllocatorPhase);
};


} }  // namespace v8::internal

#endif  // V8_LITHIUM_ALLOCATOR_H_