// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/hydrogen.h"
#include "src/hydrogen-gvn.h"
#include "src/v8.h"

namespace v8 {
namespace internal {

class HInstructionMap FINAL : public ZoneObject {
 public:
  HInstructionMap(Zone* zone, SideEffectsTracker* side_effects_tracker)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil),
        side_effects_tracker_(side_effects_tracker) {
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  void Kill(SideEffects side_effects);

  void Add(HInstruction* instr, Zone* zone) {
    present_depends_on_.Add(side_effects_tracker_->ComputeDependsOn(instr));
    Insert(instr, zone);
  }

  HInstruction* Lookup(HInstruction* instr) const;

  HInstructionMap* Copy(Zone* zone) const {
    return new(zone) HInstructionMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HInstruction* values. Stored in arrays.
  struct HInstructionMapListElement {
    HInstruction* instr;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list.

  // Must be a power of 2.
  static const int kInitialSize = 16;

  HInstructionMap(Zone* zone, const HInstructionMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HInstruction* instr, Zone* zone);
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HInstructionMap.
  SideEffects present_depends_on_;
  // Primary store - contains the first value with a given hash.
  // Colliding elements are stored in linked lists.
  HInstructionMapListElement* array_;
  // The linked lists containing hash collisions.
  HInstructionMapListElement* lists_;
  int free_list_head_;  // Unused elements in lists_ are on the free list.
  SideEffectsTracker* side_effects_tracker_;
};


class HSideEffectMap FINAL BASE_EMBEDDED {
 public:
  HSideEffectMap();
  explicit HSideEffectMap(HSideEffectMap* other);
  HSideEffectMap& operator= (const HSideEffectMap& other);

  void Kill(SideEffects side_effects);

  void Store(SideEffects side_effects, HInstruction* instr);

  bool IsEmpty() const { return count_ == 0; }

  inline HInstruction* operator[](int i) const {
    DCHECK(0 <= i);
    DCHECK(i < kNumberOfTrackedSideEffects);
    return data_[i];
  }
  inline HInstruction* at(int i) const { return operator[](i); }

 private:
  int count_;
  HInstruction* data_[kNumberOfTrackedSideEffects];
};


void TraceGVN(const char* msg, ...) {
  va_list arguments;
  va_start(arguments, msg);
  base::OS::VPrint(msg, arguments);
  va_end(arguments);
}


// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments
// when --trace-gvn is off.
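// Each TRACE_GVN_N(msg, ...) call below expands to an if (FLAG_trace_gvn)
// guard around TraceGVN(msg, ...), so the argument expressions are only
// evaluated when tracing is enabled.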
#define TRACE_GVN_1(msg, a1)                    \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1);                          \
  }

#define TRACE_GVN_2(msg, a1, a2)                \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2);                      \
  }

#define TRACE_GVN_3(msg, a1, a2, a3)            \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3);                  \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4)        \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3, a4);              \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)    \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3, a4, a5);          \
  }


HInstructionMap::HInstructionMap(Zone* zone, const HInstructionMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_depends_on_(other->present_depends_on_),
      array_(zone->NewArray<HInstructionMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HInstructionMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_),
      side_effects_tracker_(other->side_effects_tracker_) {
  MemCopy(array_, other->array_,
          array_size_ * sizeof(HInstructionMapListElement));
  MemCopy(lists_, other->lists_,
          lists_size_ * sizeof(HInstructionMapListElement));
}


void HInstructionMap::Kill(SideEffects changes) {
  if (!present_depends_on_.ContainsAnyOf(changes)) return;
  present_depends_on_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HInstruction* instr = array_[i].instr;
    if (instr != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        HInstruction* instr = lists_[current].instr;
        SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
        if (depends_on.ContainsAnyOf(changes)) {
          // Drop it.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it.
          lists_[current].next = kept;
          kept = current;
          present_depends_on_.Add(depends_on);
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      instr = array_[i].instr;
      SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
      if (depends_on.ContainsAnyOf(changes)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].instr = NULL;
        } else {
          array_[i].instr = lists_[head].instr;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_depends_on_.Add(depends_on);  // Keep it.
      }
    }
  }
}


HInstruction* HInstructionMap::Lookup(HInstruction* instr) const {
  uint32_t hash = static_cast<uint32_t>(instr->Hashcode());
  uint32_t pos = Bound(hash);
  if (array_[pos].instr != NULL) {
    if (array_[pos].instr->Equals(instr)) return array_[pos].instr;
    int next = array_[pos].next;
    while (next != kNil) {
      if (lists_[next].instr->Equals(instr)) return lists_[next].instr;
      next = lists_[next].next;
    }
  }
  return NULL;
}


void HInstructionMap::Resize(int new_size, Zone* zone) {
  DCHECK(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.
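  // (array_size_ is always a power of two and Bound() masks the hash with
  // array_size_ - 1, so doubling the size only adds one bit to the mask:
  // entries that collided before may now separate, but entries that did not
  // collide still cannot.)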

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HInstructionMapListElement* new_array =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_array, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_array = array_;
  int old_size = array_size_;

  int old_count = count_;
  count_ = 0;
  // Do not modify present_depends_on_. It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].instr != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          Insert(lists_[current].instr, zone);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored instruction.
        Insert(old_array[i].instr, zone);
      }
    }
  }
  USE(old_count);
  DCHECK(count_ == old_count);
}


void HInstructionMap::ResizeLists(int new_size, Zone* zone) {
  DCHECK(new_size > lists_size_);

  HInstructionMapListElement* new_lists =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_lists, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_lists = lists_;
  int old_size = lists_size_;

  lists_size_ = new_size;
  lists_ = new_lists;

  if (old_lists != NULL) {
    MemCopy(lists_, old_lists, old_size * sizeof(HInstructionMapListElement));
  }
  for (int i = old_size; i < lists_size_; ++i) {
    lists_[i].next = free_list_head_;
    free_list_head_ = i;
  }
}


void HInstructionMap::Insert(HInstruction* instr, Zone* zone) {
  DCHECK(instr != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  DCHECK(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(instr->Hashcode()));
  if (array_[pos].instr == NULL) {
    array_[pos].instr = instr;
    array_[pos].next = kNil;
  } else {
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    int new_element_pos = free_list_head_;
    DCHECK(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].instr = instr;
    lists_[new_element_pos].next = array_[pos].next;
    DCHECK(array_[pos].next == kNil || lists_[array_[pos].next].instr != NULL);
    array_[pos].next = new_element_pos;
  }
}


HSideEffectMap::HSideEffectMap() : count_(0) {
  memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
}


HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
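  // Note that operator= only copies data_; count_ was already copied from
  // |other| in the initializer list above.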
}


HSideEffectMap& HSideEffectMap::operator=(const HSideEffectMap& other) {
  if (this != &other) {
    MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
  }
  return *this;
}


void HSideEffectMap::Kill(SideEffects side_effects) {
  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
    if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
      if (data_[i] != NULL) count_--;
      data_[i] = NULL;
    }
  }
}


void HSideEffectMap::Store(SideEffects side_effects, HInstruction* instr) {
  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
    if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
      if (data_[i] == NULL) count_++;
      data_[i] = instr;
    }
  }
}


SideEffects SideEffectsTracker::ComputeChanges(HInstruction* instr) {
  int index;
  SideEffects result(instr->ChangesFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsStoreGlobalCell() &&
        ComputeGlobalVar(HStoreGlobalCell::cast(instr)->cell(), &index)) {
      result.RemoveFlag(kGlobalVars);
      result.AddSpecial(GlobalVar(index));
    } else {
      for (index = 0; index < kNumberOfGlobalVars; ++index) {
        result.AddSpecial(GlobalVar(index));
      }
    }
  }
  if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsStoreNamedField() &&
        ComputeInobjectField(HStoreNamedField::cast(instr)->access(), &index)) {
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}


SideEffects SideEffectsTracker::ComputeDependsOn(HInstruction* instr) {
  int index;
  SideEffects result(instr->DependsOnFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsLoadGlobalCell() &&
        ComputeGlobalVar(HLoadGlobalCell::cast(instr)->cell(), &index)) {
      result.RemoveFlag(kGlobalVars);
      result.AddSpecial(GlobalVar(index));
    } else {
      for (index = 0; index < kNumberOfGlobalVars; ++index) {
        result.AddSpecial(GlobalVar(index));
      }
    }
  }
  if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsLoadNamedField() &&
        ComputeInobjectField(HLoadNamedField::cast(instr)->access(), &index)) {
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}


OStream& operator<<(OStream& os, const TrackedEffects& te) {
  SideEffectsTracker* t = te.tracker;
  const char* separator = "";
  os << "[";
  for (int bit = 0; bit < kNumberOfFlags; ++bit) {
    GVNFlag flag = GVNFlagFromInt(bit);
    if (te.effects.ContainsFlag(flag)) {
      os << separator;
      separator = ", ";
      switch (flag) {
#define DECLARE_FLAG(Type) \
        case k##Type:      \
          os << #Type;     \
          break;
GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
        default:
          break;
      }
    }
  }
  for (int index = 0; index < t->num_global_vars_; ++index) {
    if (te.effects.ContainsSpecial(t->GlobalVar(index))) {
      os << separator << "[" << *t->global_vars_[index].handle() << "]";
      separator = ", ";
    }
  }
  for (int index = 0; index < t->num_inobject_fields_; ++index) {
    if (te.effects.ContainsSpecial(t->InobjectField(index))) {
      os << separator << t->inobject_fields_[index];
      separator = ", ";
    }
  }
  os << "]";
  return os;
}


bool SideEffectsTracker::ComputeGlobalVar(Unique<Cell> cell, int* index) {
  for (int i = 0; i < num_global_vars_; ++i) {
    if (cell == global_vars_[i]) {
      *index = i;
      return true;
    }
  }
  if (num_global_vars_ < kNumberOfGlobalVars) {
    if (FLAG_trace_gvn) {
      OFStream os(stdout);
      os << "Tracking global var [" << *cell.handle() << "] "
         << "(mapped to index " << num_global_vars_ << ")" << endl;
    }
    *index = num_global_vars_;
    global_vars_[num_global_vars_++] = cell;
    return true;
  }
  return false;
}


bool SideEffectsTracker::ComputeInobjectField(HObjectAccess access,
                                              int* index) {
  for (int i = 0; i < num_inobject_fields_; ++i) {
    if (access.Equals(inobject_fields_[i])) {
      *index = i;
      return true;
    }
  }
  if (num_inobject_fields_ < kNumberOfInobjectFields) {
    if (FLAG_trace_gvn) {
      OFStream os(stdout);
      os << "Tracking inobject field access " << access << " (mapped to index "
         << num_inobject_fields_ << ")" << endl;
    }
    *index = num_inobject_fields_;
    inobject_fields_[num_inobject_fields_++] = access;
    return true;
  }
  return false;
}


HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
    : HPhase("H_Global value numbering", graph),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), zone()),
      loop_side_effects_(graph->blocks()->length(), zone()),
      visited_on_paths_(graph->blocks()->length(), zone()) {
  DCHECK(!AllowHandleAllocation::IsAllowed());
  block_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
  loop_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
}


void HGlobalValueNumberingPhase::Run() {
  DCHECK(!removed_side_effects_);
  for (int i = FLAG_gvn_iterations; i > 0; --i) {
    // Compute the side effects.
    ComputeBlockSideEffects();

    // Perform loop invariant code motion if requested.
    if (FLAG_loop_invariant_code_motion) LoopInvariantCodeMotion();

    // Perform the actual value numbering.
    AnalyzeGraph();

    // Continue GVN if we removed any side effects.
    if (!removed_side_effects_) break;
    removed_side_effects_ = false;

    // Clear all side effects.
    DCHECK_EQ(block_side_effects_.length(), graph()->blocks()->length());
    DCHECK_EQ(loop_side_effects_.length(), graph()->blocks()->length());
    for (int i = 0; i < graph()->blocks()->length(); ++i) {
      block_side_effects_[i].RemoveAll();
      loop_side_effects_[i].RemoveAll();
    }
    visited_on_paths_.Clear();
  }
}


void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph()->blocks()->at(i);
    SideEffects side_effects;
    if (block->IsReachable() && !block->IsDeoptimizing()) {
      int id = block->block_id();
      for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
        HInstruction* instr = it.Current();
        side_effects.Add(side_effects_tracker_.ComputeChanges(instr));
      }
      block_side_effects_[id].Add(side_effects);

      // Loop headers are part of their loop.
      if (block->IsLoopHeader()) {
        loop_side_effects_[id].Add(side_effects);
      }

      // Propagate loop side effects upwards.
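      // Every enclosing loop also contains this block, so add its effects
      // (for a loop header, the accumulated loop effects) to each loop
      // header up the parent chain.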
      if (block->HasParentLoopHeader()) {
        HBasicBlock* with_parent = block;
        if (block->IsLoopHeader()) side_effects = loop_side_effects_[id];
        do {
          HBasicBlock* parent_block = with_parent->parent_loop_header();
          loop_side_effects_[parent_block->block_id()].Add(side_effects);
          with_parent = parent_block;
        } while (with_parent->HasParentLoopHeader());
      }
    }
  }
}


void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph()->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    if (block->IsLoopHeader()) {
      SideEffects side_effects = loop_side_effects_[block->block_id()];
      if (FLAG_trace_gvn) {
        OFStream os(stdout);
        os << "Try loop invariant motion for " << *block << " changes "
           << Print(side_effects) << endl;
      }
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects);
      }
    }
  }
}


void HGlobalValueNumberingPhase::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    SideEffects loop_kills) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  if (FLAG_trace_gvn) {
    OFStream os(stdout);
    os << "Loop invariant code motion for " << *block << " depends on "
       << Print(loop_kills) << endl;
  }
  HInstruction* instr = block->first();
  while (instr != NULL) {
    HInstruction* next = instr->next();
    if (instr->CheckFlag(HValue::kUseGVN)) {
      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      SideEffects depends_on = side_effects_tracker_.ComputeDependsOn(instr);
      if (FLAG_trace_gvn) {
        OFStream os(stdout);
        os << "Checking instruction i" << instr->id() << " ("
           << instr->Mnemonic() << ") changes " << Print(changes)
           << ", depends on " << Print(depends_on) << ". Loop changes "
           << Print(loop_kills) << endl;
      }
      bool can_hoist = !depends_on.ContainsAnyOf(loop_kills);
      if (can_hoist && !graph()->use_optimistic_licm()) {
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_2("Hoisting loop invariant instruction i%d to block B%d\n",
                      instr->id(), pre_header->block_id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
        }
      }
    }
    instr = next;
  }
}


bool HGlobalValueNumberingPhase::AllowCodeMotion() {
  return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
}


bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
                                            HBasicBlock* loop_header) {
  // If we've disabled code motion or we're in a block that unconditionally
  // deoptimizes, don't move any instructions.
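  // Instructions in unreachable blocks are not moved either.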
  return AllowCodeMotion() && !instr->block()->IsDeoptimizing() &&
         instr->block()->IsReachable();
}


SideEffects
HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  SideEffects side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        !visited_on_paths_.Contains(block->block_id())) {
      visited_on_paths_.Add(block->block_id());
      side_effects.Add(block_side_effects_[block->block_id()]);
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}


// Each instance of this class is like a "stack frame" for the recursive
// traversal of the dominator tree done during GVN (the stack is handled
// as a doubly linked list).
// We reuse frames when possible, so the list length is limited by the depth
// of the dominator tree, but this forces us to initialize each frame by
// calling an explicit "Initialize" method instead of using a constructor.
class GvnBasicBlockState: public ZoneObject {
 public:
  static GvnBasicBlockState* CreateEntry(Zone* zone,
                                         HBasicBlock* entry_block,
                                         HInstructionMap* entry_map) {
    return new(zone)
        GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
  }

  HBasicBlock* block() { return block_; }
  HInstructionMap* map() { return map_; }
  HSideEffectMap* dominators() { return &dominators_; }

  GvnBasicBlockState* next_in_dominator_tree_traversal(
      Zone* zone,
      HBasicBlock** dominator) {
    // This assignment needs to happen before calling next_dominated() because
    // that call can reuse "this" if we are at the last dominated block.
    *dominator = block();
    GvnBasicBlockState* result = next_dominated(zone);
    if (result == NULL) {
      GvnBasicBlockState* dominator_state = pop();
      if (dominator_state != NULL) {
        // This branch is guaranteed not to return NULL because pop() never
        // returns a state where "is_done() == true".
        *dominator = dominator_state->block();
        result = dominator_state->next_dominated(zone);
      } else {
        // Unnecessary (we are returning NULL) but done for cleanness.
        *dominator = NULL;
      }
    }
    return result;
  }

 private:
  void Initialize(HBasicBlock* block,
                  HInstructionMap* map,
                  HSideEffectMap* dominators,
                  bool copy_map,
                  Zone* zone) {
    block_ = block;
    map_ = copy_map ? map->Copy(zone) : map;
    dominated_index_ = -1;
    length_ = block->dominated_blocks()->length();
    if (dominators != NULL) {
      dominators_ = *dominators;
    }
  }
  bool is_done() { return dominated_index_ >= length_; }

  GvnBasicBlockState(GvnBasicBlockState* previous,
                     HBasicBlock* block,
                     HInstructionMap* map,
                     HSideEffectMap* dominators,
                     Zone* zone)
      : previous_(previous), next_(NULL) {
    Initialize(block, map, dominators, true, zone);
  }

  GvnBasicBlockState* next_dominated(Zone* zone) {
    dominated_index_++;
    if (dominated_index_ == length_ - 1) {
      // No need to copy the map for the last child in the dominator tree.
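      // Every earlier sibling received its own copy, and this frame is reused
      // for the last child, so the parent's map is not needed again.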
      Initialize(block_->dominated_blocks()->at(dominated_index_),
                 map(),
                 dominators(),
                 false,
                 zone);
      return this;
    } else if (dominated_index_ < length_) {
      return push(zone, block_->dominated_blocks()->at(dominated_index_));
    } else {
      return NULL;
    }
  }

  GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
    if (next_ == NULL) {
      next_ =
          new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
    } else {
      next_->Initialize(block, map(), dominators(), true, zone);
    }
    return next_;
  }
  GvnBasicBlockState* pop() {
    GvnBasicBlockState* result = previous_;
    while (result != NULL && result->is_done()) {
      TRACE_GVN_2("Backtracking from block B%d to block B%d\n",
                  block()->block_id(),
                  previous_->block()->block_id())
      result = result->previous_;
    }
    return result;
  }

  GvnBasicBlockState* previous_;
  GvnBasicBlockState* next_;
  HBasicBlock* block_;
  HInstructionMap* map_;
  HSideEffectMap dominators_;
  int dominated_index_;
  int length_;
};


// This is a recursive traversal of the dominator tree but it has been turned
// into a loop to avoid stack overflows.
// The logical "stack frames" of the recursion are kept in a list of
// GvnBasicBlockState instances.
void HGlobalValueNumberingPhase::AnalyzeGraph() {
  HBasicBlock* entry_block = graph()->entry_block();
  HInstructionMap* entry_map =
      new(zone()) HInstructionMap(zone(), &side_effects_tracker_);
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HInstructionMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header, kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
      dominators->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag flag = GVNFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(flag) && other != NULL) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            if (instr->HandleSideEffectDominator(flag, other)) {
              removed_side_effects_ = true;
            }
          }
        }
      }
      // Instruction was unlinked during graph traversal.
      if (!instr->IsLinked()) continue;

      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      if (!changes.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
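        // Kill() only drops map entries whose depends-on set intersects
        // |changes|, so unrelated values remain available for reuse.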
        map->Kill(changes);
        dominators->Store(changes, instr);
        if (FLAG_trace_gvn) {
          OFStream os(stdout);
          os << "Instruction i" << instr->id() << " changes " << Print(changes)
             << endl;
        }
      }
      if (instr->CheckFlag(HValue::kUseGVN) &&
          !instr->CheckFlag(HValue::kCantBeReplaced)) {
        DCHECK(!instr->HasObservableSideEffects());
        HInstruction* other = map->Lookup(instr);
        if (other != NULL) {
          DCHECK(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing instruction i%d (%s) with i%d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(),
                                                  &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HInstructionMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block. We don't have to traverse these paths if the
      // value map and the dominators list are already empty. If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        SideEffects side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}

} }  // namespace v8::internal