// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/hydrogen-gvn.h"

#include "src/crankshaft/hydrogen.h"
#include "src/v8.h"

namespace v8 {
namespace internal {

// A zone-allocated hash map from HInstruction* to the representative
// instruction with the same value number.  Keys are hashed with
// HInstruction::Hashcode() and compared with HInstruction::Equals() (see
// Lookup()).  Entries whose "depends on" side effects intersect a set of
// changed side effects can be removed in bulk with Kill().
class HInstructionMap final : public ZoneObject {
 public:
  HInstructionMap(Zone* zone, SideEffectsTracker* side_effects_tracker)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil),
        side_effects_tracker_(side_effects_tracker) {
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  // Removes every instruction whose "depends on" effects intersect
  // |side_effects|.
  void Kill(SideEffects side_effects);

  // Inserts |instr| and records its "depends on" effects in
  // present_depends_on_, which lets Kill() exit early when no stored
  // instruction can be affected.
  void Add(HInstruction* instr, Zone* zone) {
    present_depends_on_.Add(side_effects_tracker_->ComputeDependsOn(instr));
    Insert(instr, zone);
  }

  // Returns a stored instruction equal to |instr|, or NULL.
  HInstruction* Lookup(HInstruction* instr) const;

  HInstructionMap* Copy(Zone* zone) const {
    return new(zone) HInstructionMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HInstruction* values. Stored in arrays.
  struct HInstructionMapListElement {
    HInstruction* instr;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list

  // Must be a power of 2.
  static const int kInitialSize = 16;

  // Deep-copy constructor used by Copy(); shares the SideEffectsTracker.
  HInstructionMap(Zone* zone, const HInstructionMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HInstruction* instr, Zone* zone);
  // Maps a hash value into [0, array_size_); relies on array_size_ being a
  // power of 2.
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HInstructionMap.
  // Union of ComputeDependsOn() for all stored instructions; a conservative
  // over-approximation used as a fast path in Kill().
  SideEffects present_depends_on_;
  HInstructionMapListElement* array_;
  // Primary store - contains the first value
  // with a given hash.  Colliding elements are stored in linked lists.
  HInstructionMapListElement* lists_;
  // The linked lists containing hash collisions.
  int free_list_head_;  // Unused elements in lists_ are on the free list.
  SideEffectsTracker* side_effects_tracker_;
};


// A fixed-size map from each tracked side-effect kind to the most recent
// dominating instruction that produced that side effect (or NULL).  Used to
// feed HandleSideEffectDominator() during the GVN traversal.
class HSideEffectMap final BASE_EMBEDDED {
 public:
  HSideEffectMap();
  explicit HSideEffectMap(HSideEffectMap* other);
  HSideEffectMap& operator= (const HSideEffectMap& other);

  // Clears the entries for every tracked flag contained in |side_effects|.
  void Kill(SideEffects side_effects);

  // Records |instr| as the dominating instruction for every tracked flag
  // contained in |side_effects|.
  void Store(SideEffects side_effects, HInstruction* instr);

  bool IsEmpty() const { return count_ == 0; }

  inline HInstruction* operator[](int i) const {
    DCHECK(0 <= i);
    DCHECK(i < kNumberOfTrackedSideEffects);
    return data_[i];
  }
  inline HInstruction* at(int i) const { return operator[](i); }

 private:
  int count_;  // Number of non-NULL entries in data_.
  HInstruction* data_[kNumberOfTrackedSideEffects];
};


// printf-style tracing helper; forwards its varargs to base::OS::VPrint.
// Callers are expected to guard with FLAG_trace_gvn via the TRACE_GVN_*
// macros below so the arguments are not evaluated when tracing is off.
void TraceGVN(const char* msg, ...) {
  va_list arguments;
  va_start(arguments, msg);
  base::OS::VPrint(msg, arguments);
  va_end(arguments);
}


// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments
// when --trace-gvn is off.
// NOTE(review): these macros expand to a bare `if` statement; used directly
// under an outer `if`/`else` they would steal the `else` (dangling-else).
// All current call sites are statement position, but a do { } while (0)
// wrapper would be safer — confirm before changing upstream-derived code.
#define TRACE_GVN_1(msg, a1) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1); \
  }

#define TRACE_GVN_2(msg, a1, a2) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2); \
  }

#define TRACE_GVN_3(msg, a1, a2, a3) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3); \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3, a4); \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3, a4, a5); \
  }


// Deep copy: clones both backing arrays (including free-list links, which
// are plain indices and therefore remain valid in the copy).
HInstructionMap::HInstructionMap(Zone* zone, const HInstructionMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_depends_on_(other->present_depends_on_),
      array_(zone->NewArray<HInstructionMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HInstructionMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_),
      side_effects_tracker_(other->side_effects_tracker_) {
  MemCopy(array_, other->array_,
          array_size_ * sizeof(HInstructionMapListElement));
  MemCopy(lists_, other->lists_,
          lists_size_ * sizeof(HInstructionMapListElement));
}


// Removes all instructions whose "depends on" effects intersect |changes|,
// and rebuilds present_depends_on_ from the survivors.
void HInstructionMap::Kill(SideEffects changes) {
  // Fast path: nothing stored can depend on any of the changed effects.
  if (!present_depends_on_.ContainsAnyOf(changes)) return;
  present_depends_on_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HInstruction* instr = array_[i].instr;
    if (instr != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        // NOTE: deliberately shadows the outer |instr|; the outer value is
        // re-read from array_[i] after this loop.
        HInstruction* instr = lists_[current].instr;
        SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
        if (depends_on.ContainsAnyOf(changes)) {
          // Drop it: return the list node to the free list.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it: relink into the (reversed) kept list.
          lists_[current].next = kept;
          kept = current;
          present_depends_on_.Add(depends_on);
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      instr = array_[i].instr;
      SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
      if (depends_on.ContainsAnyOf(changes)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].instr = NULL;
        } else {
          // Promote the first collision-list element into the primary slot
          // and free its list node.
          array_[i].instr = lists_[head].instr;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_depends_on_.Add(depends_on);  // Keep it.
      }
    }
  }
}


// Finds a stored instruction equal to |instr| (same hash bucket, then
// Equals() on the primary slot and along the collision list).
HInstruction* HInstructionMap::Lookup(HInstruction* instr) const {
  uint32_t hash = static_cast<uint32_t>(instr->Hashcode());
  uint32_t pos = Bound(hash);
  if (array_[pos].instr != NULL) {
    if (array_[pos].instr->Equals(instr)) return array_[pos].instr;
    int next = array_[pos].next;
    while (next != kNil) {
      if (lists_[next].instr->Equals(instr)) return lists_[next].instr;
      next = lists_[next].next;
    }
  }
  return NULL;
}


// Grows the primary array to |new_size| and rehashes every stored
// instruction (both primary slots and collision-list entries).
void HInstructionMap::Resize(int new_size, Zone* zone) {
  DCHECK(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HInstructionMapListElement* new_array =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_array, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_array = array_;
  int old_size = array_size_;

  int old_count = count_;
  count_ = 0;  // Insert() below re-counts every element.
  // Do not modify present_depends_on_.  It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].instr != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          // Free the node before reinserting so Insert() can reuse it.
          Insert(lists_[current].instr, zone);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored instruction.
        Insert(old_array[i].instr, zone);
      }
    }
  }
  USE(old_count);
  DCHECK(count_ == old_count);
}


// Grows the collision-list array to |new_size|, preserving existing nodes
// and threading the new tail slots onto the free list.
void HInstructionMap::ResizeLists(int new_size, Zone* zone) {
  DCHECK(new_size > lists_size_);

  HInstructionMapListElement* new_lists =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_lists, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_lists = lists_;
  int old_size = lists_size_;

  lists_size_ = new_size;
  lists_ = new_lists;

  if (old_lists != NULL) {
    MemCopy(lists_, old_lists, old_size * sizeof(HInstructionMapListElement));
  }
  for (int i = old_size; i < lists_size_; ++i) {
    lists_[i].next = free_list_head_;
    free_list_head_ = i;
  }
}


// Inserts |instr| without checking for duplicates; grows the primary array
// at 50% load and the collision array when the free list is exhausted.
void HInstructionMap::Insert(HInstruction* instr, Zone* zone) {
  DCHECK(instr != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  DCHECK(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(instr->Hashcode()));
  if (array_[pos].instr == NULL) {
    array_[pos].instr = instr;
    array_[pos].next = kNil;
  } else {
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    // Pop a node off the free list and prepend it to the bucket's chain.
    int new_element_pos = free_list_head_;
    DCHECK(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].instr = instr;
    lists_[new_element_pos].next = array_[pos].next;
    DCHECK(array_[pos].next == kNil || lists_[array_[pos].next].instr != NULL);
    array_[pos].next = new_element_pos;
  }
}


HSideEffectMap::HSideEffectMap() : count_(0) {
  memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
}


HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
}


// NOTE(review): operator= copies data_ but not count_; the copy constructor
// above initializes count_ separately.  A standalone assignment between maps
// with different counts would leave count_ stale — confirm this is only ever
// used from the copy constructor path.
HSideEffectMap& HSideEffectMap::operator=(const HSideEffectMap& other) {
  if (this != &other) {
    MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
  }
  return *this;
}


// Clears the dominator entry for every tracked flag present in
// |side_effects|, keeping count_ in sync.
void HSideEffectMap::Kill(SideEffects side_effects) {
  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
    if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
      if (data_[i] != NULL) count_--;
      data_[i] = NULL;
    }
  }
}


// Records |instr| as the dominating instruction for every tracked flag
// present in |side_effects|, keeping count_ in sync.
void HSideEffectMap::Store(SideEffects side_effects, HInstruction* instr) {
  for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
    if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
      if (data_[i] == NULL) count_++;
      data_[i] = instr;
    }
  }
}


// Computes the side effects produced by |instr|, refining the coarse
// kGlobalVars / kInobjectFields flags into per-variable / per-field special
// effects when the exact target is known; otherwise conservatively adds all
// special effects of that category.
SideEffects SideEffectsTracker::ComputeChanges(HInstruction* instr) {
  int index;
  SideEffects result(instr->ChangesFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsStoreNamedField()) {
      HStoreNamedField* store = HStoreNamedField::cast(instr);
      // NOTE(review): assumes the store's object is an HConstant here —
      // presumably guaranteed when a store carries kGlobalVars; confirm.
      HConstant* target = HConstant::cast(store->object());
      if (ComputeGlobalVar(Unique<PropertyCell>::cast(target->GetUnique()),
                           &index)) {
        result.RemoveFlag(kGlobalVars);
        result.AddSpecial(GlobalVar(index));
        return result;
      }
    }
    for (index = 0; index < kNumberOfGlobalVars; ++index) {
      result.AddSpecial(GlobalVar(index));
    }
  } else if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsStoreNamedField() &&
        ComputeInobjectField(HStoreNamedField::cast(instr)->access(), &index)) {
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}


// Mirror of ComputeChanges() for the effects |instr| depends on, refining
// loads instead of stores.
SideEffects SideEffectsTracker::ComputeDependsOn(HInstruction* instr) {
  int index;
  SideEffects result(instr->DependsOnFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsLoadNamedField()) {
      HLoadNamedField* load = HLoadNamedField::cast(instr);
      // NOTE(review): assumes the load's object is an HConstant here —
      // presumably guaranteed when a load carries kGlobalVars; confirm.
      HConstant* target = HConstant::cast(load->object());
      if (ComputeGlobalVar(Unique<PropertyCell>::cast(target->GetUnique()),
                           &index)) {
        result.RemoveFlag(kGlobalVars);
        result.AddSpecial(GlobalVar(index));
        return result;
      }
    }
    for (index = 0; index < kNumberOfGlobalVars; ++index) {
      result.AddSpecial(GlobalVar(index));
    }
  } else if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsLoadNamedField() &&
        ComputeInobjectField(HLoadNamedField::cast(instr)->access(), &index)) {
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}


// Prints a human-readable, comma-separated list of the flags and special
// (global-var / in-object-field) effects contained in |te.effects|.
std::ostream& operator<<(std::ostream& os, const TrackedEffects& te) {
  SideEffectsTracker* t = te.tracker;
  const char* separator = "";
  os << "[";
  for (int bit = 0; bit < kNumberOfFlags; ++bit) {
    GVNFlag flag = GVNFlagFromInt(bit);
    if (te.effects.ContainsFlag(flag)) {
      os << separator;
      separator = ", ";
      switch (flag) {
#define DECLARE_FLAG(Type) \
        case k##Type:      \
          os << #Type;     \
          break;
GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
        default:
            break;
      }
    }
  }
  for (int index = 0; index < t->num_global_vars_; ++index) {
    if (te.effects.ContainsSpecial(t->GlobalVar(index))) {
      os << separator << "[" << *t->global_vars_[index].handle() << "]";
      separator = ", ";
    }
  }
  for (int index = 0; index < t->num_inobject_fields_; ++index) {
    if (te.effects.ContainsSpecial(t->InobjectField(index))) {
      os << separator << t->inobject_fields_[index];
      separator = ", ";
    }
  }
  os << "]";
  return os;
}


// Returns the tracking index for |cell| in *index, assigning a new slot if
// there is still room (at most kNumberOfGlobalVars cells are tracked).
// Returns false when the table is full and the cell is not yet tracked.
bool SideEffectsTracker::ComputeGlobalVar(Unique<PropertyCell> cell,
                                          int* index) {
  for (int i = 0; i < num_global_vars_; ++i) {
    if (cell == global_vars_[i]) {
      *index = i;
      return true;
    }
  }
  if (num_global_vars_ < kNumberOfGlobalVars) {
    if (FLAG_trace_gvn) {
      OFStream os(stdout);
      os << "Tracking global var [" << *cell.handle() << "] "
         << "(mapped to index " << num_global_vars_ << ")" << std::endl;
    }
    *index = num_global_vars_;
    global_vars_[num_global_vars_++] = cell;
    return true;
  }
  return false;
}


// Same as ComputeGlobalVar() but for in-object field accesses, keyed by
// HObjectAccess::Equals().
bool SideEffectsTracker::ComputeInobjectField(HObjectAccess access,
                                              int* index) {
  for (int i = 0; i < num_inobject_fields_; ++i) {
    if (access.Equals(inobject_fields_[i])) {
      *index = i;
      return true;
    }
  }
  if (num_inobject_fields_ < kNumberOfInobjectFields) {
    if (FLAG_trace_gvn) {
      OFStream os(stdout);
      os << "Tracking inobject field access " << access << " (mapped to index "
         << num_inobject_fields_ << ")" << std::endl;
    }
    *index = num_inobject_fields_;
    inobject_fields_[num_inobject_fields_++] = access;
    return true;
  }
  return false;
}


// Pre-sizes the per-block and per-loop side-effect tables to one entry per
// basic block.  Handle allocation must be disallowed for the whole phase.
HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
    : HPhase("H_Global value numbering", graph),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), zone()),
      loop_side_effects_(graph->blocks()->length(), zone()),
      visited_on_paths_(graph->blocks()->length(), zone()) {
  DCHECK(!AllowHandleAllocation::IsAllowed());
  block_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
  loop_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
}


// Runs up to FLAG_gvn_iterations rounds of side-effect computation, LICM and
// value numbering, repeating while a round removed side effects (which can
// expose further redundancies).
void HGlobalValueNumberingPhase::Run() {
  DCHECK(!removed_side_effects_);
  for (int i = FLAG_gvn_iterations; i > 0; --i) {
    // Compute the side effects.
    ComputeBlockSideEffects();

    // Perform loop invariant code motion if requested.
    if (FLAG_loop_invariant_code_motion) LoopInvariantCodeMotion();

    // Perform the actual value numbering.
    AnalyzeGraph();

    // Continue GVN if we removed any side effects.
    if (!removed_side_effects_) break;
    removed_side_effects_ = false;

    // Clear all side effects.
    DCHECK_EQ(block_side_effects_.length(), graph()->blocks()->length());
    DCHECK_EQ(loop_side_effects_.length(), graph()->blocks()->length());
    // Note: this inner |i| shadows the iteration counter above.
    for (int i = 0; i < graph()->blocks()->length(); ++i) {
      block_side_effects_[i].RemoveAll();
      loop_side_effects_[i].RemoveAll();
    }
    visited_on_paths_.Clear();
  }
}


// Fills block_side_effects_ with each reachable block's changes and
// accumulates loop_side_effects_ for every enclosing loop header.  Iterates
// blocks in reverse id order so inner loops are summarized before their
// enclosing loops are visited.
void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph()->blocks()->at(i);
    SideEffects side_effects;
    if (block->IsReachable() && !block->IsDeoptimizing()) {
      int id = block->block_id();
      for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
        HInstruction* instr = it.Current();
        side_effects.Add(side_effects_tracker_.ComputeChanges(instr));
      }
      block_side_effects_[id].Add(side_effects);

      // Loop headers are part of their loop.
      if (block->IsLoopHeader()) {
        loop_side_effects_[id].Add(side_effects);
      }

      // Propagate loop side effects upwards.
      if (block->HasParentLoopHeader()) {
        HBasicBlock* with_parent = block;
        // For a loop header, propagate the whole loop's accumulated effects,
        // not just this block's own.
        if (block->IsLoopHeader()) side_effects = loop_side_effects_[id];
        do {
          HBasicBlock* parent_block = with_parent->parent_loop_header();
          loop_side_effects_[parent_block->block_id()].Add(side_effects);
          with_parent = parent_block;
        } while (with_parent->HasParentLoopHeader());
      }
    }
  }
}


// Visits every loop (header found by reverse block-id scan) and tries to
// hoist invariant instructions out of each of its blocks.
void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph()->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    if (block->IsLoopHeader()) {
      SideEffects side_effects = loop_side_effects_[block->block_id()];
      if (FLAG_trace_gvn) {
        OFStream os(stdout);
        os << "Try loop invariant motion for " << *block << " changes "
           << Print(side_effects) << std::endl;
      }
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      // Loop bodies occupy the contiguous id range [header, last back edge].
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects);
      }
    }
  }
}


// Hoists GVN-able instructions of |block| into the loop pre-header when
// their dependencies do not intersect the loop's killed effects
// (|loop_kills|) and all their operands are defined before the pre-header.
void HGlobalValueNumberingPhase::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    SideEffects loop_kills) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  if (FLAG_trace_gvn) {
    OFStream os(stdout);
    os << "Loop invariant code motion for " << *block << " depends on "
       << Print(loop_kills) << std::endl;
  }
  HInstruction* instr = block->first();
  while (instr != NULL) {
    // Capture the successor first: hoisting unlinks |instr| from this block.
    HInstruction* next = instr->next();
    if (instr->CheckFlag(HValue::kUseGVN)) {
      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      SideEffects depends_on = side_effects_tracker_.ComputeDependsOn(instr);
      if (FLAG_trace_gvn) {
        OFStream os(stdout);
        os << "Checking instruction i" << instr->id() << " ("
           << instr->Mnemonic() << ") changes " << Print(changes)
           << ", depends on " << Print(depends_on) << ". Loop changes "
           << Print(loop_kills) << std::endl;
      }
      bool can_hoist = !depends_on.ContainsAnyOf(loop_kills);
      if (can_hoist && !graph()->use_optimistic_licm()) {
        // Without optimistic LICM only hoist from blocks that dominate all
        // loop exits, so the instruction was guaranteed to execute anyway.
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_2("Hoisting loop invariant instruction i%d to block B%d\n",
                      instr->id(), pre_header->block_id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
        }
      }
    }
    instr = next;
  }
}


bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
                                            HBasicBlock* loop_header) {
  // If we've disabled code motion or we're in a block that unconditionally
  // deoptimizes, don't move any instructions.
  return graph()->allow_code_motion() && !instr->block()->IsDeoptimizing() &&
         instr->block()->IsReachable();
}


// Accumulates the side effects of every block on any path strictly between
// |dominator| and |dominated| (blocks with ids in that open range reachable
// backwards from |dominated|'s predecessors).  visited_on_paths_ must be
// cleared by the caller before the outermost call; it prevents revisiting
// blocks across the recursion.
SideEffects
HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  SideEffects side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        !visited_on_paths_.Contains(block->block_id())) {
      visited_on_paths_.Add(block->block_id());
      side_effects.Add(block_side_effects_[block->block_id()]);
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}


// Each instance of this class is like a "stack frame" for the recursive
// traversal of the dominator tree done during GVN (the stack is handled
// as a double linked list).
// We reuse frames when possible so the list length is limited by the depth
// of the dominator tree but this forces us to initialize each frame calling
// an explicit "Initialize" method instead of a using constructor.
class GvnBasicBlockState: public ZoneObject {
 public:
  // Creates the frame for the traversal root; it owns |entry_map| directly
  // (the private constructor copies maps for all other frames).
  static GvnBasicBlockState* CreateEntry(Zone* zone,
                                         HBasicBlock* entry_block,
                                         HInstructionMap* entry_map) {
    return new(zone)
        GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
  }

  HBasicBlock* block() { return block_; }
  HInstructionMap* map() { return map_; }
  HSideEffectMap* dominators() { return &dominators_; }

  // Advances the traversal: returns the frame for the next block to visit
  // (possibly this frame, re-initialized) or NULL when the traversal is
  // complete.  *dominator is set to the immediate dominator of the returned
  // frame's block.
  GvnBasicBlockState* next_in_dominator_tree_traversal(
      Zone* zone,
      HBasicBlock** dominator) {
    // This assignment needs to happen before calling next_dominated() because
    // that call can reuse "this" if we are at the last dominated block.
    *dominator = block();
    GvnBasicBlockState* result = next_dominated(zone);
    if (result == NULL) {
      GvnBasicBlockState* dominator_state = pop();
      if (dominator_state != NULL) {
        // This branch is guaranteed not to return NULL because pop() never
        // returns a state where "is_done() == true".
        *dominator = dominator_state->block();
        result = dominator_state->next_dominated(zone);
      } else {
        // Unnecessary (we are returning NULL) but done for cleanness.
        *dominator = NULL;
      }
    }
    return result;
  }

 private:
  // (Re-)initializes this frame for |block|.  When |copy_map| is false the
  // frame takes over |map| directly instead of cloning it — only safe for
  // the last dominated child, whose siblings have already been processed.
  void Initialize(HBasicBlock* block,
                  HInstructionMap* map,
                  HSideEffectMap* dominators,
                  bool copy_map,
                  Zone* zone) {
    block_ = block;
    map_ = copy_map ? map->Copy(zone) : map;
    dominated_index_ = -1;
    length_ = block->dominated_blocks()->length();
    if (dominators != NULL) {
      dominators_ = *dominators;
    }
  }
  // True once every dominated child of this frame's block has been visited.
  bool is_done() { return dominated_index_ >= length_; }

  GvnBasicBlockState(GvnBasicBlockState* previous,
                     HBasicBlock* block,
                     HInstructionMap* map,
                     HSideEffectMap* dominators,
                     Zone* zone)
      : previous_(previous), next_(NULL) {
    Initialize(block, map, dominators, true, zone);
  }

  // Returns the frame for the next dominated child of block_, reusing
  // "this" (without copying the map) for the last child, or NULL when all
  // children have been visited.
  GvnBasicBlockState* next_dominated(Zone* zone) {
    dominated_index_++;
    if (dominated_index_ == length_ - 1) {
      // No need to copy the map for the last child in the dominator tree.
      Initialize(block_->dominated_blocks()->at(dominated_index_),
                 map(),
                 dominators(),
                 false,
                 zone);
      return this;
    } else if (dominated_index_ < length_) {
      return push(zone, block_->dominated_blocks()->at(dominated_index_));
    } else {
      return NULL;
    }
  }

  // Descends into |block|, reusing an already-allocated successor frame
  // when one exists from a previous descent.
  GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
    if (next_ == NULL) {
      next_ =
          new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
    } else {
      next_->Initialize(block, map(), dominators(), true, zone);
    }
    return next_;
  }
  // Walks back up the frame list, skipping fully-processed frames; returns
  // the nearest ancestor frame with unvisited children, or NULL.
  // NOTE(review): the trace message always names this frame's block and
  // previous_'s block, even after several skips — presumably intentional
  // since it is trace-only output; no semicolon is needed after TRACE_GVN_2
  // because the macro expands to a complete if-block.
  GvnBasicBlockState* pop() {
    GvnBasicBlockState* result = previous_;
    while (result != NULL && result->is_done()) {
      TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
                  block()->block_id(),
                  previous_->block()->block_id())
      result = result->previous_;
    }
    return result;
  }

  GvnBasicBlockState* previous_;
  GvnBasicBlockState* next_;
  HBasicBlock* block_;
  HInstructionMap* map_;
  HSideEffectMap dominators_;
  int dominated_index_;
  int length_;
};


// This is a recursive traversal of the dominator tree but it has been turned
// into a loop to avoid stack overflows.
// The logical "stack frames" of the recursion are kept in a list of
// GvnBasicBlockState instances.
void HGlobalValueNumberingPhase::AnalyzeGraph() {
  HBasicBlock* entry_block = graph()->entry_block();
  HInstructionMap* entry_map =
      new(zone()) HInstructionMap(zone(), &side_effects_tracker_);
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HInstructionMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
      dominators->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        // Give the instruction a chance to simplify against the most recent
        // dominating producer of each tracked side effect it depends on.
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag flag = GVNFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(flag) && other != NULL) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            if (instr->HandleSideEffectDominator(flag, other)) {
              removed_side_effects_ = true;
            }
          }
        }
      }
      // Instruction was unlinked during graph traversal.
      if (!instr->IsLinked()) continue;

      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      if (!changes.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(changes);
        dominators->Store(changes, instr);
        if (FLAG_trace_gvn) {
          OFStream os(stdout);
          os << "Instruction i" << instr->id() << " changes " << Print(changes)
             << std::endl;
        }
      }
      if (instr->CheckFlag(HValue::kUseGVN) &&
          !instr->CheckFlag(HValue::kCantBeReplaced)) {
        DCHECK(!instr->HasObservableSideEffects());
        // Replace with an equal dominating instruction if one exists,
        // otherwise remember this one as the representative.
        HInstruction* other = map->Lookup(instr);
        if (other != NULL) {
          DCHECK(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing instruction i%d (%s) with i%d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(),
                                                  &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HInstructionMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block.  We don't have to traverse these paths if the
      // value map and the dominators list is already empty.  If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        SideEffects side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}

}  // namespace internal
}  // namespace v8