// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 28 #include "hydrogen.h" 29 #include "hydrogen-gvn.h" 30 #include "v8.h" 31 32 namespace v8 { 33 namespace internal { 34 35 class HValueMap: public ZoneObject { 36 public: 37 explicit HValueMap(Zone* zone) 38 : array_size_(0), 39 lists_size_(0), 40 count_(0), 41 present_flags_(0), 42 array_(NULL), 43 lists_(NULL), 44 free_list_head_(kNil) { 45 ResizeLists(kInitialSize, zone); 46 Resize(kInitialSize, zone); 47 } 48 49 void Kill(GVNFlagSet flags); 50 51 void Add(HValue* value, Zone* zone) { 52 present_flags_.Add(value->gvn_flags()); 53 Insert(value, zone); 54 } 55 56 HValue* Lookup(HValue* value) const; 57 58 HValueMap* Copy(Zone* zone) const { 59 return new(zone) HValueMap(zone, this); 60 } 61 62 bool IsEmpty() const { return count_ == 0; } 63 64 private: 65 // A linked list of HValue* values. Stored in arrays. 66 struct HValueMapListElement { 67 HValue* value; 68 int next; // Index in the array of the next list element. 69 }; 70 static const int kNil = -1; // The end of a linked list 71 72 // Must be a power of 2. 73 static const int kInitialSize = 16; 74 75 HValueMap(Zone* zone, const HValueMap* other); 76 77 void Resize(int new_size, Zone* zone); 78 void ResizeLists(int new_size, Zone* zone); 79 void Insert(HValue* value, Zone* zone); 80 uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); } 81 82 int array_size_; 83 int lists_size_; 84 int count_; // The number of values stored in the HValueMap. 85 GVNFlagSet present_flags_; // All flags that are in any value in the 86 // HValueMap. 87 HValueMapListElement* array_; // Primary store - contains the first value 88 // with a given hash. Colliding elements are stored in linked lists. 89 HValueMapListElement* lists_; // The linked lists containing hash collisions. 90 int free_list_head_; // Unused elements in lists_ are on the free list. 
91 }; 92 93 94 class HSideEffectMap BASE_EMBEDDED { 95 public: 96 HSideEffectMap(); 97 explicit HSideEffectMap(HSideEffectMap* other); 98 HSideEffectMap& operator= (const HSideEffectMap& other); 99 100 void Kill(GVNFlagSet flags); 101 102 void Store(GVNFlagSet flags, HInstruction* instr); 103 104 bool IsEmpty() const { return count_ == 0; } 105 106 inline HInstruction* operator[](int i) const { 107 ASSERT(0 <= i); 108 ASSERT(i < kNumberOfTrackedSideEffects); 109 return data_[i]; 110 } 111 inline HInstruction* at(int i) const { return operator[](i); } 112 113 private: 114 int count_; 115 HInstruction* data_[kNumberOfTrackedSideEffects]; 116 }; 117 118 119 void TraceGVN(const char* msg, ...) { 120 va_list arguments; 121 va_start(arguments, msg); 122 OS::VPrint(msg, arguments); 123 va_end(arguments); 124 } 125 126 127 // Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when 128 // --trace-gvn is off. 129 #define TRACE_GVN_1(msg, a1) \ 130 if (FLAG_trace_gvn) { \ 131 TraceGVN(msg, a1); \ 132 } 133 134 #define TRACE_GVN_2(msg, a1, a2) \ 135 if (FLAG_trace_gvn) { \ 136 TraceGVN(msg, a1, a2); \ 137 } 138 139 #define TRACE_GVN_3(msg, a1, a2, a3) \ 140 if (FLAG_trace_gvn) { \ 141 TraceGVN(msg, a1, a2, a3); \ 142 } 143 144 #define TRACE_GVN_4(msg, a1, a2, a3, a4) \ 145 if (FLAG_trace_gvn) { \ 146 TraceGVN(msg, a1, a2, a3, a4); \ 147 } 148 149 #define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \ 150 if (FLAG_trace_gvn) { \ 151 TraceGVN(msg, a1, a2, a3, a4, a5); \ 152 } 153 154 155 HValueMap::HValueMap(Zone* zone, const HValueMap* other) 156 : array_size_(other->array_size_), 157 lists_size_(other->lists_size_), 158 count_(other->count_), 159 present_flags_(other->present_flags_), 160 array_(zone->NewArray<HValueMapListElement>(other->array_size_)), 161 lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)), 162 free_list_head_(other->free_list_head_) { 163 OS::MemCopy( 164 array_, other->array_, array_size_ * sizeof(HValueMapListElement)); 165 
OS::MemCopy( 166 lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement)); 167 } 168 169 170 void HValueMap::Kill(GVNFlagSet flags) { 171 GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags); 172 if (!present_flags_.ContainsAnyOf(depends_flags)) return; 173 present_flags_.RemoveAll(); 174 for (int i = 0; i < array_size_; ++i) { 175 HValue* value = array_[i].value; 176 if (value != NULL) { 177 // Clear list of collisions first, so we know if it becomes empty. 178 int kept = kNil; // List of kept elements. 179 int next; 180 for (int current = array_[i].next; current != kNil; current = next) { 181 next = lists_[current].next; 182 HValue* value = lists_[current].value; 183 if (value->gvn_flags().ContainsAnyOf(depends_flags)) { 184 // Drop it. 185 count_--; 186 lists_[current].next = free_list_head_; 187 free_list_head_ = current; 188 } else { 189 // Keep it. 190 lists_[current].next = kept; 191 kept = current; 192 present_flags_.Add(value->gvn_flags()); 193 } 194 } 195 array_[i].next = kept; 196 197 // Now possibly drop directly indexed element. 198 value = array_[i].value; 199 if (value->gvn_flags().ContainsAnyOf(depends_flags)) { // Drop it. 200 count_--; 201 int head = array_[i].next; 202 if (head == kNil) { 203 array_[i].value = NULL; 204 } else { 205 array_[i].value = lists_[head].value; 206 array_[i].next = lists_[head].next; 207 lists_[head].next = free_list_head_; 208 free_list_head_ = head; 209 } 210 } else { 211 present_flags_.Add(value->gvn_flags()); // Keep it. 
212 } 213 } 214 } 215 } 216 217 218 HValue* HValueMap::Lookup(HValue* value) const { 219 uint32_t hash = static_cast<uint32_t>(value->Hashcode()); 220 uint32_t pos = Bound(hash); 221 if (array_[pos].value != NULL) { 222 if (array_[pos].value->Equals(value)) return array_[pos].value; 223 int next = array_[pos].next; 224 while (next != kNil) { 225 if (lists_[next].value->Equals(value)) return lists_[next].value; 226 next = lists_[next].next; 227 } 228 } 229 return NULL; 230 } 231 232 233 void HValueMap::Resize(int new_size, Zone* zone) { 234 ASSERT(new_size > count_); 235 // Hashing the values into the new array has no more collisions than in the 236 // old hash map, so we can use the existing lists_ array, if we are careful. 237 238 // Make sure we have at least one free element. 239 if (free_list_head_ == kNil) { 240 ResizeLists(lists_size_ << 1, zone); 241 } 242 243 HValueMapListElement* new_array = 244 zone->NewArray<HValueMapListElement>(new_size); 245 memset(new_array, 0, sizeof(HValueMapListElement) * new_size); 246 247 HValueMapListElement* old_array = array_; 248 int old_size = array_size_; 249 250 int old_count = count_; 251 count_ = 0; 252 // Do not modify present_flags_. It is currently correct. 253 array_size_ = new_size; 254 array_ = new_array; 255 256 if (old_array != NULL) { 257 // Iterate over all the elements in lists, rehashing them. 258 for (int i = 0; i < old_size; ++i) { 259 if (old_array[i].value != NULL) { 260 int current = old_array[i].next; 261 while (current != kNil) { 262 Insert(lists_[current].value, zone); 263 int next = lists_[current].next; 264 lists_[current].next = free_list_head_; 265 free_list_head_ = current; 266 current = next; 267 } 268 // Rehash the directly stored value. 
269 Insert(old_array[i].value, zone); 270 } 271 } 272 } 273 USE(old_count); 274 ASSERT(count_ == old_count); 275 } 276 277 278 void HValueMap::ResizeLists(int new_size, Zone* zone) { 279 ASSERT(new_size > lists_size_); 280 281 HValueMapListElement* new_lists = 282 zone->NewArray<HValueMapListElement>(new_size); 283 memset(new_lists, 0, sizeof(HValueMapListElement) * new_size); 284 285 HValueMapListElement* old_lists = lists_; 286 int old_size = lists_size_; 287 288 lists_size_ = new_size; 289 lists_ = new_lists; 290 291 if (old_lists != NULL) { 292 OS::MemCopy(lists_, old_lists, old_size * sizeof(HValueMapListElement)); 293 } 294 for (int i = old_size; i < lists_size_; ++i) { 295 lists_[i].next = free_list_head_; 296 free_list_head_ = i; 297 } 298 } 299 300 301 void HValueMap::Insert(HValue* value, Zone* zone) { 302 ASSERT(value != NULL); 303 // Resizing when half of the hashtable is filled up. 304 if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone); 305 ASSERT(count_ < array_size_); 306 count_++; 307 uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode())); 308 if (array_[pos].value == NULL) { 309 array_[pos].value = value; 310 array_[pos].next = kNil; 311 } else { 312 if (free_list_head_ == kNil) { 313 ResizeLists(lists_size_ << 1, zone); 314 } 315 int new_element_pos = free_list_head_; 316 ASSERT(new_element_pos != kNil); 317 free_list_head_ = lists_[free_list_head_].next; 318 lists_[new_element_pos].value = value; 319 lists_[new_element_pos].next = array_[pos].next; 320 ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL); 321 array_[pos].next = new_element_pos; 322 } 323 } 324 325 326 HSideEffectMap::HSideEffectMap() : count_(0) { 327 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize); 328 } 329 330 331 HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) { 332 *this = *other; // Calls operator=. 
333 } 334 335 336 HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) { 337 if (this != &other) { 338 OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize); 339 } 340 return *this; 341 } 342 343 344 void HSideEffectMap::Kill(GVNFlagSet flags) { 345 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) { 346 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i); 347 if (flags.Contains(changes_flag)) { 348 if (data_[i] != NULL) count_--; 349 data_[i] = NULL; 350 } 351 } 352 } 353 354 355 void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) { 356 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) { 357 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i); 358 if (flags.Contains(changes_flag)) { 359 if (data_[i] == NULL) count_++; 360 data_[i] = instr; 361 } 362 } 363 } 364 365 366 HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph) 367 : HPhase("H_Global value numbering", graph), 368 removed_side_effects_(false), 369 block_side_effects_(graph->blocks()->length(), zone()), 370 loop_side_effects_(graph->blocks()->length(), zone()), 371 visited_on_paths_(graph->blocks()->length(), zone()) { 372 ASSERT(!AllowHandleAllocation::IsAllowed()); 373 block_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(), 374 zone()); 375 loop_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(), 376 zone()); 377 } 378 379 void HGlobalValueNumberingPhase::Analyze() { 380 removed_side_effects_ = false; 381 ComputeBlockSideEffects(); 382 if (FLAG_loop_invariant_code_motion) { 383 LoopInvariantCodeMotion(); 384 } 385 AnalyzeGraph(); 386 } 387 388 389 void HGlobalValueNumberingPhase::ComputeBlockSideEffects() { 390 // The Analyze phase of GVN can be called multiple times. Clear loop side 391 // effects before computing them to erase the contents from previous Analyze 392 // passes. 
393 for (int i = 0; i < loop_side_effects_.length(); ++i) { 394 loop_side_effects_[i].RemoveAll(); 395 } 396 for (int i = graph()->blocks()->length() - 1; i >= 0; --i) { 397 // Compute side effects for the block. 398 HBasicBlock* block = graph()->blocks()->at(i); 399 GVNFlagSet side_effects; 400 if (block->IsReachable() && !block->IsDeoptimizing()) { 401 int id = block->block_id(); 402 for (HInstructionIterator it(block); !it.Done(); it.Advance()) { 403 HInstruction* instr = it.Current(); 404 side_effects.Add(instr->ChangesFlags()); 405 } 406 block_side_effects_[id].Add(side_effects); 407 408 // Loop headers are part of their loop. 409 if (block->IsLoopHeader()) { 410 loop_side_effects_[id].Add(side_effects); 411 } 412 413 // Propagate loop side effects upwards. 414 if (block->HasParentLoopHeader()) { 415 HBasicBlock* with_parent = block; 416 if (block->IsLoopHeader()) side_effects = loop_side_effects_[id]; 417 do { 418 HBasicBlock* parent_block = with_parent->parent_loop_header(); 419 loop_side_effects_[parent_block->block_id()].Add(side_effects); 420 with_parent = parent_block; 421 } while (with_parent->HasParentLoopHeader()); 422 } 423 } 424 } 425 } 426 427 428 SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) { 429 char underlying_buffer[kLastFlag * 128]; 430 Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer)); 431 #if DEBUG 432 int offset = 0; 433 const char* separator = ""; 434 const char* comma = ", "; 435 buffer[0] = 0; 436 uint32_t set_depends_on = 0; 437 uint32_t set_changes = 0; 438 for (int bit = 0; bit < kLastFlag; ++bit) { 439 if (flags.Contains(static_cast<GVNFlag>(bit))) { 440 if (bit % 2 == 0) { 441 set_changes++; 442 } else { 443 set_depends_on++; 444 } 445 } 446 } 447 bool positive_changes = set_changes < (kLastFlag / 2); 448 bool positive_depends_on = set_depends_on < (kLastFlag / 2); 449 if (set_changes > 0) { 450 if (positive_changes) { 451 offset += OS::SNPrintF(buffer + offset, "changes ["); 452 } else { 453 offset += 
OS::SNPrintF(buffer + offset, "changes all except ["); 454 } 455 for (int bit = 0; bit < kLastFlag; ++bit) { 456 if (flags.Contains(static_cast<GVNFlag>(bit)) == positive_changes) { 457 switch (static_cast<GVNFlag>(bit)) { 458 #define DECLARE_FLAG(type) \ 459 case kChanges##type: \ 460 offset += OS::SNPrintF(buffer + offset, separator); \ 461 offset += OS::SNPrintF(buffer + offset, #type); \ 462 separator = comma; \ 463 break; 464 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG) 465 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG) 466 #undef DECLARE_FLAG 467 default: 468 break; 469 } 470 } 471 } 472 offset += OS::SNPrintF(buffer + offset, "]"); 473 } 474 if (set_depends_on > 0) { 475 separator = ""; 476 if (set_changes > 0) { 477 offset += OS::SNPrintF(buffer + offset, ", "); 478 } 479 if (positive_depends_on) { 480 offset += OS::SNPrintF(buffer + offset, "depends on ["); 481 } else { 482 offset += OS::SNPrintF(buffer + offset, "depends on all except ["); 483 } 484 for (int bit = 0; bit < kLastFlag; ++bit) { 485 if (flags.Contains(static_cast<GVNFlag>(bit)) == positive_depends_on) { 486 switch (static_cast<GVNFlag>(bit)) { 487 #define DECLARE_FLAG(type) \ 488 case kDependsOn##type: \ 489 offset += OS::SNPrintF(buffer + offset, separator); \ 490 offset += OS::SNPrintF(buffer + offset, #type); \ 491 separator = comma; \ 492 break; 493 GVN_TRACKED_FLAG_LIST(DECLARE_FLAG) 494 GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG) 495 #undef DECLARE_FLAG 496 default: 497 break; 498 } 499 } 500 } 501 offset += OS::SNPrintF(buffer + offset, "]"); 502 } 503 #else 504 OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral()); 505 #endif 506 size_t string_len = strlen(underlying_buffer) + 1; 507 ASSERT(string_len <= sizeof(underlying_buffer)); 508 char* result = new char[strlen(underlying_buffer) + 1]; 509 OS::MemCopy(result, underlying_buffer, string_len); 510 return SmartArrayPointer<char>(result); 511 } 512 513 514 void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() { 515 TRACE_GVN_1("Using optimistic loop 
invariant code motion: %s\n", 516 graph()->use_optimistic_licm() ? "yes" : "no"); 517 for (int i = graph()->blocks()->length() - 1; i >= 0; --i) { 518 HBasicBlock* block = graph()->blocks()->at(i); 519 if (block->IsLoopHeader()) { 520 GVNFlagSet side_effects = loop_side_effects_[block->block_id()]; 521 TRACE_GVN_2("Try loop invariant motion for block B%d %s\n", 522 block->block_id(), 523 *GetGVNFlagsString(side_effects)); 524 525 GVNFlagSet accumulated_first_time_depends; 526 GVNFlagSet accumulated_first_time_changes; 527 HBasicBlock* last = block->loop_information()->GetLastBackEdge(); 528 for (int j = block->block_id(); j <= last->block_id(); ++j) { 529 ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects, 530 &accumulated_first_time_depends, 531 &accumulated_first_time_changes); 532 } 533 } 534 } 535 } 536 537 538 void HGlobalValueNumberingPhase::ProcessLoopBlock( 539 HBasicBlock* block, 540 HBasicBlock* loop_header, 541 GVNFlagSet loop_kills, 542 GVNFlagSet* first_time_depends, 543 GVNFlagSet* first_time_changes) { 544 HBasicBlock* pre_header = loop_header->predecessors()->at(0); 545 GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills); 546 TRACE_GVN_2("Loop invariant motion for B%d %s\n", 547 block->block_id(), 548 *GetGVNFlagsString(depends_flags)); 549 HInstruction* instr = block->first(); 550 while (instr != NULL) { 551 HInstruction* next = instr->next(); 552 bool hoisted = false; 553 if (instr->CheckFlag(HValue::kUseGVN)) { 554 TRACE_GVN_4("Checking instruction %d (%s) %s. 
Loop %s\n", 555 instr->id(), 556 instr->Mnemonic(), 557 *GetGVNFlagsString(instr->gvn_flags()), 558 *GetGVNFlagsString(loop_kills)); 559 bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags); 560 if (can_hoist && !graph()->use_optimistic_licm()) { 561 can_hoist = block->IsLoopSuccessorDominator(); 562 } 563 564 if (can_hoist) { 565 bool inputs_loop_invariant = true; 566 for (int i = 0; i < instr->OperandCount(); ++i) { 567 if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) { 568 inputs_loop_invariant = false; 569 } 570 } 571 572 if (inputs_loop_invariant && ShouldMove(instr, loop_header)) { 573 TRACE_GVN_2("Hoisting loop invariant instruction i%d to block B%d\n", 574 instr->id(), pre_header->block_id()); 575 // Move the instruction out of the loop. 576 instr->Unlink(); 577 instr->InsertBefore(pre_header->end()); 578 if (instr->HasSideEffects()) removed_side_effects_ = true; 579 hoisted = true; 580 } 581 } 582 } 583 if (!hoisted) { 584 // If an instruction is not hoisted, we have to account for its side 585 // effects when hoisting later HTransitionElementsKind instructions. 
586 GVNFlagSet previous_depends = *first_time_depends; 587 GVNFlagSet previous_changes = *first_time_changes; 588 first_time_depends->Add(instr->DependsOnFlags()); 589 first_time_changes->Add(instr->ChangesFlags()); 590 if (!(previous_depends == *first_time_depends)) { 591 TRACE_GVN_1("Updated first-time accumulated %s\n", 592 *GetGVNFlagsString(*first_time_depends)); 593 } 594 if (!(previous_changes == *first_time_changes)) { 595 TRACE_GVN_1("Updated first-time accumulated %s\n", 596 *GetGVNFlagsString(*first_time_changes)); 597 } 598 } 599 instr = next; 600 } 601 } 602 603 604 bool HGlobalValueNumberingPhase::AllowCodeMotion() { 605 return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count; 606 } 607 608 609 bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr, 610 HBasicBlock* loop_header) { 611 // If we've disabled code motion or we're in a block that unconditionally 612 // deoptimizes, don't move any instructions. 613 return AllowCodeMotion() && !instr->block()->IsDeoptimizing() && 614 instr->block()->IsReachable(); 615 } 616 617 618 GVNFlagSet 619 HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock( 620 HBasicBlock* dominator, HBasicBlock* dominated) { 621 GVNFlagSet side_effects; 622 for (int i = 0; i < dominated->predecessors()->length(); ++i) { 623 HBasicBlock* block = dominated->predecessors()->at(i); 624 if (dominator->block_id() < block->block_id() && 625 block->block_id() < dominated->block_id() && 626 !visited_on_paths_.Contains(block->block_id())) { 627 visited_on_paths_.Add(block->block_id()); 628 side_effects.Add(block_side_effects_[block->block_id()]); 629 if (block->IsLoopHeader()) { 630 side_effects.Add(loop_side_effects_[block->block_id()]); 631 } 632 side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock( 633 dominator, block)); 634 } 635 } 636 return side_effects; 637 } 638 639 640 // Each instance of this class is like a "stack frame" for the recursive 641 // traversal of the dominator tree done 
during GVN (the stack is handled 642 // as a double linked list). 643 // We reuse frames when possible so the list length is limited by the depth 644 // of the dominator tree but this forces us to initialize each frame calling 645 // an explicit "Initialize" method instead of a using constructor. 646 class GvnBasicBlockState: public ZoneObject { 647 public: 648 static GvnBasicBlockState* CreateEntry(Zone* zone, 649 HBasicBlock* entry_block, 650 HValueMap* entry_map) { 651 return new(zone) 652 GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone); 653 } 654 655 HBasicBlock* block() { return block_; } 656 HValueMap* map() { return map_; } 657 HSideEffectMap* dominators() { return &dominators_; } 658 659 GvnBasicBlockState* next_in_dominator_tree_traversal( 660 Zone* zone, 661 HBasicBlock** dominator) { 662 // This assignment needs to happen before calling next_dominated() because 663 // that call can reuse "this" if we are at the last dominated block. 664 *dominator = block(); 665 GvnBasicBlockState* result = next_dominated(zone); 666 if (result == NULL) { 667 GvnBasicBlockState* dominator_state = pop(); 668 if (dominator_state != NULL) { 669 // This branch is guaranteed not to return NULL because pop() never 670 // returns a state where "is_done() == true". 671 *dominator = dominator_state->block(); 672 result = dominator_state->next_dominated(zone); 673 } else { 674 // Unnecessary (we are returning NULL) but done for cleanness. 675 *dominator = NULL; 676 } 677 } 678 return result; 679 } 680 681 private: 682 void Initialize(HBasicBlock* block, 683 HValueMap* map, 684 HSideEffectMap* dominators, 685 bool copy_map, 686 Zone* zone) { 687 block_ = block; 688 map_ = copy_map ? 
map->Copy(zone) : map; 689 dominated_index_ = -1; 690 length_ = block->dominated_blocks()->length(); 691 if (dominators != NULL) { 692 dominators_ = *dominators; 693 } 694 } 695 bool is_done() { return dominated_index_ >= length_; } 696 697 GvnBasicBlockState(GvnBasicBlockState* previous, 698 HBasicBlock* block, 699 HValueMap* map, 700 HSideEffectMap* dominators, 701 Zone* zone) 702 : previous_(previous), next_(NULL) { 703 Initialize(block, map, dominators, true, zone); 704 } 705 706 GvnBasicBlockState* next_dominated(Zone* zone) { 707 dominated_index_++; 708 if (dominated_index_ == length_ - 1) { 709 // No need to copy the map for the last child in the dominator tree. 710 Initialize(block_->dominated_blocks()->at(dominated_index_), 711 map(), 712 dominators(), 713 false, 714 zone); 715 return this; 716 } else if (dominated_index_ < length_) { 717 return push(zone, block_->dominated_blocks()->at(dominated_index_)); 718 } else { 719 return NULL; 720 } 721 } 722 723 GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) { 724 if (next_ == NULL) { 725 next_ = 726 new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone); 727 } else { 728 next_->Initialize(block, map(), dominators(), true, zone); 729 } 730 return next_; 731 } 732 GvnBasicBlockState* pop() { 733 GvnBasicBlockState* result = previous_; 734 while (result != NULL && result->is_done()) { 735 TRACE_GVN_2("Backtracking from block B%d to block b%d\n", 736 block()->block_id(), 737 previous_->block()->block_id()) 738 result = result->previous_; 739 } 740 return result; 741 } 742 743 GvnBasicBlockState* previous_; 744 GvnBasicBlockState* next_; 745 HBasicBlock* block_; 746 HValueMap* map_; 747 HSideEffectMap dominators_; 748 int dominated_index_; 749 int length_; 750 }; 751 752 753 // This is a recursive traversal of the dominator tree but it has been turned 754 // into a loop to avoid stack overflows. 
755 // The logical "stack frames" of the recursion are kept in a list of 756 // GvnBasicBlockState instances. 757 void HGlobalValueNumberingPhase::AnalyzeGraph() { 758 HBasicBlock* entry_block = graph()->entry_block(); 759 HValueMap* entry_map = new(zone()) HValueMap(zone()); 760 GvnBasicBlockState* current = 761 GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map); 762 763 while (current != NULL) { 764 HBasicBlock* block = current->block(); 765 HValueMap* map = current->map(); 766 HSideEffectMap* dominators = current->dominators(); 767 768 TRACE_GVN_2("Analyzing block B%d%s\n", 769 block->block_id(), 770 block->IsLoopHeader() ? " (loop header)" : ""); 771 772 // If this is a loop header kill everything killed by the loop. 773 if (block->IsLoopHeader()) { 774 map->Kill(loop_side_effects_[block->block_id()]); 775 dominators->Kill(loop_side_effects_[block->block_id()]); 776 } 777 778 // Go through all instructions of the current block. 779 for (HInstructionIterator it(block); !it.Done(); it.Advance()) { 780 HInstruction* instr = it.Current(); 781 if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) { 782 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) { 783 HValue* other = dominators->at(i); 784 GVNFlag changes_flag = HValue::ChangesFlagFromInt(i); 785 GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i); 786 if (instr->DependsOnFlags().Contains(depends_on_flag) && 787 (other != NULL)) { 788 TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n", 789 i, 790 instr->id(), 791 instr->Mnemonic(), 792 other->id(), 793 other->Mnemonic()); 794 instr->HandleSideEffectDominator(changes_flag, other); 795 } 796 } 797 } 798 // Instruction was unlinked during graph traversal. 799 if (!instr->IsLinked()) continue; 800 801 GVNFlagSet flags = instr->ChangesFlags(); 802 if (!flags.IsEmpty()) { 803 // Clear all instructions in the map that are affected by side effects. 804 // Store instruction as the dominating one for tracked side effects. 
805 map->Kill(flags); 806 dominators->Store(flags, instr); 807 TRACE_GVN_2("Instruction %d %s\n", instr->id(), 808 *GetGVNFlagsString(flags)); 809 } 810 if (instr->CheckFlag(HValue::kUseGVN)) { 811 ASSERT(!instr->HasObservableSideEffects()); 812 HValue* other = map->Lookup(instr); 813 if (other != NULL) { 814 ASSERT(instr->Equals(other) && other->Equals(instr)); 815 TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n", 816 instr->id(), 817 instr->Mnemonic(), 818 other->id(), 819 other->Mnemonic()); 820 if (instr->HasSideEffects()) removed_side_effects_ = true; 821 instr->DeleteAndReplaceWith(other); 822 } else { 823 map->Add(instr, zone()); 824 } 825 } 826 } 827 828 HBasicBlock* dominator_block; 829 GvnBasicBlockState* next = 830 current->next_in_dominator_tree_traversal(zone(), 831 &dominator_block); 832 833 if (next != NULL) { 834 HBasicBlock* dominated = next->block(); 835 HValueMap* successor_map = next->map(); 836 HSideEffectMap* successor_dominators = next->dominators(); 837 838 // Kill everything killed on any path between this block and the 839 // dominated block. We don't have to traverse these paths if the 840 // value map and the dominators list is already empty. If the range 841 // of block ids (block_id, dominated_id) is empty there are no such 842 // paths. 843 if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) && 844 dominator_block->block_id() + 1 < dominated->block_id()) { 845 visited_on_paths_.Clear(); 846 GVNFlagSet side_effects_on_all_paths = 847 CollectSideEffectsOnPathsToDominatedBlock(dominator_block, 848 dominated); 849 successor_map->Kill(side_effects_on_all_paths); 850 successor_dominators->Kill(side_effects_on_all_paths); 851 } 852 } 853 current = next; 854 } 855 } 856 857 } } // namespace v8::internal 858