1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include "v8.h"

#include "double.h"
#include "factory.h"
#include "hydrogen-infer-representation.h"

// Pull in the architecture-specific Lithium backend so the generated
// CompileToLithium stubs below can dispatch into LChunkBuilder.
#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

// For every concrete Hydrogen instruction type, emit a trivial
// CompileToLithium that forwards to the matching Do<Type> method on the
// Lithium chunk builder.
#define DEFINE_COMPILE(type)                                         \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) {  \
    return builder->Do##type(this);                                  \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE


// Weight of one use of this value when voting on a representation in
// RepresentationFromUses(): FLAG_loop_weight raised to the loop nesting
// depth of the containing block, capped at depth 4, so uses in inner
// loops dominate the vote.
int HValue::LoopWeight() const {
  const int w = FLAG_loop_weight;
  static const int weights[] = { 1, w, w*w, w*w*w, w*w*w*w };
  return weights[Min(block()->LoopNestingDepth(),
                     static_cast<int>(ARRAY_SIZE(weights)-1))];
}


// The isolate is reached through the owning basic block; the value must
// already be inserted into a block.
Isolate* HValue::isolate() const {
  ASSERT(block() != NULL);
  return block()->isolate();
}


// Pin this value to representation |r|. Only values still marked
// flexible can be pinned; afterwards the flexible flag is cleared so
// inference never changes it again.
void HValue::AssumeRepresentation(Representation r) {
  if (CheckFlag(kFlexibleRepresentation)) {
    ChangeRepresentation(r);
    // The representation of the value is dictated by type feedback and
    // will not be changed later.
    ClearFlag(kFlexibleRepresentation);
  }
}


// One step of the representation-inference fixed point: generalize the
// current representation first by the inputs, then by the uses, and
// finally widen Smi to Integer32 if some use cannot handle a Smi.
void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }
}


// Let the uses vote on a representation. Each use contributes its
// observed input representation, weighted by loop depth; the most
// general representation with any votes wins (Tagged > Double >
// Integer32 > Smi).
Representation HValue::RepresentationFromUses() {
  if (HasNoUses()) return Representation::None();

  // Array of use counts for each representation.
  int use_count[Representation::kNumRepresentations] = { 0 };

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    Representation rep = use->observed_input_representation(it.index());
    if (rep.IsNone()) continue;
    if (FLAG_trace_representation) {
      PrintF("#%d %s is used by #%d %s as %s%s\n",
             id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
             (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
    }
    use_count[rep.kind()] += use->LoopWeight();
  }
  // Phis also accumulate votes from indirect uses through connected phis.
  if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
  int tagged_count = use_count[Representation::kTagged];
  int double_count = use_count[Representation::kDouble];
  int int32_count = use_count[Representation::kInteger32];
  int smi_count = use_count[Representation::kSmi];

  if (tagged_count > 0) return Representation::Tagged();
  if (double_count > 0) return Representation::Double();
  if (int32_count > 0) return Representation::Integer32();
  if (smi_count > 0) return Representation::Smi();

  return Representation::None();
}


// Change to |new_rep| only if it is strictly more general than the
// current representation; on change, re-queue everything that depends
// on this value so the inference fixed point can propagate. |reason| is
// used only for tracing.
void HValue::UpdateRepresentation(Representation new_rep,
                                  HInferRepresentationPhase* h_infer,
                                  const char* reason) {
  Representation r = representation();
  if (new_rep.is_more_general_than(r)) {
    // Some values must never become tagged (e.g. they have no heap
    // representation); silently refuse in that case.
    if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
    if (FLAG_trace_representation) {
      PrintF("Changing #%d %s representation %s -> %s based on %s\n",
             id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
    }
    ChangeRepresentation(new_rep);
    AddDependantsToWorklist(h_infer);
  }
}


// Both uses and operands may need to react to a representation change,
// so put all of them back on the inference worklist.
void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    h_infer->AddToWorklist(it.value());
  }
  for (int i = 0; i < OperandCount(); ++i) {
    h_infer->AddToWorklist(OperandAt(i));
  }
}


// Saturate a 64-bit intermediate result to the bounds of the given
// representation (Smi range for Smi, int32 range otherwise), setting
// *overflow when clamping was necessary.
static int32_t ConvertAndSetOverflow(Representation r,
                                     int64_t result,
                                     bool* overflow) {
  if (r.IsSmi()) {
    if (result > Smi::kMaxValue) {
      *overflow = true;
      return Smi::kMaxValue;
    }
    if (result < Smi::kMinValue) {
      *overflow = true;
      return Smi::kMinValue;
    }
  } else {
    if (result > kMaxInt) {
      *overflow = true;
      return kMaxInt;
    }
    if (result < kMinInt) {
      *overflow = true;
      return kMinInt;
    }
  }
  return static_cast<int32_t>(result);
}


// a + b computed in 64 bits, then clamped to the representation's range.
static int32_t AddWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// a - b computed in 64 bits, then clamped to the representation's range.
static int32_t SubWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// a * b computed in 64 bits, then clamped to the representation's range.
static int32_t MulWithoutOverflow(const Representation& r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// A conservative bit mask covering every value in the range: the exact
// value for singleton ranges, the smallest all-ones mask >= upper for
// non-negative ranges, and all bits set once negatives are possible.
int32_t Range::Mask() const {
  if (lower_ == upper_) return lower_;
  if (lower_ >= 0) {
    int32_t res = 1;
    while (res < upper_) {
      res = (res << 1) | 1;
    }
    return res;
  }
  return 0xffffffff;
}


// Shift both bounds by a constant. Overflow is deliberately ignored:
// the bounds simply saturate via AddWithoutOverflow.
void Range::AddConstant(int32_t value) {
  if (value == 0) return;
  bool may_overflow = false;  // Overflow is ignored here.
  Representation r = Representation::Integer32();
  lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
#ifdef DEBUG
  Verify();
#endif
}


// Narrow this range to the intersection with |other|; minus zero
// survives only if both ranges allow it.
void Range::Intersect(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  bool b = CanBeMinusZero() && other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


// Widen this range to the union with |other|; minus zero survives if
// either range allows it.
void Range::Union(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  bool b = CanBeMinusZero() || other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


// Range of Max(x, y) for x in this range, y in |other|.
void Range::CombinedMax(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


// Range of Min(x, y) for x in this range, y in |other|.
void Range::CombinedMin(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


// Arithmetic shift right by a constant; shift count is masked to five
// bits, matching the machine (and JS) shift semantics.
void Range::Sar(int32_t value) {
  int32_t bits = value & 0x1F;
  lower_ = lower_ >> bits;
  upper_ = upper_ >> bits;
  set_can_be_minus_zero(false);
}


// Shift left by a constant (count masked to five bits). If shifting
// back does not recover the original bounds the shift overflowed, and
// the range is widened to the full int32 range.
void Range::Shl(int32_t value) {
  int32_t bits = value & 0x1F;
  int old_lower = lower_;
  int old_upper = upper_;
  lower_ = lower_ << bits;
  upper_ = upper_ << bits;
  if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
    upper_ = kMaxInt;
    lower_ = kMinInt;
  }
  set_can_be_minus_zero(false);
}


// Interval addition with saturation; returns whether either bound may
// have overflowed the representation |r|.
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  // Saturation can invert the bounds; restore lower_ <= upper_.
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}
bool Range::SubAndCheckOverflow(const Representation& r, Range* other) { 295 bool may_overflow = false; 296 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow); 297 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow); 298 KeepOrder(); 299 #ifdef DEBUG 300 Verify(); 301 #endif 302 return may_overflow; 303 } 304 305 306 void Range::KeepOrder() { 307 if (lower_ > upper_) { 308 int32_t tmp = lower_; 309 lower_ = upper_; 310 upper_ = tmp; 311 } 312 } 313 314 315 #ifdef DEBUG 316 void Range::Verify() const { 317 ASSERT(lower_ <= upper_); 318 } 319 #endif 320 321 322 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) { 323 bool may_overflow = false; 324 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow); 325 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow); 326 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow); 327 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow); 328 lower_ = Min(Min(v1, v2), Min(v3, v4)); 329 upper_ = Max(Max(v1, v2), Max(v3, v4)); 330 #ifdef DEBUG 331 Verify(); 332 #endif 333 return may_overflow; 334 } 335 336 337 const char* HType::ToString() { 338 // Note: The c1visualizer syntax for locals allows only a sequence of the 339 // following characters: A-Za-z0-9_-|: 340 switch (type_) { 341 case kNone: return "none"; 342 case kTagged: return "tagged"; 343 case kTaggedPrimitive: return "primitive"; 344 case kTaggedNumber: return "number"; 345 case kSmi: return "smi"; 346 case kHeapNumber: return "heap-number"; 347 case kString: return "string"; 348 case kBoolean: return "boolean"; 349 case kNonPrimitive: return "non-primitive"; 350 case kJSArray: return "array"; 351 case kJSObject: return "object"; 352 } 353 UNREACHABLE(); 354 return "unreachable"; 355 } 356 357 358 HType HType::TypeFromValue(Handle<Object> value) { 359 HType result = HType::Tagged(); 360 if (value->IsSmi()) { 361 result = HType::Smi(); 362 } else if 
(value->IsHeapNumber()) { 363 result = HType::HeapNumber(); 364 } else if (value->IsString()) { 365 result = HType::String(); 366 } else if (value->IsBoolean()) { 367 result = HType::Boolean(); 368 } else if (value->IsJSObject()) { 369 result = HType::JSObject(); 370 } else if (value->IsJSArray()) { 371 result = HType::JSArray(); 372 } 373 return result; 374 } 375 376 377 bool HValue::IsDefinedAfter(HBasicBlock* other) const { 378 return block()->block_id() > other->block_id(); 379 } 380 381 382 HUseListNode* HUseListNode::tail() { 383 // Skip and remove dead items in the use list. 384 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) { 385 tail_ = tail_->tail_; 386 } 387 return tail_; 388 } 389 390 391 bool HValue::CheckUsesForFlag(Flag f) const { 392 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 393 if (it.value()->IsSimulate()) continue; 394 if (!it.value()->CheckFlag(f)) return false; 395 } 396 return true; 397 } 398 399 400 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const { 401 bool return_value = false; 402 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 403 if (it.value()->IsSimulate()) continue; 404 if (!it.value()->CheckFlag(f)) return false; 405 return_value = true; 406 } 407 return return_value; 408 } 409 410 411 HUseIterator::HUseIterator(HUseListNode* head) : next_(head) { 412 Advance(); 413 } 414 415 416 void HUseIterator::Advance() { 417 current_ = next_; 418 if (current_ != NULL) { 419 next_ = current_->tail(); 420 value_ = current_->value(); 421 index_ = current_->index(); 422 } 423 } 424 425 426 int HValue::UseCount() const { 427 int count = 0; 428 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count; 429 return count; 430 } 431 432 433 HUseListNode* HValue::RemoveUse(HValue* value, int index) { 434 HUseListNode* previous = NULL; 435 HUseListNode* current = use_list_; 436 while (current != NULL) { 437 if (current->value() == value && current->index() == index) { 438 if (previous 
== NULL) { 439 use_list_ = current->tail(); 440 } else { 441 previous->set_tail(current->tail()); 442 } 443 break; 444 } 445 446 previous = current; 447 current = current->tail(); 448 } 449 450 #ifdef DEBUG 451 // Do not reuse use list nodes in debug mode, zap them. 452 if (current != NULL) { 453 HUseListNode* temp = 454 new(block()->zone()) 455 HUseListNode(current->value(), current->index(), NULL); 456 current->Zap(); 457 current = temp; 458 } 459 #endif 460 return current; 461 } 462 463 464 bool HValue::Equals(HValue* other) { 465 if (other->opcode() != opcode()) return false; 466 if (!other->representation().Equals(representation())) return false; 467 if (!other->type_.Equals(type_)) return false; 468 if (other->flags() != flags()) return false; 469 if (OperandCount() != other->OperandCount()) return false; 470 for (int i = 0; i < OperandCount(); ++i) { 471 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false; 472 } 473 bool result = DataEquals(other); 474 ASSERT(!result || Hashcode() == other->Hashcode()); 475 return result; 476 } 477 478 479 intptr_t HValue::Hashcode() { 480 intptr_t result = opcode(); 481 int count = OperandCount(); 482 for (int i = 0; i < count; ++i) { 483 result = result * 19 + OperandAt(i)->id() + (result >> 7); 484 } 485 return result; 486 } 487 488 489 const char* HValue::Mnemonic() const { 490 switch (opcode()) { 491 #define MAKE_CASE(type) case k##type: return #type; 492 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE) 493 #undef MAKE_CASE 494 case kPhi: return "Phi"; 495 default: return ""; 496 } 497 } 498 499 500 bool HValue::IsInteger32Constant() { 501 return IsConstant() && HConstant::cast(this)->HasInteger32Value(); 502 } 503 504 505 int32_t HValue::GetInteger32Constant() { 506 return HConstant::cast(this)->Integer32Value(); 507 } 508 509 510 bool HValue::EqualsInteger32Constant(int32_t value) { 511 return IsInteger32Constant() && GetInteger32Constant() == value; 512 } 513 514 515 void HValue::SetOperandAt(int index, 
HValue* value) { 516 RegisterUse(index, value); 517 InternalSetOperandAt(index, value); 518 } 519 520 521 void HValue::DeleteAndReplaceWith(HValue* other) { 522 // We replace all uses first, so Delete can assert that there are none. 523 if (other != NULL) ReplaceAllUsesWith(other); 524 Kill(); 525 DeleteFromGraph(); 526 } 527 528 529 void HValue::ReplaceAllUsesWith(HValue* other) { 530 while (use_list_ != NULL) { 531 HUseListNode* list_node = use_list_; 532 HValue* value = list_node->value(); 533 ASSERT(!value->block()->IsStartBlock()); 534 value->InternalSetOperandAt(list_node->index(), other); 535 use_list_ = list_node->tail(); 536 list_node->set_tail(other->use_list_); 537 other->use_list_ = list_node; 538 } 539 } 540 541 542 void HValue::Kill() { 543 // Instead of going through the entire use list of each operand, we only 544 // check the first item in each use list and rely on the tail() method to 545 // skip dead items, removing them lazily next time we traverse the list. 546 SetFlag(kIsDead); 547 for (int i = 0; i < OperandCount(); ++i) { 548 HValue* operand = OperandAt(i); 549 if (operand == NULL) continue; 550 HUseListNode* first = operand->use_list_; 551 if (first != NULL && first->value()->CheckFlag(kIsDead)) { 552 operand->use_list_ = first->tail(); 553 } 554 } 555 } 556 557 558 void HValue::SetBlock(HBasicBlock* block) { 559 ASSERT(block_ == NULL || block == NULL); 560 block_ = block; 561 if (id_ == kNoNumber && block != NULL) { 562 id_ = block->graph()->GetNextValueID(this); 563 } 564 } 565 566 567 void HValue::PrintTypeTo(StringStream* stream) { 568 if (!representation().IsTagged() || type().Equals(HType::Tagged())) return; 569 stream->Add(" type:%s", type().ToString()); 570 } 571 572 573 void HValue::PrintRangeTo(StringStream* stream) { 574 if (range() == NULL || range()->IsMostGeneric()) return; 575 // Note: The c1visualizer syntax for locals allows only a sequence of the 576 // following characters: A-Za-z0-9_-|: 577 stream->Add(" range:%d_%d%s", 
578 range()->lower(), 579 range()->upper(), 580 range()->CanBeMinusZero() ? "_m0" : ""); 581 } 582 583 584 void HValue::PrintChangesTo(StringStream* stream) { 585 GVNFlagSet changes_flags = ChangesFlags(); 586 if (changes_flags.IsEmpty()) return; 587 stream->Add(" changes["); 588 if (changes_flags == AllSideEffectsFlagSet()) { 589 stream->Add("*"); 590 } else { 591 bool add_comma = false; 592 #define PRINT_DO(type) \ 593 if (changes_flags.Contains(kChanges##type)) { \ 594 if (add_comma) stream->Add(","); \ 595 add_comma = true; \ 596 stream->Add(#type); \ 597 } 598 GVN_TRACKED_FLAG_LIST(PRINT_DO); 599 GVN_UNTRACKED_FLAG_LIST(PRINT_DO); 600 #undef PRINT_DO 601 } 602 stream->Add("]"); 603 } 604 605 606 void HValue::PrintNameTo(StringStream* stream) { 607 stream->Add("%s%d", representation_.Mnemonic(), id()); 608 } 609 610 611 bool HValue::HasMonomorphicJSObjectType() { 612 return !GetMonomorphicJSObjectMap().is_null(); 613 } 614 615 616 bool HValue::UpdateInferredType() { 617 HType type = CalculateInferredType(); 618 bool result = (!type.Equals(type_)); 619 type_ = type; 620 return result; 621 } 622 623 624 void HValue::RegisterUse(int index, HValue* new_value) { 625 HValue* old_value = OperandAt(index); 626 if (old_value == new_value) return; 627 628 HUseListNode* removed = NULL; 629 if (old_value != NULL) { 630 removed = old_value->RemoveUse(this, index); 631 } 632 633 if (new_value != NULL) { 634 if (removed == NULL) { 635 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode( 636 this, index, new_value->use_list_); 637 } else { 638 removed->set_tail(new_value->use_list_); 639 new_value->use_list_ = removed; 640 } 641 } 642 } 643 644 645 void HValue::AddNewRange(Range* r, Zone* zone) { 646 if (!HasRange()) ComputeInitialRange(zone); 647 if (!HasRange()) range_ = new(zone) Range(); 648 ASSERT(HasRange()); 649 r->StackUpon(range_); 650 range_ = r; 651 } 652 653 654 void HValue::RemoveLastAddedRange() { 655 ASSERT(HasRange()); 656 ASSERT(range_->next() 
!= NULL); 657 range_ = range_->next(); 658 } 659 660 661 void HValue::ComputeInitialRange(Zone* zone) { 662 ASSERT(!HasRange()); 663 range_ = InferRange(zone); 664 ASSERT(HasRange()); 665 } 666 667 668 void HInstruction::PrintTo(StringStream* stream) { 669 PrintMnemonicTo(stream); 670 PrintDataTo(stream); 671 PrintRangeTo(stream); 672 PrintChangesTo(stream); 673 PrintTypeTo(stream); 674 if (CheckFlag(HValue::kHasNoObservableSideEffects)) { 675 stream->Add(" [noOSE]"); 676 } 677 } 678 679 680 void HInstruction::PrintDataTo(StringStream *stream) { 681 for (int i = 0; i < OperandCount(); ++i) { 682 if (i > 0) stream->Add(" "); 683 OperandAt(i)->PrintNameTo(stream); 684 } 685 } 686 687 688 void HInstruction::PrintMnemonicTo(StringStream* stream) { 689 stream->Add("%s ", Mnemonic()); 690 } 691 692 693 void HInstruction::Unlink() { 694 ASSERT(IsLinked()); 695 ASSERT(!IsControlInstruction()); // Must never move control instructions. 696 ASSERT(!IsBlockEntry()); // Doesn't make sense to delete these. 697 ASSERT(previous_ != NULL); 698 previous_->next_ = next_; 699 if (next_ == NULL) { 700 ASSERT(block()->last() == this); 701 block()->set_last(previous_); 702 } else { 703 next_->previous_ = previous_; 704 } 705 clear_block(); 706 } 707 708 709 void HInstruction::InsertBefore(HInstruction* next) { 710 ASSERT(!IsLinked()); 711 ASSERT(!next->IsBlockEntry()); 712 ASSERT(!IsControlInstruction()); 713 ASSERT(!next->block()->IsStartBlock()); 714 ASSERT(next->previous_ != NULL); 715 HInstruction* prev = next->previous(); 716 prev->next_ = this; 717 next->previous_ = this; 718 next_ = next; 719 previous_ = prev; 720 SetBlock(next->block()); 721 } 722 723 724 void HInstruction::InsertAfter(HInstruction* previous) { 725 ASSERT(!IsLinked()); 726 ASSERT(!previous->IsControlInstruction()); 727 ASSERT(!IsControlInstruction() || previous->next_ == NULL); 728 HBasicBlock* block = previous->block(); 729 // Never insert anything except constants into the start block after finishing 730 // it. 
731 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) { 732 ASSERT(block->end()->SecondSuccessor() == NULL); 733 InsertAfter(block->end()->FirstSuccessor()->first()); 734 return; 735 } 736 737 // If we're inserting after an instruction with side-effects that is 738 // followed by a simulate instruction, we need to insert after the 739 // simulate instruction instead. 740 HInstruction* next = previous->next_; 741 if (previous->HasObservableSideEffects() && next != NULL) { 742 ASSERT(next->IsSimulate()); 743 previous = next; 744 next = previous->next_; 745 } 746 747 previous_ = previous; 748 next_ = next; 749 SetBlock(block); 750 previous->next_ = this; 751 if (next != NULL) next->previous_ = this; 752 if (block->last() == previous) { 753 block->set_last(this); 754 } 755 } 756 757 758 #ifdef DEBUG 759 void HInstruction::Verify() { 760 // Verify that input operands are defined before use. 761 HBasicBlock* cur_block = block(); 762 for (int i = 0; i < OperandCount(); ++i) { 763 HValue* other_operand = OperandAt(i); 764 if (other_operand == NULL) continue; 765 HBasicBlock* other_block = other_operand->block(); 766 if (cur_block == other_block) { 767 if (!other_operand->IsPhi()) { 768 HInstruction* cur = this->previous(); 769 while (cur != NULL) { 770 if (cur == other_operand) break; 771 cur = cur->previous(); 772 } 773 // Must reach other operand in the same block! 774 ASSERT(cur == other_operand); 775 } 776 } else { 777 // If the following assert fires, you may have forgotten an 778 // AddInstruction. 779 ASSERT(other_block->Dominates(cur_block)); 780 } 781 } 782 783 // Verify that instructions that may have side-effects are followed 784 // by a simulate instruction. 785 if (HasObservableSideEffects() && !IsOsrEntry()) { 786 ASSERT(next()->IsSimulate()); 787 } 788 789 // Verify that instructions that can be eliminated by GVN have overridden 790 // HValue::DataEquals. The default implementation is UNREACHABLE. 
We 791 // don't actually care whether DataEquals returns true or false here. 792 if (CheckFlag(kUseGVN)) DataEquals(this); 793 794 // Verify that all uses are in the graph. 795 for (HUseIterator use = uses(); !use.Done(); use.Advance()) { 796 if (use.value()->IsInstruction()) { 797 ASSERT(HInstruction::cast(use.value())->IsLinked()); 798 } 799 } 800 } 801 #endif 802 803 804 void HDummyUse::PrintDataTo(StringStream* stream) { 805 value()->PrintNameTo(stream); 806 } 807 808 809 void HEnvironmentMarker::PrintDataTo(StringStream* stream) { 810 stream->Add("%s var[%d]", kind() == BIND ? "bind" : "lookup", index()); 811 } 812 813 814 void HUnaryCall::PrintDataTo(StringStream* stream) { 815 value()->PrintNameTo(stream); 816 stream->Add(" "); 817 stream->Add("#%d", argument_count()); 818 } 819 820 821 void HBinaryCall::PrintDataTo(StringStream* stream) { 822 first()->PrintNameTo(stream); 823 stream->Add(" "); 824 second()->PrintNameTo(stream); 825 stream->Add(" "); 826 stream->Add("#%d", argument_count()); 827 } 828 829 830 void HBoundsCheck::ApplyIndexChange() { 831 if (skip_check()) return; 832 833 DecompositionResult decomposition; 834 bool index_is_decomposable = index()->TryDecompose(&decomposition); 835 if (index_is_decomposable) { 836 ASSERT(decomposition.base() == base()); 837 if (decomposition.offset() == offset() && 838 decomposition.scale() == scale()) return; 839 } else { 840 return; 841 } 842 843 ReplaceAllUsesWith(index()); 844 845 HValue* current_index = decomposition.base(); 846 int actual_offset = decomposition.offset() + offset(); 847 int actual_scale = decomposition.scale() + scale(); 848 849 Zone* zone = block()->graph()->zone(); 850 HValue* context = block()->graph()->GetInvalidContext(); 851 if (actual_offset != 0) { 852 HConstant* add_offset = HConstant::New(zone, context, actual_offset); 853 add_offset->InsertBefore(this); 854 HInstruction* add = HAdd::New(zone, context, 855 current_index, add_offset); 856 add->InsertBefore(this); 857 
add->AssumeRepresentation(index()->representation()); 858 add->ClearFlag(kCanOverflow); 859 current_index = add; 860 } 861 862 if (actual_scale != 0) { 863 HConstant* sar_scale = HConstant::New(zone, context, actual_scale); 864 sar_scale->InsertBefore(this); 865 HInstruction* sar = HSar::New(zone, context, 866 current_index, sar_scale); 867 sar->InsertBefore(this); 868 sar->AssumeRepresentation(index()->representation()); 869 current_index = sar; 870 } 871 872 SetOperandAt(0, current_index); 873 874 base_ = NULL; 875 offset_ = 0; 876 scale_ = 0; 877 } 878 879 880 void HBoundsCheck::PrintDataTo(StringStream* stream) { 881 index()->PrintNameTo(stream); 882 stream->Add(" "); 883 length()->PrintNameTo(stream); 884 if (base() != NULL && (offset() != 0 || scale() != 0)) { 885 stream->Add(" base: (("); 886 if (base() != index()) { 887 index()->PrintNameTo(stream); 888 } else { 889 stream->Add("index"); 890 } 891 stream->Add(" + %d) >> %d)", offset(), scale()); 892 } 893 if (skip_check()) { 894 stream->Add(" [DISABLED]"); 895 } 896 } 897 898 899 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) { 900 ASSERT(CheckFlag(kFlexibleRepresentation)); 901 HValue* actual_index = index()->ActualValue(); 902 HValue* actual_length = length()->ActualValue(); 903 Representation index_rep = actual_index->representation(); 904 Representation length_rep = actual_length->representation(); 905 if (index_rep.IsTagged() && actual_index->type().IsSmi()) { 906 index_rep = Representation::Smi(); 907 } 908 if (length_rep.IsTagged() && actual_length->type().IsSmi()) { 909 length_rep = Representation::Smi(); 910 } 911 Representation r = index_rep.generalize(length_rep); 912 if (r.is_more_general_than(Representation::Integer32())) { 913 r = Representation::Integer32(); 914 } 915 UpdateRepresentation(r, h_infer, "boundscheck"); 916 } 917 918 919 void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) { 920 stream->Add("base: "); 921 
base_index()->PrintNameTo(stream); 922 stream->Add(", check: "); 923 base_index()->PrintNameTo(stream); 924 } 925 926 927 void HCallConstantFunction::PrintDataTo(StringStream* stream) { 928 if (IsApplyFunction()) { 929 stream->Add("optimized apply "); 930 } else { 931 stream->Add("%o ", function()->shared()->DebugName()); 932 } 933 stream->Add("#%d", argument_count()); 934 } 935 936 937 void HCallNamed::PrintDataTo(StringStream* stream) { 938 stream->Add("%o ", *name()); 939 HUnaryCall::PrintDataTo(stream); 940 } 941 942 943 void HCallGlobal::PrintDataTo(StringStream* stream) { 944 stream->Add("%o ", *name()); 945 HUnaryCall::PrintDataTo(stream); 946 } 947 948 949 void HCallKnownGlobal::PrintDataTo(StringStream* stream) { 950 stream->Add("%o ", target()->shared()->DebugName()); 951 stream->Add("#%d", argument_count()); 952 } 953 954 955 void HCallNewArray::PrintDataTo(StringStream* stream) { 956 stream->Add(ElementsKindToString(elements_kind())); 957 stream->Add(" "); 958 HBinaryCall::PrintDataTo(stream); 959 } 960 961 962 void HCallRuntime::PrintDataTo(StringStream* stream) { 963 stream->Add("%o ", *name()); 964 stream->Add("#%d", argument_count()); 965 } 966 967 968 void HClassOfTestAndBranch::PrintDataTo(StringStream* stream) { 969 stream->Add("class_of_test("); 970 value()->PrintNameTo(stream); 971 stream->Add(", \"%o\")", *class_name()); 972 } 973 974 975 void HWrapReceiver::PrintDataTo(StringStream* stream) { 976 receiver()->PrintNameTo(stream); 977 stream->Add(" "); 978 function()->PrintNameTo(stream); 979 } 980 981 982 void HAccessArgumentsAt::PrintDataTo(StringStream* stream) { 983 arguments()->PrintNameTo(stream); 984 stream->Add("["); 985 index()->PrintNameTo(stream); 986 stream->Add("], length "); 987 length()->PrintNameTo(stream); 988 } 989 990 991 void HControlInstruction::PrintDataTo(StringStream* stream) { 992 stream->Add(" goto ("); 993 bool first_block = true; 994 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) { 995 
stream->Add(first_block ? "B%d" : ", B%d", it.Current()->block_id()); 996 first_block = false; 997 } 998 stream->Add(")"); 999 } 1000 1001 1002 void HUnaryControlInstruction::PrintDataTo(StringStream* stream) { 1003 value()->PrintNameTo(stream); 1004 HControlInstruction::PrintDataTo(stream); 1005 } 1006 1007 1008 void HReturn::PrintDataTo(StringStream* stream) { 1009 value()->PrintNameTo(stream); 1010 stream->Add(" (pop "); 1011 parameter_count()->PrintNameTo(stream); 1012 stream->Add(" values)"); 1013 } 1014 1015 1016 Representation HBranch::observed_input_representation(int index) { 1017 static const ToBooleanStub::Types tagged_types( 1018 ToBooleanStub::NULL_TYPE | 1019 ToBooleanStub::SPEC_OBJECT | 1020 ToBooleanStub::STRING | 1021 ToBooleanStub::SYMBOL); 1022 if (expected_input_types_.ContainsAnyOf(tagged_types)) { 1023 return Representation::Tagged(); 1024 } 1025 if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) { 1026 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) { 1027 return Representation::Double(); 1028 } 1029 return Representation::Tagged(); 1030 } 1031 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) { 1032 return Representation::Double(); 1033 } 1034 if (expected_input_types_.Contains(ToBooleanStub::SMI)) { 1035 return Representation::Smi(); 1036 } 1037 return Representation::None(); 1038 } 1039 1040 1041 void HCompareMap::PrintDataTo(StringStream* stream) { 1042 value()->PrintNameTo(stream); 1043 stream->Add(" (%p)", *map()); 1044 HControlInstruction::PrintDataTo(stream); 1045 } 1046 1047 1048 const char* HUnaryMathOperation::OpName() const { 1049 switch (op()) { 1050 case kMathFloor: return "floor"; 1051 case kMathRound: return "round"; 1052 case kMathAbs: return "abs"; 1053 case kMathLog: return "log"; 1054 case kMathSin: return "sin"; 1055 case kMathCos: return "cos"; 1056 case kMathTan: return "tan"; 1057 case kMathExp: return "exp"; 1058 case kMathSqrt: return "sqrt"; 1059 case kMathPowHalf: return 
"pow-half";
    default:
      UNREACHABLE();
      return NULL;
  }
}


// Computes a conservative value range for this unary math operation.
// Only kMathAbs on an integral representation with a known input range is
// tightened; everything else falls back to the generic HValue::InferRange.
Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, on which the
      // instruction deopts. Hence clamp it to kMaxInt.
      int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
      int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
      // If the input can be zero the result's lower bound is 0; otherwise it
      // is the smaller of the two absolute bounds.
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  return HValue::InferRange(zone);
}


// Prints "<op-name> <value>" for IR tracing output.
void HUnaryMathOperation::PrintDataTo(StringStream* stream) {
  const char* name = OpName();
  stream->Add("%s ", name);
  value()->PrintNameTo(stream);
}


// Default printer for unary operations: just the operand's name.
void HUnaryOperation::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
}


// Prints the operand plus a symbolic tag for well-known instance-type
// intervals (spec_object, reg_exp, array, function).
void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  switch (from_) {
    case FIRST_JS_RECEIVER_TYPE:
      if (to_ == LAST_TYPE) stream->Add(" spec_object");
      break;
    case JS_REGEXP_TYPE:
      if (to_ == JS_REGEXP_TYPE) stream->Add(" reg_exp");
      break;
    case JS_ARRAY_TYPE:
      if (to_ == JS_ARRAY_TYPE) stream->Add(" array");
      break;
    case JS_FUNCTION_TYPE:
      if (to_ == JS_FUNCTION_TYPE) stream->Add(" function");
      break;
    default:
      break;
  }
}


// Prints "<value> == <literal>" followed by the branch targets.
void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" == %o", *type_literal_);
  HControlInstruction::PrintDataTo(stream);
}


void HCheckMapValue::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" ");
  map()->PrintNameTo(stream);
}


void HForInPrepareMap::PrintDataTo(StringStream* stream) {
  enumerable()->PrintNameTo(stream);
}


void HForInCacheArray::PrintDataTo(StringStream* stream) {
  enumerable()->PrintNameTo(stream);
  stream->Add(" ");
  map()->PrintNameTo(stream);
  stream->Add("[%d]", idx_);
}


void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(" ");
  index()->PrintNameTo(stream);
}


// Matches "l <op> r" where l is the all-ones constant (~0); on success
// *negated receives the other operand.
static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
  if (!l->EqualsInteger32Constant(~0)) return false;
  *negated = r;
  return true;
}


// Matches "x ^ ~0" (in either operand order), i.e. bitwise negation
// expressed via xor; on success *negated receives x.
static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
  if (!instr->IsBitwise()) return false;
  HBitwise* b = HBitwise::cast(instr);
  return (b->op() == Token::BIT_XOR) &&
      (MatchLeftIsOnes(b->left(), b->right(), negated) ||
       MatchLeftIsOnes(b->right(), b->left(), negated));
}


// Matches "~~x" (double negation via xor); on success *arg receives x.
static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
  HValue* negated;
  return MatchNegationViaXor(instr, &negated) &&
      MatchNegationViaXor(negated, arg);
}


// Strength-reduces bitwise no-ops and the double-negation idiom.
HValue* HBitwise::Canonicalize() {
  if (!representation().IsSmiOrInteger32()) return this;
  // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
  int32_t nop_constant = (op() == Token::BIT_AND) ?
-1 : 0;
  // The uint32 flag must not be dropped, so only fold when the surviving
  // operand is not uint32.
  if (left()->EqualsInteger32Constant(nop_constant) &&
      !right()->CheckFlag(kUint32)) {
    return right();
  }
  if (right()->EqualsInteger32Constant(nop_constant) &&
      !left()->CheckFlag(kUint32)) {
    return left();
  }
  // Optimize double negation, a common pattern used for ToInt32(x).
  HValue* arg;
  if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
    return arg;
  }
  return this;
}


// True if arg2 is the given integer identity constant and arg1 has a
// specialized (non-tagged) representation, so the operation is a no-op.
static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
  return arg1->representation().IsSpecialization() &&
      arg2->EqualsInteger32Constant(identity);
}


// x + 0 == x and 0 + x == x.
HValue* HAdd::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 0)) return left();
  if (IsIdentityOperation(right(), left(), 0)) return right();
  return this;
}


// x - 0 == x.
HValue* HSub::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 0)) return left();
  return this;
}


// x * 1 == x and 1 * x == x.
HValue* HMul::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  if (IsIdentityOperation(right(), left(), 1)) return right();
  return this;
}


HValue* HMod::Canonicalize() {
  return this;
}


HValue* HDiv::Canonicalize() {
  return this;
}


// A representation change to the same representation is a no-op.
HValue* HChange::Canonicalize() {
  return (from().Equals(to())) ?
value() : this;
}


// A wrapped receiver that is already known to be a JSObject needs no
// wrapping; a wrap with no uses can be removed entirely.
HValue* HWrapReceiver::Canonicalize() {
  if (HasNoUses()) return NULL;
  if (receiver()->type().IsJSObject()) {
    return receiver();
  }
  return this;
}


void HTypeof::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
}


void HForceRepresentation::PrintDataTo(StringStream* stream) {
  stream->Add("%s ", representation().Mnemonic());
  value()->PrintNameTo(stream);
}


// Prints "<value> <from> to <to>" plus any truncation/deopt flags.
void HChange::PrintDataTo(StringStream* stream) {
  HUnaryOperation::PrintDataTo(stream);
  stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic());

  if (CanTruncateToInt32()) stream->Add(" truncating-int32");
  if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
  if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan");
}


// Returns an integer32 view of the dividend if one is available without
// inserting new instructions, or NULL otherwise.
static HValue* SimplifiedDividendForMathFloorOfDiv(HValue* dividend) {
  // A value with an integer representation does not need to be transformed.
  if (dividend->representation().IsInteger32()) {
    return dividend;
  }
  // A change from an integer32 can be replaced by the integer32 value.
  if (dividend->IsChange() &&
      HChange::cast(dividend)->from().IsInteger32()) {
    return HChange::cast(dividend)->value();
  }
  return NULL;
}


// Canonicalizes Math.round/Math.floor:
// - on an already-integral input both are the identity (modulo a
//   representation change), and
// - Math.floor(a / b) on integer inputs is fused into HMathFloorOfDiv.
HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    if (val->IsChange()) val = HChange::cast(val)->value();

    // If the input is smi or integer32 then we replace the instruction with its
    // input.
    if (val->representation().IsSmiOrInteger32()) {
      if (!val->representation().Equals(representation())) {
        HChange* result = new(block()->zone()) HChange(
            val, representation(), false, false);
        result->InsertBefore(this);
        return result;
      }
      return val;
    }
  }

  if (op() == kMathFloor) {
    HValue* val = value();
    if (val->IsChange()) val = HChange::cast(val)->value();
    // Only fuse when this floor is the division's sole use.
    if (val->IsDiv() && (val->UseCount() == 1)) {
      HDiv* hdiv = HDiv::cast(val);
      HValue* left = hdiv->left();
      HValue* right = hdiv->right();
      // Try to simplify left and right values of the division.
      HValue* new_left = SimplifiedDividendForMathFloorOfDiv(left);
      if (new_left == NULL &&
          hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
        new_left = new(block()->zone()) HChange(
            left, Representation::Integer32(), false, false);
        HChange::cast(new_left)->InsertBefore(this);
      }
      HValue* new_right =
          LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(right);
      if (new_right == NULL &&
// On ARM the fused form also requires hardware integer division support.
#if V8_TARGET_ARCH_ARM
          CpuFeatures::IsSupported(SUDIV) &&
#endif
          hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
        new_right = new(block()->zone()) HChange(
            right, Representation::Integer32(), false, false);
        HChange::cast(new_right)->InsertBefore(this);
      }

      // Return if left or right are not optimizable.
      if ((new_left == NULL) || (new_right == NULL)) return this;

      // Insert the new values in the graph.
      if (new_left->IsInstruction() &&
          !HInstruction::cast(new_left)->IsLinked()) {
        HInstruction::cast(new_left)->InsertBefore(this);
      }
      if (new_right->IsInstruction() &&
          !HInstruction::cast(new_right)->IsLinked()) {
        HInstruction::cast(new_right)->InsertBefore(this);
      }
      HMathFloorOfDiv* instr =
          HMathFloorOfDiv::New(block()->zone(), context(), new_left, new_right);
      // Replace this HMathFloor instruction by the new HMathFloorOfDiv.
      instr->InsertBefore(this);
      ReplaceAllUsesWith(instr);
      Kill();
      // We know the division had no other uses than this HMathFloor. Delete it.
      // Dead code elimination will deal with |left| and |right| if
      // appropriate.
      hdiv->DeleteAndReplaceWith(NULL);

      // Return NULL to remove this instruction from the graph.
      return NULL;
    }
  }
  return this;
}


// Drops instance-type checks that are statically known to pass.
HValue* HCheckInstanceType::Canonicalize() {
  if (check_ == IS_STRING && value()->type().IsString()) {
    return value();
  }

  if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
    if (HConstant::cast(value())->HasInternalizedStringValue()) {
      return value();
    }
  }
  return this;
}


// Returns the [first, last] instance-type interval for interval checks.
void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  ASSERT(is_interval_check());
  switch (check_) {
    case IS_SPEC_OBJECT:
      *first = FIRST_SPEC_OBJECT_TYPE;
      *last = LAST_SPEC_OBJECT_TYPE;
      return;
    case IS_JS_ARRAY:
      *first = *last = JS_ARRAY_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}


// Returns the mask/tag pair for mask-and-compare checks.
void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  ASSERT(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      *mask = kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
default:
      UNREACHABLE();
  }
}


// If a dominating store transitions the checked object to one of the maps
// this check accepts, the check is redundant and can be removed.
void HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect,
                                           HValue* dominator) {
  ASSERT(side_effect == kChangesMaps);
  // TODO(mstarzinger): For now we specialize on HStoreNamedField, but once
  // type information is rich enough we should generalize this to any HType
  // for which the map is known.
  if (HasNoUses() && dominator->IsStoreNamedField()) {
    HStoreNamedField* store = HStoreNamedField::cast(dominator);
    if (!store->has_transition() || store->object() != value()) return;
    HConstant* transition = HConstant::cast(store->transition());
    for (int i = 0; i < map_set()->length(); i++) {
      if (transition->UniqueValueIdsMatch(map_unique_ids_.at(i))) {
        DeleteAndReplaceWith(NULL);
        return;
      }
    }
  }
}


// Prints "<value> [map1,map2,...]" plus whether the check is omitted.
void HCheckMaps::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" [%p", *map_set()->first());
  for (int i = 1; i < map_set()->length(); ++i) {
    stream->Add(",%p", *map_set()->at(i));
  }
  stream->Add("]%s", CanOmitMapChecks() ? "(omitted)" : "");
}


void HCheckFunction::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" %p", *target());
}


// Returns NULL (removing the check) when the value is a constant already
// known to be the expected function.
HValue* HCheckFunction::Canonicalize() {
  return (value()->IsConstant() &&
          HConstant::cast(value())->UniqueValueIdsMatch(target_unique_id_))
      ? NULL
      : this;
}


const char* HCheckInstanceType::GetCheckName() {
  switch (check_) {
    case IS_SPEC_OBJECT: return "object";
    case IS_JS_ARRAY: return "array";
    case IS_STRING: return "string";
    case IS_INTERNALIZED_STRING: return "internalized_string";
  }
  UNREACHABLE();
  return "";
}


void HCheckInstanceType::PrintDataTo(StringStream* stream) {
  stream->Add("%s ", GetCheckName());
  HUnaryOperation::PrintDataTo(stream);
}


void HCallStub::PrintDataTo(StringStream* stream) {
  stream->Add("%s ",
              CodeStub::MajorName(major_key_, false));
  HUnaryCall::PrintDataTo(stream);
}


void HInstanceOf::PrintDataTo(StringStream* stream) {
  left()->PrintNameTo(stream);
  stream->Add(" ");
  right()->PrintNameTo(stream);
  stream->Add(" ");
  context()->PrintNameTo(stream);
}


// Default range: the full Smi range for Smi-typed values, otherwise an
// unconstrained range that may be minus zero unless all uses truncate.
Range* HValue::InferRange(Zone* zone) {
  Range* result;
  if (representation().IsSmi() || type().IsSmi()) {
    result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
    result->set_can_be_minus_zero(false);
  } else {
    result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
    // TODO(jkummerow): The range cannot be minus zero when the upper type
    // bound is Integer32.
  }
  return result;
}


// Range of a representation change: copies the input range where
// available; may also prove the result is a Smi and drop the
// new-space-promotion GVN effect.
Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearGVNFlag(kChangesNewSpacePromotion);
  }
  Range* result = (input_range != NULL)
      ?
input_range->Copy(zone)
      : HValue::InferRange(zone);
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}


// An int32 constant has the trivial one-point range.
Range* HConstant::InferRange(Zone* zone) {
  if (has_int32_value_) {
    Range* result = new(zone) Range(int32_value_, int32_value_);
    result->set_can_be_minus_zero(false);
    return result;
  }
  return HValue::InferRange(zone);
}


// Loop-header phis get the widest range for their representation (their
// real range depends on back edges not yet processed); other phis take
// the union of their operands' ranges.
Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}


// Interval addition; kCanOverflow is cleared when the sum provably fits
// or when all uses truncate anyway. The result can only be -0 when both
// inputs can be -0.
Range* HAdd::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->AddAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeMinusZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


// Interval subtraction; the result can only be -0 when the left input can
// be -0 and the right input can be 0.
Range* HSub::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->SubAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


// Interval multiplication; note that overflow cannot be cleared merely
// because all uses truncate (see the comment below). -0 arises from
// 0 * negative in either operand order.
Range* HMul::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->MulAndCheckOverflow(r, b)) {
      // Clearing the kCanOverflow flag when kAllUsesAreTruncatingToInt32
      // would be wrong, because truncated integer multiplication is too
      // precise and therefore not the same as converting to Double and back.
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               ((a->CanBeZero() && b->CanBeNegative()) ||
                                (a->CanBeNegative() && b->CanBeZero())));
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


// Division: the result range stays unconstrained, but the overflow
// (kMinInt / -1) and div-by-zero flags are cleared when provably safe.
Range* HDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}


Range* HMod::InferRange(Zone* zone) {
  if
(representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();

    // The magnitude of the modulus is bounded by the right operand. Note that
    // apart from the cases involving kMinInt, the calculation below is the
    // same as Max(Abs(b->lower()), Abs(b->upper())) - 1.
    int32_t positive_bound = -(Min(NegAbs(b->lower()), NegAbs(b->upper())) + 1);

    // The result of the modulo operation has the sign of its left operand.
    bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
    Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
                                    a->CanBePositive() ? positive_bound : 0);

    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  left_can_be_negative);

    // kMinInt % -1 is the only overflowing combination.
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}


// Tries to recognize phi as an induction variable of the form
// phi = [base, phi +/- constant]; returns the collected data or NULL.
InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
  if (phi->block()->loop_information() == NULL) return NULL;
  if (phi->OperandCount() != 2) return NULL;
  int32_t candidate_increment;

  // Either operand may be the incremented value; try both orders.
  candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
  }

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
  }

  return NULL;
}


/*
 * This function tries to match the following patterns (and all the relevant
 * variants related to |, & and + being commutative):
 * base | constant_or_mask
 * base & constant_and_mask
 * (base + constant_offset) & constant_and_mask
 * (base - constant_offset) & constant_and_mask
 */
void InductionVariableData::DecomposeBitwise(
    HValue* value,
    BitwiseDecompositionResult* result) {
  HValue* base = IgnoreOsrValue(value);
  result->base = value;

  if (!base->representation().IsInteger32()) return;

  if (base->IsBitwise()) {
    bool allow_offset = false;
    int32_t mask = 0;

    HBitwise* bitwise = HBitwise::cast(base);
    if (bitwise->right()->IsInteger32Constant()) {
      mask = bitwise->right()->GetInteger32Constant();
      base = bitwise->left();
    } else if (bitwise->left()->IsInteger32Constant()) {
      mask = bitwise->left()->GetInteger32Constant();
      base = bitwise->right();
    } else {
      return;
    }
    if (bitwise->op() == Token::BIT_AND) {
      result->and_mask = mask;
      allow_offset = true;
    } else if (bitwise->op() == Token::BIT_OR) {
      result->or_mask = mask;
    } else {
      return;
    }

    result->context = bitwise->context();

    // A constant additive offset is only looked through for AND masks.
    if (allow_offset) {
      if (base->IsAdd()) {
        HAdd* add = HAdd::cast(base);
        if (add->right()->IsInteger32Constant()) {
          base = add->left();
        } else if (add->left()->IsInteger32Constant()) {
          base = add->right();
        }
      } else if (base->IsSub()) {
        HSub* sub = HSub::cast(base);
        if (sub->right()->IsInteger32Constant()) {
          base = sub->left();
        }
      }
    }

    result->base = base;
  }
}


// Registers a bounds check related to this induction variable, grouping
// checks by the length value they compare against. Checks outside the
// limit's validity region or outside the loop are ignored.
void InductionVariableData::AddCheck(HBoundsCheck* check,
                                     int32_t upper_limit) {
  ASSERT(limit_validity() != NULL);
  if (limit_validity() != check->block() &&
      !limit_validity()->Dominates(check->block())) return;
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      check->block()->current_loop())) return;

  ChecksRelatedToLength* length_checks = checks();
  while (length_checks !=
NULL) {
    if (length_checks->length() == check->length()) break;
    length_checks = length_checks->next();
  }
  // No existing group for this length value: start a new one.
  if (length_checks == NULL) {
    length_checks = new(check->block()->zone())
        ChecksRelatedToLength(check->length(), checks());
    checks_ = length_checks;
  }

  length_checks->AddCheck(check, upper_limit);
}


// Propagates the final upper limit to every check collected in the block
// that is being closed.
void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
  if (checks() != NULL) {
    InductionVariableCheck* c = checks();
    HBasicBlock* current_block = c->check()->block();
    while (c != NULL && c->check()->block() == current_block) {
      c->set_upper_limit(current_upper_limit_);
      c = c->next();
    }
  }
}


// Rewrites the first bounds check of the current block to use a new index
// of the form "index_base <token> mask". The constant and the bitwise
// instruction are materialized on first use and re-pointed afterwards.
void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
    Token::Value token,
    int32_t mask,
    HValue* index_base,
    HValue* context) {
  ASSERT(first_check_in_block() != NULL);
  HValue* previous_index = first_check_in_block()->index();
  ASSERT(context != NULL);

  Zone* zone = index_base->block()->graph()->zone();
  set_added_constant(HConstant::New(zone, context, mask));
  if (added_index() != NULL) {
    added_constant()->InsertBefore(added_index());
  } else {
    added_constant()->InsertBefore(first_check_in_block());
  }

  if (added_index() == NULL) {
    first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
    HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
                                            added_constant());
    ASSERT(new_index->IsBitwise());
    new_index->ClearAllSideEffects();
    new_index->AssumeRepresentation(Representation::Integer32());
    set_added_index(HBitwise::cast(new_index));
    added_index()->InsertBefore(first_check_in_block());
  }
  ASSERT(added_index()->op() == token);

  // Re-point the (possibly pre-existing) bitwise instruction at the new
  // base and mask, and make the check use it as its index.
  added_index()->SetOperandAt(1, index_base);
  added_index()->SetOperandAt(2, added_constant());
  first_check_in_block()->SetOperandAt(0, added_index());
  if (previous_index->UseCount() == 0) {
    previous_index->DeleteAndReplaceWith(NULL);
  }
}

// Adds a bounds check to the current block's group, trying to subsume it
// under an already-emitted check via its and/or-mask decomposition.
void InductionVariableData::ChecksRelatedToLength::AddCheck(
    HBoundsCheck* check,
    int32_t upper_limit) {
  BitwiseDecompositionResult decomposition;
  InductionVariableData::DecomposeBitwise(check->index(), &decomposition);

  if (first_check_in_block() == NULL ||
      first_check_in_block()->block() != check->block()) {
    // First check in a new block: reset the per-block state.
    CloseCurrentBlock();

    first_check_in_block_ = check;
    set_added_index(NULL);
    set_added_constant(NULL);
    current_and_mask_in_block_ = decomposition.and_mask;
    current_or_mask_in_block_ = decomposition.or_mask;
    current_upper_limit_ = upper_limit;

    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
    return;
  }

  if (upper_limit > current_upper_limit()) {
    current_upper_limit_ = upper_limit;
  }

  // Widen the block's AND-masked index if this check's mask is larger.
  if (decomposition.and_mask != 0 &&
      current_or_mask_in_block() == 0) {
    if (current_and_mask_in_block() == 0 ||
        decomposition.and_mask > current_and_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_AND,
                                decomposition.and_mask,
                                decomposition.base,
                                decomposition.context);
      current_and_mask_in_block_ = decomposition.and_mask;
    }
    check->set_skip_check();
  }
  if (current_and_mask_in_block() == 0) {
    if (decomposition.or_mask > current_or_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_OR,
                                decomposition.or_mask,
                                decomposition.base,
                                decomposition.context);
      current_or_mask_in_block_ = decomposition.or_mask;
    }
    check->set_skip_check();
  }

  // The check could not be subsumed: record it for later processing.
  if (!check->skip_check()) {
    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check,
checks_, upper_limit);
    checks_ = new_check;
  }
}


/*
 * This method detects if phi is an induction variable, with phi_operand as
 * its "incremented" value (the other operand would be the "base" value).
 *
 * It checks if phi_operand has the form "phi + constant".
 * If yes, the constant is the increment that the induction variable gets at
 * every loop iteration.
 * Otherwise it returns 0.
 */
int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
                                                HValue* phi_operand) {
  if (!phi_operand->representation().IsInteger32()) return 0;

  if (phi_operand->IsAdd()) {
    HAdd* operation = HAdd::cast(phi_operand);
    if (operation->left() == phi &&
        operation->right()->IsInteger32Constant()) {
      return operation->right()->GetInteger32Constant();
    } else if (operation->right() == phi &&
               operation->left()->IsInteger32Constant()) {
      return operation->left()->GetInteger32Constant();
    }
  } else if (phi_operand->IsSub()) {
    // "phi - constant" counts as an increment of -constant; the constant
    // must be on the right.
    HSub* operation = HSub::cast(phi_operand);
    if (operation->left() == phi &&
        operation->right()->IsInteger32Constant()) {
      return -operation->right()->GetInteger32Constant();
    }
  }

  return 0;
}


/*
 * Swaps the information in "update" with the one contained in "this".
 * The swapping is important because this method is used while doing a
 * dominator tree traversal, and "update" will retain the old data that
 * will be restored while backtracking.
 */
void InductionVariableData::UpdateAdditionalLimit(
    InductionVariableLimitUpdate* update) {
  ASSERT(update->updated_variable == this);
  if (update->limit_is_upper) {
    swap(&additional_upper_limit_, &update->limit);
    swap(&additional_upper_limit_is_included_, &update->limit_is_included);
  } else {
    swap(&additional_lower_limit_, &update->limit);
    swap(&additional_lower_limit_is_included_, &update->limit_is_included);
  }
}


// Combines the constant limits and the bitwise masks into a single
// conservative upper bound for the induction variable, or kNoLimit.
int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
                                                 int32_t or_mask) {
  // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
  const int32_t MAX_LIMIT = 1 << 30;

  int32_t result = MAX_LIMIT;

  if (limit() != NULL &&
      limit()->IsInteger32Constant()) {
    int32_t limit_value = limit()->GetInteger32Constant();
    if (!limit_included()) {
      limit_value--;
    }
    if (limit_value < result) result = limit_value;
  }

  if (additional_upper_limit() != NULL &&
      additional_upper_limit()->IsInteger32Constant()) {
    int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
    if (!additional_upper_limit_is_included()) {
      limit_value--;
    }
    if (limit_value < result) result = limit_value;
  }

  // A positive AND mask directly bounds the value from above.
  if (and_mask > 0 && and_mask < MAX_LIMIT) {
    if (and_mask < result) result = and_mask;
    return result;
  }

  // Add the effect of the or_mask.
  result |= or_mask;

  return result >= MAX_LIMIT ? kNoLimit : result;
}


// Looks through a two-operand phi that merges an OSR entry value with the
// regular value, returning the non-OSR operand.
HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
  if (!v->IsPhi()) return v;
  HPhi* phi = HPhi::cast(v);
  if (phi->OperandCount() != 2) return v;
  if (phi->OperandAt(0)->block()->is_osr_entry()) {
    return phi->OperandAt(1);
  } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
    return phi->OperandAt(0);
  } else {
    return v;
  }
}


// Returns the induction-variable data attached to v (ignoring an OSR
// merge phi), or NULL if v is not a recognized induction variable.
InductionVariableData* InductionVariableData::GetInductionVariableData(
    HValue* v) {
  v = IgnoreOsrValue(v);
  if (v->IsPhi()) {
    return HPhi::cast(v)->induction_variable_data();
  }
  return NULL;
}


/*
 * Check if a conditional branch to "current_branch" with token "token" is
 * the branch that keeps the induction loop running (and, conversely, will
 * terminate it if the "other_branch" is taken).
 *
 * Three conditions must be met:
 * - "current_branch" must be in the induction loop.
 * - "other_branch" must be out of the induction loop.
 * - "token" and the induction increment must be "compatible": the token should
 *   be a condition that keeps the execution inside the loop until the limit is
 *   reached.
 */
bool InductionVariableData::CheckIfBranchIsLoopGuard(
    Token::Value token,
    HBasicBlock* current_branch,
    HBasicBlock* other_branch) {
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      current_branch->current_loop())) {
    return false;
  }

  if (phi()->block()->current_loop()->IsNestedInThisLoop(
      other_branch->current_loop())) {
    return false;
  }

  // The comparison direction must match the sign of the increment; an
  // inequality only guards the loop for unit steps.
  if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
    return true;
  }
  if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
    return true;
  }
  if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
    return true;
  }

  return false;
}


// Extracts the induction-variable limit implied by the numeric
// compare-and-branch ending the single predecessor of "block", if any.
void InductionVariableData::ComputeLimitFromPredecessorBlock(
    HBasicBlock* block,
    LimitFromPredecessorBlock* result) {
  if (block->predecessors()->length() != 1) return;
  HBasicBlock* predecessor = block->predecessors()->at(0);
  HInstruction* end = predecessor->last();

  if (!end->IsCompareNumericAndBranch()) return;
  HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);

  Token::Value token = branch->token();
  if (!Token::IsArithmeticCompareOp(token)) return;

  HBasicBlock* other_target;
  if (block == branch->SuccessorAt(0)) {
    other_target = branch->SuccessorAt(1);
  } else {
    other_target = branch->SuccessorAt(0);
    // Reaching "block" through the false successor negates the condition.
    token = Token::NegateCompareOp(token);
    ASSERT(block == branch->SuccessorAt(1));
  }

  InductionVariableData* data;

  // The induction variable may be on either side of the comparison;
  // reverse the token when it is on the right.
  data = GetInductionVariableData(branch->left());
  HValue* limit = branch->right();
  if (data == NULL) {
    data = GetInductionVariableData(branch->right());
    token = Token::ReverseCompareOp(token);
    limit = branch->left();
  }

  if (data != NULL) {
    result->variable = data;
    result->token = token;
    result->limit = limit;
    result->other_target = other_target;
  }
}


/*
 * Compute the limit that is imposed on an induction variable when entering
 * "block" (if any).
 * If the limit is the "proper" induction limit (the one that makes the loop
 * terminate when the induction variable reaches it) it is stored directly in
 * the induction variable data.
 * Otherwise the limit is written in "additional_limit" and the method
 * returns true.
 */
bool InductionVariableData::ComputeInductionVariableLimit(
    HBasicBlock* block,
    InductionVariableLimitUpdate* additional_limit) {
  LimitFromPredecessorBlock limit;
  ComputeLimitFromPredecessorBlock(block, &limit);
  if (!limit.LimitIsValid()) return false;

  if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
                                               block,
                                               limit.other_target)) {
    limit.variable->limit_ = limit.limit;
    limit.variable->limit_included_ = limit.LimitIsIncluded();
    limit.variable->limit_validity_ = block;
    limit.variable->induction_exit_block_ = block->predecessors()->at(0);
    limit.variable->induction_exit_target_ = limit.other_target;
    return false;
  } else {
    additional_limit->updated_variable = limit.variable;
    additional_limit->limit = limit.limit;
    additional_limit->limit_is_upper = limit.LimitIsUpper();
    additional_limit->limit_is_included = limit.LimitIsIncluded();
    return true;
  }
}


// Combined min/max of the two operand ranges.
Range* HMathMinMax::InferRange(Zone* zone) {
  if (representation().IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (operation_ == kMathMax) {
      res->CombinedMax(b);
    } else {
      ASSERT(operation_ == kMathMin);
      res->CombinedMin(b);
    }
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


// Prints the phi's operands plus per-representation use counts.
void HPhi::PrintTo(StringStream* stream) {
  stream->Add("[");
  for (int i =
0; i < OperandCount(); ++i) { 2139 HValue* value = OperandAt(i); 2140 stream->Add(" "); 2141 value->PrintNameTo(stream); 2142 stream->Add(" "); 2143 } 2144 stream->Add(" uses:%d_%ds_%di_%dd_%dt", 2145 UseCount(), 2146 smi_non_phi_uses() + smi_indirect_uses(), 2147 int32_non_phi_uses() + int32_indirect_uses(), 2148 double_non_phi_uses() + double_indirect_uses(), 2149 tagged_non_phi_uses() + tagged_indirect_uses()); 2150 PrintRangeTo(stream); 2151 PrintTypeTo(stream); 2152 stream->Add("]"); 2153 } 2154 2155 2156 void HPhi::AddInput(HValue* value) { 2157 inputs_.Add(NULL, value->block()->zone()); 2158 SetOperandAt(OperandCount() - 1, value); 2159 // Mark phis that may have 'arguments' directly or indirectly as an operand. 2160 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) { 2161 SetFlag(kIsArguments); 2162 } 2163 } 2164 2165 2166 bool HPhi::HasRealUses() { 2167 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 2168 if (!it.value()->IsPhi()) return true; 2169 } 2170 return false; 2171 } 2172 2173 2174 HValue* HPhi::GetRedundantReplacement() { 2175 HValue* candidate = NULL; 2176 int count = OperandCount(); 2177 int position = 0; 2178 while (position < count && candidate == NULL) { 2179 HValue* current = OperandAt(position++); 2180 if (current != this) candidate = current; 2181 } 2182 while (position < count) { 2183 HValue* current = OperandAt(position++); 2184 if (current != this && current != candidate) return NULL; 2185 } 2186 ASSERT(candidate != this); 2187 return candidate; 2188 } 2189 2190 2191 void HPhi::DeleteFromGraph() { 2192 ASSERT(block() != NULL); 2193 block()->RemovePhi(this); 2194 ASSERT(block() == NULL); 2195 } 2196 2197 2198 void HPhi::InitRealUses(int phi_id) { 2199 // Initialize real uses. 2200 phi_id_ = phi_id; 2201 // Compute a conservative approximation of truncating uses before inferring 2202 // representations. The proper, exact computation will be done later, when 2203 // inserting representation changes. 
2204 SetFlag(kTruncatingToSmi); 2205 SetFlag(kTruncatingToInt32); 2206 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 2207 HValue* value = it.value(); 2208 if (!value->IsPhi()) { 2209 Representation rep = value->observed_input_representation(it.index()); 2210 non_phi_uses_[rep.kind()] += value->LoopWeight(); 2211 if (FLAG_trace_representation) { 2212 PrintF("#%d Phi is used by real #%d %s as %s\n", 2213 id(), value->id(), value->Mnemonic(), rep.Mnemonic()); 2214 } 2215 if (!value->IsSimulate()) { 2216 if (!value->CheckFlag(kTruncatingToSmi)) { 2217 ClearFlag(kTruncatingToSmi); 2218 } 2219 if (!value->CheckFlag(kTruncatingToInt32)) { 2220 ClearFlag(kTruncatingToInt32); 2221 } 2222 } 2223 } 2224 } 2225 } 2226 2227 2228 void HPhi::AddNonPhiUsesFrom(HPhi* other) { 2229 if (FLAG_trace_representation) { 2230 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n", 2231 id(), other->id(), 2232 other->non_phi_uses_[Representation::kSmi], 2233 other->non_phi_uses_[Representation::kInteger32], 2234 other->non_phi_uses_[Representation::kDouble], 2235 other->non_phi_uses_[Representation::kTagged]); 2236 } 2237 2238 for (int i = 0; i < Representation::kNumRepresentations; i++) { 2239 indirect_uses_[i] += other->non_phi_uses_[i]; 2240 } 2241 } 2242 2243 2244 void HPhi::AddIndirectUsesTo(int* dest) { 2245 for (int i = 0; i < Representation::kNumRepresentations; i++) { 2246 dest[i] += indirect_uses_[i]; 2247 } 2248 } 2249 2250 2251 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) { 2252 while (!list->is_empty()) { 2253 HSimulate* from = list->RemoveLast(); 2254 ZoneList<HValue*>* from_values = &from->values_; 2255 for (int i = 0; i < from_values->length(); ++i) { 2256 if (from->HasAssignedIndexAt(i)) { 2257 int index = from->GetAssignedIndexAt(i); 2258 if (HasValueForIndex(index)) continue; 2259 AddAssignedValue(index, from_values->at(i)); 2260 } else { 2261 if (pop_count_ > 0) { 2262 pop_count_--; 2263 } else { 2264 AddPushedValue(from_values->at(i)); 2265 } 
2266 } 2267 } 2268 pop_count_ += from->pop_count_; 2269 from->DeleteAndReplaceWith(NULL); 2270 } 2271 } 2272 2273 2274 void HSimulate::PrintDataTo(StringStream* stream) { 2275 stream->Add("id=%d", ast_id().ToInt()); 2276 if (pop_count_ > 0) stream->Add(" pop %d", pop_count_); 2277 if (values_.length() > 0) { 2278 if (pop_count_ > 0) stream->Add(" /"); 2279 for (int i = values_.length() - 1; i >= 0; --i) { 2280 if (HasAssignedIndexAt(i)) { 2281 stream->Add(" var[%d] = ", GetAssignedIndexAt(i)); 2282 } else { 2283 stream->Add(" push "); 2284 } 2285 values_[i]->PrintNameTo(stream); 2286 if (i > 0) stream->Add(","); 2287 } 2288 } 2289 } 2290 2291 2292 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target, 2293 Zone* zone) { 2294 ASSERT(return_target->IsInlineReturnTarget()); 2295 return_targets_.Add(return_target, zone); 2296 } 2297 2298 2299 void HEnterInlined::PrintDataTo(StringStream* stream) { 2300 SmartArrayPointer<char> name = function()->debug_name()->ToCString(); 2301 stream->Add("%s, id=%d", *name, function()->id().ToInt()); 2302 } 2303 2304 2305 static bool IsInteger32(double value) { 2306 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value)); 2307 return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value); 2308 } 2309 2310 2311 HConstant::HConstant(Handle<Object> handle, Representation r) 2312 : HTemplateInstruction<0>(HType::TypeFromValue(handle)), 2313 handle_(handle), 2314 unique_id_(), 2315 has_smi_value_(false), 2316 has_int32_value_(false), 2317 has_double_value_(false), 2318 has_external_reference_value_(false), 2319 is_internalized_string_(false), 2320 is_not_in_new_space_(true), 2321 is_cell_(false), 2322 boolean_value_(handle->BooleanValue()) { 2323 if (handle_->IsHeapObject()) { 2324 Heap* heap = Handle<HeapObject>::cast(handle)->GetHeap(); 2325 is_not_in_new_space_ = !heap->InNewSpace(*handle); 2326 } 2327 if (handle_->IsNumber()) { 2328 double n = handle_->Number(); 2329 has_int32_value_ = 
IsInteger32(n); 2330 int32_value_ = DoubleToInt32(n); 2331 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_); 2332 double_value_ = n; 2333 has_double_value_ = true; 2334 } else { 2335 is_internalized_string_ = handle_->IsInternalizedString(); 2336 } 2337 2338 is_cell_ = !handle_.is_null() && 2339 (handle_->IsCell() || handle_->IsPropertyCell()); 2340 Initialize(r); 2341 } 2342 2343 2344 HConstant::HConstant(Handle<Object> handle, 2345 UniqueValueId unique_id, 2346 Representation r, 2347 HType type, 2348 bool is_internalize_string, 2349 bool is_not_in_new_space, 2350 bool is_cell, 2351 bool boolean_value) 2352 : HTemplateInstruction<0>(type), 2353 handle_(handle), 2354 unique_id_(unique_id), 2355 has_smi_value_(false), 2356 has_int32_value_(false), 2357 has_double_value_(false), 2358 has_external_reference_value_(false), 2359 is_internalized_string_(is_internalize_string), 2360 is_not_in_new_space_(is_not_in_new_space), 2361 is_cell_(is_cell), 2362 boolean_value_(boolean_value) { 2363 ASSERT(!handle.is_null()); 2364 ASSERT(!type.IsTaggedNumber()); 2365 Initialize(r); 2366 } 2367 2368 2369 HConstant::HConstant(Handle<Map> handle, 2370 UniqueValueId unique_id) 2371 : HTemplateInstruction<0>(HType::Tagged()), 2372 handle_(handle), 2373 unique_id_(unique_id), 2374 has_smi_value_(false), 2375 has_int32_value_(false), 2376 has_double_value_(false), 2377 has_external_reference_value_(false), 2378 is_internalized_string_(false), 2379 is_not_in_new_space_(true), 2380 is_cell_(false), 2381 boolean_value_(false) { 2382 ASSERT(!handle.is_null()); 2383 Initialize(Representation::Tagged()); 2384 } 2385 2386 2387 HConstant::HConstant(int32_t integer_value, 2388 Representation r, 2389 bool is_not_in_new_space, 2390 Handle<Object> optional_handle) 2391 : handle_(optional_handle), 2392 unique_id_(), 2393 has_smi_value_(Smi::IsValid(integer_value)), 2394 has_int32_value_(true), 2395 has_double_value_(true), 2396 has_external_reference_value_(false), 2397 
is_internalized_string_(false), 2398 is_not_in_new_space_(is_not_in_new_space), 2399 is_cell_(false), 2400 boolean_value_(integer_value != 0), 2401 int32_value_(integer_value), 2402 double_value_(FastI2D(integer_value)) { 2403 set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber()); 2404 Initialize(r); 2405 } 2406 2407 2408 HConstant::HConstant(double double_value, 2409 Representation r, 2410 bool is_not_in_new_space, 2411 Handle<Object> optional_handle) 2412 : handle_(optional_handle), 2413 unique_id_(), 2414 has_int32_value_(IsInteger32(double_value)), 2415 has_double_value_(true), 2416 has_external_reference_value_(false), 2417 is_internalized_string_(false), 2418 is_not_in_new_space_(is_not_in_new_space), 2419 is_cell_(false), 2420 boolean_value_(double_value != 0 && !std::isnan(double_value)), 2421 int32_value_(DoubleToInt32(double_value)), 2422 double_value_(double_value) { 2423 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_); 2424 set_type(has_smi_value_ ? 
HType::Smi() : HType::TaggedNumber()); 2425 Initialize(r); 2426 } 2427 2428 2429 HConstant::HConstant(ExternalReference reference) 2430 : HTemplateInstruction<0>(HType::None()), 2431 has_smi_value_(false), 2432 has_int32_value_(false), 2433 has_double_value_(false), 2434 has_external_reference_value_(true), 2435 is_internalized_string_(false), 2436 is_not_in_new_space_(true), 2437 is_cell_(false), 2438 boolean_value_(true), 2439 external_reference_value_(reference) { 2440 Initialize(Representation::External()); 2441 } 2442 2443 2444 static void PrepareConstant(Handle<Object> object) { 2445 if (!object->IsJSObject()) return; 2446 Handle<JSObject> js_object = Handle<JSObject>::cast(object); 2447 if (!js_object->map()->is_deprecated()) return; 2448 JSObject::TryMigrateInstance(js_object); 2449 } 2450 2451 2452 void HConstant::Initialize(Representation r) { 2453 if (r.IsNone()) { 2454 if (has_smi_value_ && kSmiValueSize == 31) { 2455 r = Representation::Smi(); 2456 } else if (has_int32_value_) { 2457 r = Representation::Integer32(); 2458 } else if (has_double_value_) { 2459 r = Representation::Double(); 2460 } else if (has_external_reference_value_) { 2461 r = Representation::External(); 2462 } else { 2463 PrepareConstant(handle_); 2464 r = Representation::Tagged(); 2465 } 2466 } 2467 set_representation(r); 2468 SetFlag(kUseGVN); 2469 } 2470 2471 2472 bool HConstant::EmitAtUses() { 2473 ASSERT(IsLinked()); 2474 if (block()->graph()->has_osr()) { 2475 return block()->graph()->IsStandardConstant(this); 2476 } 2477 if (IsCell()) return false; 2478 if (representation().IsDouble()) return false; 2479 return true; 2480 } 2481 2482 2483 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const { 2484 if (r.IsSmi() && !has_smi_value_) return NULL; 2485 if (r.IsInteger32() && !has_int32_value_) return NULL; 2486 if (r.IsDouble() && !has_double_value_) return NULL; 2487 if (r.IsExternal() && !has_external_reference_value_) return NULL; 2488 if 
(has_int32_value_) { 2489 return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, handle_); 2490 } 2491 if (has_double_value_) { 2492 return new(zone) HConstant(double_value_, r, is_not_in_new_space_, handle_); 2493 } 2494 if (has_external_reference_value_) { 2495 return new(zone) HConstant(external_reference_value_); 2496 } 2497 ASSERT(!handle_.is_null()); 2498 return new(zone) HConstant(handle_, 2499 unique_id_, 2500 r, 2501 type_, 2502 is_internalized_string_, 2503 is_not_in_new_space_, 2504 is_cell_, 2505 boolean_value_); 2506 } 2507 2508 2509 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) { 2510 HConstant* res = NULL; 2511 if (has_int32_value_) { 2512 res = new(zone) HConstant(int32_value_, 2513 Representation::Integer32(), 2514 is_not_in_new_space_, 2515 handle_); 2516 } else if (has_double_value_) { 2517 res = new(zone) HConstant(DoubleToInt32(double_value_), 2518 Representation::Integer32(), 2519 is_not_in_new_space_, 2520 handle_); 2521 } else { 2522 ASSERT(!HasNumberValue()); 2523 Maybe<HConstant*> number = CopyToTruncatedNumber(zone); 2524 if (number.has_value) return number.value->CopyToTruncatedInt32(zone); 2525 } 2526 return Maybe<HConstant*>(res != NULL, res); 2527 } 2528 2529 2530 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) { 2531 HConstant* res = NULL; 2532 if (handle()->IsBoolean()) { 2533 res = handle()->BooleanValue() ? 
2534 new(zone) HConstant(1) : new(zone) HConstant(0); 2535 } else if (handle()->IsUndefined()) { 2536 res = new(zone) HConstant(OS::nan_value()); 2537 } else if (handle()->IsNull()) { 2538 res = new(zone) HConstant(0); 2539 } 2540 return Maybe<HConstant*>(res != NULL, res); 2541 } 2542 2543 2544 void HConstant::PrintDataTo(StringStream* stream) { 2545 if (has_int32_value_) { 2546 stream->Add("%d ", int32_value_); 2547 } else if (has_double_value_) { 2548 stream->Add("%f ", FmtElm(double_value_)); 2549 } else if (has_external_reference_value_) { 2550 stream->Add("%p ", reinterpret_cast<void*>( 2551 external_reference_value_.address())); 2552 } else { 2553 handle()->ShortPrint(stream); 2554 } 2555 } 2556 2557 2558 void HBinaryOperation::PrintDataTo(StringStream* stream) { 2559 left()->PrintNameTo(stream); 2560 stream->Add(" "); 2561 right()->PrintNameTo(stream); 2562 if (CheckFlag(kCanOverflow)) stream->Add(" !"); 2563 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?"); 2564 } 2565 2566 2567 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) { 2568 ASSERT(CheckFlag(kFlexibleRepresentation)); 2569 Representation new_rep = RepresentationFromInputs(); 2570 UpdateRepresentation(new_rep, h_infer, "inputs"); 2571 if (observed_output_representation_.IsNone()) { 2572 new_rep = RepresentationFromUses(); 2573 UpdateRepresentation(new_rep, h_infer, "uses"); 2574 } else { 2575 new_rep = RepresentationFromOutput(); 2576 UpdateRepresentation(new_rep, h_infer, "output"); 2577 } 2578 2579 if (representation().IsSmi() && HasNonSmiUse()) { 2580 UpdateRepresentation( 2581 Representation::Integer32(), h_infer, "use requirements"); 2582 } 2583 } 2584 2585 2586 Representation HBinaryOperation::RepresentationFromInputs() { 2587 // Determine the worst case of observed input representations and 2588 // the currently assumed output representation. 
2589 Representation rep = representation(); 2590 for (int i = 1; i <= 2; ++i) { 2591 rep = rep.generalize(observed_input_representation(i)); 2592 } 2593 // If any of the actual input representation is more general than what we 2594 // have so far but not Tagged, use that representation instead. 2595 Representation left_rep = left()->representation(); 2596 Representation right_rep = right()->representation(); 2597 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep); 2598 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep); 2599 2600 return rep; 2601 } 2602 2603 2604 bool HBinaryOperation::IgnoreObservedOutputRepresentation( 2605 Representation current_rep) { 2606 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) || 2607 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) && 2608 // Mul in Integer32 mode would be too precise. 2609 !this->IsMul(); 2610 } 2611 2612 2613 Representation HBinaryOperation::RepresentationFromOutput() { 2614 Representation rep = representation(); 2615 // Consider observed output representation, but ignore it if it's Double, 2616 // this instruction is not a division, and all its uses are truncating 2617 // to Integer32. 2618 if (observed_output_representation_.is_more_general_than(rep) && 2619 !IgnoreObservedOutputRepresentation(rep)) { 2620 return observed_output_representation_; 2621 } 2622 return Representation::None(); 2623 } 2624 2625 2626 void HBinaryOperation::AssumeRepresentation(Representation r) { 2627 set_observed_input_representation(1, r); 2628 set_observed_input_representation(2, r); 2629 HValue::AssumeRepresentation(r); 2630 } 2631 2632 2633 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) { 2634 ASSERT(CheckFlag(kFlexibleRepresentation)); 2635 Representation new_rep = RepresentationFromInputs(); 2636 UpdateRepresentation(new_rep, h_infer, "inputs"); 2637 // Do not care about uses. 
2638 } 2639 2640 2641 Range* HBitwise::InferRange(Zone* zone) { 2642 if (op() == Token::BIT_XOR) { 2643 if (left()->HasRange() && right()->HasRange()) { 2644 // The maximum value has the high bit, and all bits below, set: 2645 // (1 << high) - 1. 2646 // If the range can be negative, the minimum int is a negative number with 2647 // the high bit, and all bits below, unset: 2648 // -(1 << high). 2649 // If it cannot be negative, conservatively choose 0 as minimum int. 2650 int64_t left_upper = left()->range()->upper(); 2651 int64_t left_lower = left()->range()->lower(); 2652 int64_t right_upper = right()->range()->upper(); 2653 int64_t right_lower = right()->range()->lower(); 2654 2655 if (left_upper < 0) left_upper = ~left_upper; 2656 if (left_lower < 0) left_lower = ~left_lower; 2657 if (right_upper < 0) right_upper = ~right_upper; 2658 if (right_lower < 0) right_lower = ~right_lower; 2659 2660 int high = MostSignificantBit( 2661 static_cast<uint32_t>( 2662 left_upper | left_lower | right_upper | right_lower)); 2663 2664 int64_t limit = 1; 2665 limit <<= high; 2666 int32_t min = (left()->range()->CanBeNegative() || 2667 right()->range()->CanBeNegative()) 2668 ? static_cast<int32_t>(-limit) : 0; 2669 return new(zone) Range(min, static_cast<int32_t>(limit - 1)); 2670 } 2671 Range* result = HValue::InferRange(zone); 2672 result->set_can_be_minus_zero(false); 2673 return result; 2674 } 2675 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff); 2676 int32_t left_mask = (left()->range() != NULL) 2677 ? left()->range()->Mask() 2678 : kDefaultMask; 2679 int32_t right_mask = (right()->range() != NULL) 2680 ? right()->range()->Mask() 2681 : kDefaultMask; 2682 int32_t result_mask = (op() == Token::BIT_AND) 2683 ? 
left_mask & right_mask 2684 : left_mask | right_mask; 2685 if (result_mask >= 0) return new(zone) Range(0, result_mask); 2686 2687 Range* result = HValue::InferRange(zone); 2688 result->set_can_be_minus_zero(false); 2689 return result; 2690 } 2691 2692 2693 Range* HSar::InferRange(Zone* zone) { 2694 if (right()->IsConstant()) { 2695 HConstant* c = HConstant::cast(right()); 2696 if (c->HasInteger32Value()) { 2697 Range* result = (left()->range() != NULL) 2698 ? left()->range()->Copy(zone) 2699 : new(zone) Range(); 2700 result->Sar(c->Integer32Value()); 2701 return result; 2702 } 2703 } 2704 return HValue::InferRange(zone); 2705 } 2706 2707 2708 Range* HShr::InferRange(Zone* zone) { 2709 if (right()->IsConstant()) { 2710 HConstant* c = HConstant::cast(right()); 2711 if (c->HasInteger32Value()) { 2712 int shift_count = c->Integer32Value() & 0x1f; 2713 if (left()->range()->CanBeNegative()) { 2714 // Only compute bounds if the result always fits into an int32. 2715 return (shift_count >= 1) 2716 ? new(zone) Range(0, 2717 static_cast<uint32_t>(0xffffffff) >> shift_count) 2718 : new(zone) Range(); 2719 } else { 2720 // For positive inputs we can use the >> operator. 2721 Range* result = (left()->range() != NULL) 2722 ? left()->range()->Copy(zone) 2723 : new(zone) Range(); 2724 result->Sar(c->Integer32Value()); 2725 return result; 2726 } 2727 } 2728 } 2729 return HValue::InferRange(zone); 2730 } 2731 2732 2733 Range* HShl::InferRange(Zone* zone) { 2734 if (right()->IsConstant()) { 2735 HConstant* c = HConstant::cast(right()); 2736 if (c->HasInteger32Value()) { 2737 Range* result = (left()->range() != NULL) 2738 ? 
left()->range()->Copy(zone) 2739 : new(zone) Range(); 2740 result->Shl(c->Integer32Value()); 2741 return result; 2742 } 2743 } 2744 return HValue::InferRange(zone); 2745 } 2746 2747 2748 Range* HLoadNamedField::InferRange(Zone* zone) { 2749 if (access().IsStringLength()) { 2750 return new(zone) Range(0, String::kMaxLength); 2751 } 2752 return HValue::InferRange(zone); 2753 } 2754 2755 2756 Range* HLoadKeyed::InferRange(Zone* zone) { 2757 switch (elements_kind()) { 2758 case EXTERNAL_PIXEL_ELEMENTS: 2759 return new(zone) Range(0, 255); 2760 case EXTERNAL_BYTE_ELEMENTS: 2761 return new(zone) Range(-128, 127); 2762 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 2763 return new(zone) Range(0, 255); 2764 case EXTERNAL_SHORT_ELEMENTS: 2765 return new(zone) Range(-32768, 32767); 2766 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 2767 return new(zone) Range(0, 65535); 2768 default: 2769 return HValue::InferRange(zone); 2770 } 2771 } 2772 2773 2774 void HCompareGeneric::PrintDataTo(StringStream* stream) { 2775 stream->Add(Token::Name(token())); 2776 stream->Add(" "); 2777 HBinaryOperation::PrintDataTo(stream); 2778 } 2779 2780 2781 void HStringCompareAndBranch::PrintDataTo(StringStream* stream) { 2782 stream->Add(Token::Name(token())); 2783 stream->Add(" "); 2784 HControlInstruction::PrintDataTo(stream); 2785 } 2786 2787 2788 void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) { 2789 stream->Add(Token::Name(token())); 2790 stream->Add(" "); 2791 left()->PrintNameTo(stream); 2792 stream->Add(" "); 2793 right()->PrintNameTo(stream); 2794 HControlInstruction::PrintDataTo(stream); 2795 } 2796 2797 2798 void HCompareObjectEqAndBranch::PrintDataTo(StringStream* stream) { 2799 left()->PrintNameTo(stream); 2800 stream->Add(" "); 2801 right()->PrintNameTo(stream); 2802 HControlInstruction::PrintDataTo(stream); 2803 } 2804 2805 2806 void HCompareHoleAndBranch::PrintDataTo(StringStream* stream) { 2807 object()->PrintNameTo(stream); 2808 HControlInstruction::PrintDataTo(stream); 2809 } 
// Hole comparison adopts the representation of its input.
void HCompareHoleAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(object()->representation());
}


void HGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", SuccessorAt(0)->block_id());
}


// Chooses smi/int32 when the observed inputs allow it, double otherwise.
void HCompareNumericAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  Representation observed_left = observed_input_representation(0);
  Representation observed_right = observed_input_representation(1);

  Representation rep = Representation::None();
  rep = rep.generalize(observed_left);
  rep = rep.generalize(observed_right);
  if (rep.IsNone() || rep.IsSmiOrInteger32()) {
    if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
    if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
  } else {
    rep = Representation::Double();
  }

  if (rep.IsDouble()) {
    // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
    // and !=) have special handling of undefined, e.g. undefined == undefined
    // is 'true'. Relational comparisons have a different semantic, first
    // calling ToPrimitive() on their arguments. The standard Crankshaft
    // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
    // inputs are doubles caused 'undefined' to be converted to NaN. That's
    // compatible out-of-the box with ordered relational comparisons (<, >, <=,
    // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
    // it is not consistent with the spec. For example, it would cause undefined
    // == undefined (should be true) to be evaluated as NaN == NaN
    // (false). Therefore, any comparisons other than ordered relational
    // comparisons must cause a deopt when one of their arguments is undefined.
    // See also v8:1434
    if (Token::IsOrderedRelationalCompareOp(token_)) {
      SetFlag(kAllowUndefinedAsNaN);
    }
  }
  ChangeRepresentation(rep);
}


void HParameter::PrintDataTo(StringStream* stream) {
  stream->Add("%u", index());
}


void HLoadNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  access_.PrintTo(stream);
  if (HasTypeCheck()) {
    stream->Add(" ");
    typecheck()->PrintNameTo(stream);
  }
}


// Creates a map check for "value"; when the map check can be omitted and the
// value is a constant that is already an instance of the map, the check is
// marked omittable.
HCheckMaps* HCheckMaps::New(Zone* zone,
                            HValue* context,
                            HValue* value,
                            Handle<Map> map,
                            CompilationInfo* info,
                            HValue* typecheck) {
  HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
  check_map->Add(map, zone);
  if (map->CanOmitMapChecks() &&
      value->IsConstant() &&
      HConstant::cast(value)->InstanceOf(map)) {
    check_map->omit(info);
  }
  return check_map;
}


// Lazily materializes unique ids for the checked maps (idempotent).
void HCheckMaps::FinalizeUniqueValueId() {
  if (!map_unique_ids_.is_empty()) return;
  Zone* zone = block()->zone();
  map_unique_ids_.Initialize(map_set_.length(), zone);
  for (int i = 0; i < map_set_.length(); i++) {
    map_unique_ids_.Add(UniqueValueId(map_set_.at(i)), zone);
  }
}


void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(".");
  stream->Add(*String::cast(*name())->ToCString());
}


void HLoadKeyed::PrintDataTo(StringStream* stream) {
  if (!is_external()) {
    elements()->PrintNameTo(stream);
  } else {
    ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
    elements()->PrintNameTo(stream);
    stream->Add(".");
    stream->Add(ElementsKindToString(elements_kind()));
  }

  stream->Add("[");
  key()->PrintNameTo(stream);
  if (IsDehoisted()) {
    stream->Add(" + %d]", index_offset());
  } else {
    stream->Add("]");
  }

  if (HasDependency()) {
    stream->Add(" ");
    dependency()->PrintNameTo(stream);
  }

  if (RequiresHoleCheck()) {
    stream->Add(" check_hole");
  }
}


// Returns true when some use needs to observe the hole value itself, i.e.
// the load must not blindly replace holes.
bool HLoadKeyed::UsesMustHandleHole() const {
  // Packed and external element kinds can never contain holes.
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if (IsExternalArrayElementsKind(elements_kind())) {
    return false;
  }

  if (hole_mode() == ALLOW_RETURN_HOLE) {
    if (IsFastDoubleElementsKind(elements_kind())) {
      return AllUsesCanTreatHoleAsNaN();
    }
    return true;
  }

  if (IsFastDoubleElementsKind(elements_kind())) {
    return false;
  }

  // Holes are only returned as tagged values.
  if (!representation().IsTagged()) {
    return false;
  }

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return false;
  }

  return true;
}


// Holes in double arrays may pass through as NaN only if every use accepts
// undefined-as-NaN.
bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
  return IsFastDoubleElementsKind(elements_kind()) &&
      CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
}


bool HLoadKeyed::RequiresHoleCheck() const {
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if (IsExternalArrayElementsKind(elements_kind())) {
    return false;
  }

  return !UsesMustHandleHole();
}


void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add("[");
  key()->PrintNameTo(stream);
  stream->Add("]");
}


HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use property name generated
  // by for-in statement as a key and rewrite them into fast property load
  // by index.
  if (key()->IsLoadKeyed()) {
    HLoadKeyed* key_load = HLoadKeyed::cast(key());
    if (key_load->elements()->IsForInCacheArray()) {
      HForInCacheArray* names_cache =
          HForInCacheArray::cast(key_load->elements());

      if (names_cache->enumerable() == object()) {
        HForInCacheArray* index_cache =
            names_cache->index_cache();
        // Guard the rewrite with a map check on the enumerated object, then
        // load the property index from the parallel index cache.
        HCheckMapValue* map_check =
            HCheckMapValue::New(block()->graph()->zone(),
                                block()->graph()->GetInvalidContext(),
                                object(),
                                names_cache->map());
        HInstruction* index = HLoadKeyed::New(
            block()->graph()->zone(),
            block()->graph()->GetInvalidContext(),
            index_cache,
            key_load->key(),
            key_load->key(),
            key_load->elements_kind());
        map_check->InsertBefore(this);
        index->InsertBefore(this);
        HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
            object(), index);
        load->InsertBefore(this);
        return load;
      }
    }
  }

  return this;
}


void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(".");
  ASSERT(name()->IsString());
  stream->Add(*String::cast(*name())->ToCString());
  stream->Add(" = ");
  value()->PrintNameTo(stream);
}


void HStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  access_.PrintTo(stream);
  stream->Add(" = ");
  value()->PrintNameTo(stream);
  if (NeedsWriteBarrier()) {
    stream->Add(" (write-barrier)");
  }
  if (has_transition()) {
    stream->Add(" (transition map %p)", *transition_map());
  }
}


void HStoreKeyed::PrintDataTo(StringStream* stream) {
  if (!is_external()) {
    elements()->PrintNameTo(stream);
  } else {
    elements()->PrintNameTo(stream);
    stream->Add(".");
    stream->Add(ElementsKindToString(elements_kind()));
    ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
  }

  stream->Add("[");
  key()->PrintNameTo(stream);
  if (IsDehoisted()) {
    stream->Add(" + %d] = ", index_offset());
  } else {
    stream->Add("] = ");
  }

  value()->PrintNameTo(stream);
}


void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add("[");
  key()->PrintNameTo(stream);
  stream->Add("] = ");
  value()->PrintNameTo(stream);
}


void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  ElementsKind from_kind = original_map()->elements_kind();
  ElementsKind to_kind = transitioned_map()->elements_kind();
  stream->Add(" %p [%s] -> %p [%s]",
              *original_map(),
              ElementsAccessor::ForKind(from_kind)->name(),
              *transitioned_map(),
              ElementsAccessor::ForKind(to_kind)->name());
}


void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
  stream->Add("[%p]", *cell());
  if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
  if (details_.IsReadOnly()) stream->Add(" (read-only)");
}


// A hole check is needed unless the cell is non-deletable and writable, or
// every use is just a representation change.
bool HLoadGlobalCell::RequiresHoleCheck() const {
  if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return true;
  }
  return false;
}


void HLoadGlobalGeneric::PrintDataTo(StringStream* stream) {
  stream->Add("%o ", *name());
}


void HInnerAllocatedObject::PrintDataTo(StringStream* stream) {
  base_object()->PrintNameTo(stream);
  stream->Add(" offset %d", offset());
}


void HStoreGlobalCell::PrintDataTo(StringStream* stream) {
  stream->Add("[%p] = ", *cell());
  value()->PrintNameTo(stream);
  if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
  if (details_.IsReadOnly()) stream->Add(" (read-only)");
}


void HStoreGlobalGeneric::PrintDataTo(StringStream* stream) {
  stream->Add("%o = ", *name());
  value()->PrintNameTo(stream);
}


void HLoadContextSlot::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add("[%d]", slot_index());
}


void HStoreContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintNameTo(stream);
  stream->Add("[%d] = ", slot_index());
  value()->PrintNameTo(stream);
}


// Implementation of type inference and type conversions. Calculates
// the inferred type of this instruction based on the input operands.

HType HValue::CalculateInferredType() {
  return type_;
}


// A phi's type is the combination of all of its operand types.
HType HPhi::CalculateInferredType() {
  if (OperandCount() == 0) return HType::Tagged();
  HType result = OperandAt(0)->type();
  for (int i = 1; i < OperandCount(); ++i) {
    HType current = OperandAt(i)->type();
    result = result.Combine(current);
  }
  return result;
}


// Tagging a double always produces a heap number.
HType HChange::CalculateInferredType() {
  if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
  return type();
}


Representation HUnaryMathOperation::RepresentationFromInputs() {
  Representation rep = representation();
  // If any of the actual input representation is more general than what we
  // have so far but not Tagged, use that representation instead.
  Representation input_rep = value()->representation();
  if (!input_rep.IsTagged()) {
    rep = rep.generalize(input_rep);
  }
  return rep;
}


// NOTE: this function continues beyond the end of this chunk.
void HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
                                          HValue* dominator) {
  ASSERT(side_effect == kChangesNewSpacePromotion);
  Zone* zone = block()->zone();
  if (!FLAG_use_allocation_folding) return;

  // Try to fold allocations together with their dominating allocations.
  if (!dominator->IsAllocate()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s)\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return;
  }

  HAllocate* dominator_allocate = HAllocate::cast(dominator);
  HValue* dominator_size = dominator_allocate->size();
  HValue* current_size = size();

  // TODO(hpayer): Add support for non-constant allocation in dominator.
  if (!current_size->IsInteger32Constant() ||
      !dominator_size->IsInteger32Constant()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation size\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return;
  }

  dominator_allocate = GetFoldableDominator(dominator_allocate);
  if (dominator_allocate == NULL) {
    return;
  }

  // Folding is only done between allocations targeting the same space.
  ASSERT((IsNewSpaceAllocation() &&
         dominator_allocate->IsNewSpaceAllocation()) ||
         (IsOldDataSpaceAllocation() &&
         dominator_allocate->IsOldDataSpaceAllocation()) ||
         (IsOldPointerSpaceAllocation() &&
         dominator_allocate->IsOldPointerSpaceAllocation()));

  // First update the size of the dominator allocate instruction.
  dominator_size = dominator_allocate->size();
  int32_t original_object_size =
      HConstant::cast(dominator_size)->GetInteger32Constant();
  int32_t dominator_size_constant = original_object_size;
  int32_t current_size_constant =
      HConstant::cast(current_size)->GetInteger32Constant();
  int32_t new_dominator_size = dominator_size_constant + current_size_constant;

  if (MustAllocateDoubleAligned()) {
    if (!dominator_allocate->MustAllocateDoubleAligned()) {
      dominator_allocate->MakeDoubleAligned();
    }
    // Pad so this object's start within the merged allocation stays
    // double-aligned.
    if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
      dominator_size_constant += kDoubleSize / 2;
      new_dominator_size += kDoubleSize / 2;
    }
  }

  if (new_dominator_size > Page::kMaxNonCodeHeapObjectSize) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
             id(), Mnemonic(), dominator_allocate->id(),
             dominator_allocate->Mnemonic(), new_dominator_size);
    }
    return;
  }

  HInstruction* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
      zone,
      context(),
      new_dominator_size,
      Representation::None(),
      dominator_allocate);
  dominator_allocate->UpdateSize(new_dominator_size_constant);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && dominator_allocate->IsNewSpaceAllocation()) {
    dominator_allocate->MakePrefillWithFiller();
  } else {
    // TODO(hpayer): This is a short-term hack to make allocation mementos
    // work again in new space.
    dominator_allocate->ClearNextMapWord(original_object_size);
  }
#else
  // TODO(hpayer): This is a short-term hack to make allocation mementos
  // work again in new space.
  dominator_allocate->ClearNextMapWord(original_object_size);
#endif

  dominator_allocate->clear_next_map_word_ = clear_next_map_word_;

  // After that replace the dominated allocate instruction.
3295 HInstruction* dominated_allocate_instr = 3296 HInnerAllocatedObject::New(zone, 3297 context(), 3298 dominator_allocate, 3299 dominator_size_constant, 3300 type()); 3301 dominated_allocate_instr->InsertBefore(this); 3302 DeleteAndReplaceWith(dominated_allocate_instr); 3303 if (FLAG_trace_allocation_folding) { 3304 PrintF("#%d (%s) folded into #%d (%s)\n", 3305 id(), Mnemonic(), dominator_allocate->id(), 3306 dominator_allocate->Mnemonic()); 3307 } 3308 } 3309 3310 3311 HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) { 3312 if (!IsFoldable(dominator)) { 3313 // We cannot hoist old space allocations over new space allocations. 3314 if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) { 3315 if (FLAG_trace_allocation_folding) { 3316 PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n", 3317 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); 3318 } 3319 return NULL; 3320 } 3321 3322 HAllocate* dominator_dominator = dominator->dominating_allocate_; 3323 3324 // We can hoist old data space allocations over an old pointer space 3325 // allocation and vice versa. For that we have to check the dominator 3326 // of the dominator allocate instruction. 3327 if (dominator_dominator == NULL) { 3328 dominating_allocate_ = dominator; 3329 if (FLAG_trace_allocation_folding) { 3330 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", 3331 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); 3332 } 3333 return NULL; 3334 } 3335 3336 // We can just fold old space allocations that are in the same basic block, 3337 // since it is not guaranteed that we fill up the whole allocated old 3338 // space memory. 3339 // TODO(hpayer): Remove this limitation and add filler maps for each each 3340 // allocation as soon as we have store elimination. 
3341 if (block()->block_id() != dominator_dominator->block()->block_id()) { 3342 if (FLAG_trace_allocation_folding) { 3343 PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n", 3344 id(), Mnemonic(), dominator_dominator->id(), 3345 dominator_dominator->Mnemonic()); 3346 } 3347 return NULL; 3348 } 3349 3350 ASSERT((IsOldDataSpaceAllocation() && 3351 dominator_dominator->IsOldDataSpaceAllocation()) || 3352 (IsOldPointerSpaceAllocation() && 3353 dominator_dominator->IsOldPointerSpaceAllocation())); 3354 3355 int32_t current_size = HConstant::cast(size())->GetInteger32Constant(); 3356 HStoreNamedField* dominator_free_space_size = 3357 dominator->filler_free_space_size_; 3358 if (dominator_free_space_size != NULL) { 3359 // We already hoisted one old space allocation, i.e., we already installed 3360 // a filler map. Hence, we just have to update the free space size. 3361 dominator->UpdateFreeSpaceFiller(current_size); 3362 } else { 3363 // This is the first old space allocation that gets hoisted. We have to 3364 // install a filler map since the follwing allocation may cause a GC. 3365 dominator->CreateFreeSpaceFiller(current_size); 3366 } 3367 3368 // We can hoist the old space allocation over the actual dominator. 3369 return dominator_dominator; 3370 } 3371 return dominator; 3372 } 3373 3374 3375 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) { 3376 ASSERT(filler_free_space_size_ != NULL); 3377 Zone* zone = block()->zone(); 3378 // We must explicitly force Smi representation here because on x64 we 3379 // would otherwise automatically choose int32, but the actual store 3380 // requires a Smi-tagged value. 
3381 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore( 3382 zone, 3383 context(), 3384 filler_free_space_size_->value()->GetInteger32Constant() + 3385 free_space_size, 3386 Representation::Smi(), 3387 filler_free_space_size_); 3388 filler_free_space_size_->UpdateValue(new_free_space_size); 3389 } 3390 3391 3392 void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) { 3393 ASSERT(filler_free_space_size_ == NULL); 3394 Zone* zone = block()->zone(); 3395 int32_t dominator_size = 3396 HConstant::cast(dominating_allocate_->size())->GetInteger32Constant(); 3397 HInstruction* free_space_instr = 3398 HInnerAllocatedObject::New(zone, context(), dominating_allocate_, 3399 dominator_size, type()); 3400 free_space_instr->InsertBefore(this); 3401 HConstant* filler_map = HConstant::New( 3402 zone, 3403 context(), 3404 isolate()->factory()->free_space_map(), 3405 UniqueValueId(isolate()->heap()->free_space_map())); 3406 filler_map->InsertAfter(free_space_instr); 3407 HInstruction* store_map = HStoreNamedField::New(zone, context(), 3408 free_space_instr, HObjectAccess::ForMap(), filler_map); 3409 store_map->SetFlag(HValue::kHasNoObservableSideEffects); 3410 store_map->InsertAfter(filler_map); 3411 3412 // We must explicitly force Smi representation here because on x64 we 3413 // would otherwise automatically choose int32, but the actual store 3414 // requires a Smi-tagged value. 3415 HConstant* filler_size = HConstant::CreateAndInsertAfter( 3416 zone, context(), free_space_size, Representation::Smi(), store_map); 3417 // Must force Smi representation for x64 (see comment above). 
3418 HObjectAccess access = 3419 HObjectAccess::ForJSObjectOffset(FreeSpace::kSizeOffset, 3420 Representation::Smi()); 3421 HStoreNamedField* store_size = HStoreNamedField::New(zone, context(), 3422 free_space_instr, access, filler_size); 3423 store_size->SetFlag(HValue::kHasNoObservableSideEffects); 3424 store_size->InsertAfter(filler_size); 3425 filler_free_space_size_ = store_size; 3426 } 3427 3428 3429 void HAllocate::ClearNextMapWord(int offset) { 3430 if (clear_next_map_word_) { 3431 Zone* zone = block()->zone(); 3432 HObjectAccess access = HObjectAccess::ForJSObjectOffset(offset); 3433 HStoreNamedField* clear_next_map = 3434 HStoreNamedField::New(zone, context(), this, access, 3435 block()->graph()->GetConstantNull()); 3436 clear_next_map->ClearAllSideEffects(); 3437 clear_next_map->InsertAfter(this); 3438 } 3439 } 3440 3441 3442 void HAllocate::PrintDataTo(StringStream* stream) { 3443 size()->PrintNameTo(stream); 3444 stream->Add(" ("); 3445 if (IsNewSpaceAllocation()) stream->Add("N"); 3446 if (IsOldPointerSpaceAllocation()) stream->Add("P"); 3447 if (IsOldDataSpaceAllocation()) stream->Add("D"); 3448 if (MustAllocateDoubleAligned()) stream->Add("A"); 3449 if (MustPrefillWithFiller()) stream->Add("F"); 3450 stream->Add(")"); 3451 } 3452 3453 3454 HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero( 3455 BitVector* visited) { 3456 visited->Add(id()); 3457 if (representation().IsSmiOrInteger32() && 3458 !value()->representation().Equals(representation())) { 3459 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) { 3460 SetFlag(kBailoutOnMinusZero); 3461 } 3462 } 3463 if (RequiredInputRepresentation(0).IsSmiOrInteger32() && 3464 representation().Equals(RequiredInputRepresentation(0))) { 3465 return value(); 3466 } 3467 return NULL; 3468 } 3469 3470 3471 HValue* HChange::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3472 visited->Add(id()); 3473 if (from().IsSmiOrInteger32()) return NULL; 3474 if (CanTruncateToInt32()) return 
NULL; 3475 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) { 3476 SetFlag(kBailoutOnMinusZero); 3477 } 3478 ASSERT(!from().IsSmiOrInteger32() || !to().IsSmiOrInteger32()); 3479 return NULL; 3480 } 3481 3482 3483 HValue* HForceRepresentation::EnsureAndPropagateNotMinusZero( 3484 BitVector* visited) { 3485 visited->Add(id()); 3486 return value(); 3487 } 3488 3489 3490 HValue* HMod::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3491 visited->Add(id()); 3492 if (range() == NULL || range()->CanBeMinusZero()) { 3493 SetFlag(kBailoutOnMinusZero); 3494 return left(); 3495 } 3496 return NULL; 3497 } 3498 3499 3500 HValue* HDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3501 visited->Add(id()); 3502 if (range() == NULL || range()->CanBeMinusZero()) { 3503 SetFlag(kBailoutOnMinusZero); 3504 } 3505 return NULL; 3506 } 3507 3508 3509 HValue* HMathFloorOfDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3510 visited->Add(id()); 3511 SetFlag(kBailoutOnMinusZero); 3512 return NULL; 3513 } 3514 3515 3516 HValue* HMul::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3517 visited->Add(id()); 3518 if (range() == NULL || range()->CanBeMinusZero()) { 3519 SetFlag(kBailoutOnMinusZero); 3520 } 3521 return NULL; 3522 } 3523 3524 3525 HValue* HSub::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3526 visited->Add(id()); 3527 // Propagate to the left argument. If the left argument cannot be -0, then 3528 // the result of the add operation cannot be either. 3529 if (range() == NULL || range()->CanBeMinusZero()) { 3530 return left(); 3531 } 3532 return NULL; 3533 } 3534 3535 3536 HValue* HAdd::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3537 visited->Add(id()); 3538 // Propagate to the left argument. If the left argument cannot be -0, then 3539 // the result of the sub operation cannot be either. 
3540 if (range() == NULL || range()->CanBeMinusZero()) { 3541 return left(); 3542 } 3543 return NULL; 3544 } 3545 3546 3547 bool HStoreKeyed::NeedsCanonicalization() { 3548 // If value is an integer or smi or comes from the result of a keyed load or 3549 // constant then it is either be a non-hole value or in the case of a constant 3550 // the hole is only being stored explicitly: no need for canonicalization. 3551 // 3552 // The exception to that is keyed loads from external float or double arrays: 3553 // these can load arbitrary representation of NaN. 3554 3555 if (value()->IsConstant()) { 3556 return false; 3557 } 3558 3559 if (value()->IsLoadKeyed()) { 3560 return IsExternalFloatOrDoubleElementsKind( 3561 HLoadKeyed::cast(value())->elements_kind()); 3562 } 3563 3564 if (value()->IsChange()) { 3565 if (HChange::cast(value())->from().IsSmiOrInteger32()) { 3566 return false; 3567 } 3568 if (HChange::cast(value())->value()->type().IsSmi()) { 3569 return false; 3570 } 3571 } 3572 return true; 3573 } 3574 3575 3576 #define H_CONSTANT_INT(val) \ 3577 HConstant::New(zone, context, static_cast<int32_t>(val)) 3578 #define H_CONSTANT_DOUBLE(val) \ 3579 HConstant::New(zone, context, static_cast<double>(val)) 3580 3581 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \ 3582 HInstruction* HInstr::New( \ 3583 Zone* zone, HValue* context, HValue* left, HValue* right) { \ 3584 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \ 3585 HConstant* c_left = HConstant::cast(left); \ 3586 HConstant* c_right = HConstant::cast(right); \ 3587 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \ 3588 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \ 3589 if (TypeInfo::IsInt32Double(double_res)) { \ 3590 return H_CONSTANT_INT(double_res); \ 3591 } \ 3592 return H_CONSTANT_DOUBLE(double_res); \ 3593 } \ 3594 } \ 3595 return new(zone) HInstr(context, left, right); \ 3596 } 3597 3598 3599 
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR


// Folds string concatenation of two constant strings into a constant flat
// string; otherwise emits a regular HStringAdd.
HInstruction* HStringAdd::New(Zone* zone,
                              HValue* context,
                              HValue* left,
                              HValue* right,
                              StringAddFlags flags) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_right = HConstant::cast(right);
    HConstant* c_left = HConstant::cast(left);
    if (c_left->HasStringValue() && c_right->HasStringValue()) {
      Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
          c_left->StringValue(), c_right->StringValue());
      return HConstant::New(zone, context, concat);
    }
  }
  return new(zone) HStringAdd(context, left, right, flags);
}


// Folds String.fromCharCode of a constant code: the code is masked to
// 16 bits; a non-finite code yields the empty string.
HInstruction* HStringCharFromCode::New(
    Zone* zone, HValue* context, HValue* char_code) {
  if (FLAG_fold_constants && char_code->IsConstant()) {
    HConstant* c_code = HConstant::cast(char_code);
    Isolate* isolate = Isolate::Current();
    if (c_code->HasNumberValue()) {
      if (std::isfinite(c_code->DoubleValue())) {
        uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
        return HConstant::New(zone, context,
            LookupSingleCharacterStringFromCode(isolate, code));
      }
      return HConstant::New(zone, context, isolate->factory()->empty_string());
    }
  }
  return new(zone) HStringCharFromCode(context, char_code);
}


// Folds unary math builtins applied to a constant number.  NaN and the
// infinities are special-cased first to match JS Math semantics; the
// do/while(false) lets any failed precondition break out to the generic
// instruction.
HInstruction* HUnaryMathOperation::New(
    Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(OS::nan_value());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathSin:
        case kMathCos:
        case kMathTan:
          return H_CONSTANT_DOUBLE(OS::nan_value());
        case kMathExp:
          // exp(+inf) = +inf, exp(-inf) = 0.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          // Defined only for +inf; -inf gives NaN.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : OS::nan_value());
        case kMathPowHalf:
        case kMathAbs:
          // Both map -inf to +inf.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFloor:
          // Infinities round/floor to themselves.
          return H_CONSTANT_DOUBLE(d);
        default:
          UNREACHABLE();
          break;
      }
    }
    switch (op) {
      case kMathSin:
        return H_CONSTANT_DOUBLE(fast_sin(d));
      case kMathCos:
        return H_CONSTANT_DOUBLE(fast_cos(d));
      case kMathTan:
        return H_CONSTANT_DOUBLE(fast_tan(d));
      case kMathExp:
        return H_CONSTANT_DOUBLE(fast_exp(d));
      case kMathLog:
        return H_CONSTANT_DOUBLE(fast_log(d));
      case kMathSqrt:
        return H_CONSTANT_DOUBLE(fast_sqrt(d));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        // d + 0.0 normalizes -0 to +0.
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significant * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(floor(d + 0.5));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(floor(d));
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}


// Folds Math.pow of two constant numbers; power_helper results that are
// NaN are normalized to the platform NaN value.
HInstruction* HPower::New(Zone* zone,
                          HValue* context,
                          HValue* left,
                          HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double result = power_helper(c_left->DoubleValue(),
                                   c_right->DoubleValue());
      return H_CONSTANT_DOUBLE(std::isnan(result) ? OS::nan_value() : result);
    }
  }
  return new(zone) HPower(left, right);
}


// Folds Math.min/Math.max of constants, with explicit handling of the
// +0/-0 distinction (min prefers -0, max prefers +0); if all three
// comparisons fail an operand is NaN, which is the result.
HInstruction* HMathMinMax::New(
    Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(OS::nan_value());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}


// Folds % of two int32 constants.  kMinInt % -1 and negative-zero results
// must be doubles (-0.0) to preserve JS semantics; division by zero is
// left to the runtime instruction.
HInstruction* HMod::New(Zone* zone,
                        HValue* context,
                        HValue* left,
                        HValue* right,
                        Maybe<int> fixed_right_arg) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
      int32_t dividend = c_left->Integer32Value();
      int32_t divisor = c_right->Integer32Value();
      if (dividend == kMinInt && divisor == -1) {
        return H_CONSTANT_DOUBLE(-0.0);
      }
      if (divisor != 0) {
        int32_t res = dividend % divisor;
        if ((res == 0) && (dividend < 0)) {
          return H_CONSTANT_DOUBLE(-0.0);
        }
        return H_CONSTANT_INT(res);
      }
    }
  }
  return new(zone) HMod(context, left, right, fixed_right_arg);
}


// Folds / of two constant numbers.  Division by (possibly signed) zero
// yields a signed infinity per IEEE 754.
HInstruction* HDiv::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  // If left and right are constant values, try to return a constant value.
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      if (c_right->DoubleValue() != 0) {
        double double_res = c_left->DoubleValue() / c_right->DoubleValue();
        if (TypeInfo::IsInt32Double(double_res)) {
          return H_CONSTANT_INT(double_res);
        }
        return H_CONSTANT_DOUBLE(double_res);
      } else {
        int sign = Double(c_left->DoubleValue()).Sign() *
            Double(c_right->DoubleValue()).Sign();  // Right could be -0.
        return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
      }
    }
  }
  return new(zone) HDiv(context, left, right);
}


// Folds ^, & and | of constant numbers after ToInt32 conversion.
HInstruction* HBitwise::New(
    Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t result;
      int32_t v_left = c_left->NumberValueAsInteger32();
      int32_t v_right = c_right->NumberValueAsInteger32();
      switch (op) {
        case Token::BIT_XOR:
          result = v_left ^ v_right;
          break;
        case Token::BIT_AND:
          result = v_left & v_right;
          break;
        case Token::BIT_OR:
          result = v_left | v_right;
          break;
        default:
          result = 0;  // Please the compiler.
          UNREACHABLE();
      }
      return H_CONSTANT_INT(result);
    }
  }
  return new(zone) HBitwise(context, op, left, right);
}


// Factory for shift instructions whose constant result fits in int32; the
// shift count is masked to 5 bits as in JS.
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result)                             \
HInstruction* HInstr::New(                                                     \
    Zone* zone, HValue* context, HValue* left, HValue* right) {                \
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {      \
    HConstant* c_left = HConstant::cast(left);                                 \
    HConstant* c_right = HConstant::cast(right);                               \
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {             \
      return H_CONSTANT_INT(result);                                           \
    }                                                                          \
  }                                                                            \
  return new(zone) HInstr(context, left, right);                               \
}


DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR


// HShr (>>>) cannot use the macro above: a zero shift of a negative value
// produces an unsigned result outside int32 range, which must become a
// double constant.
HInstruction* HShr::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t left_val = c_left->NumberValueAsInteger32();
      int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
      if ((right_val == 0) && (left_val < 0)) {
        return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
      }
      return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
    }
  }
  return new(zone) HShr(context, left, right);
}


#undef H_CONSTANT_INT
#undef H_CONSTANT_DOUBLE


// Prints the token (e.g. "&") followed by the standard binary-op data.
void HBitwise::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(op_));
  stream->Add(" ");
  HBitwiseBinaryOperation::PrintDataTo(stream);
}


void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  // Bail out unless every operand is a constant.
  for (int i = 0; i < OperandCount(); ++i) {
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      continue;
    } else if (operand->HasDoubleValue()) {
      // Replace the double with its ToInt32 truncation.
      HConstant* integer_input =
          HConstant::New(graph->zone(), graph->GetInvalidContext(),
                         DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      // E.g. undefined/null truncate to 0.
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}


// Infers the phi's representation from its inputs, then its uses, then the
// representations its uses require.
void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}


// The most general representation among all operands.
Representation HPhi::RepresentationFromInputs() {
  Representation r = Representation::None();
  for (int i = 0; i < OperandCount(); ++i) {
    r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
  }
  return r;
}


// Returns a representation if all uses agree on the same representation.
// Integer32 is also returned when some uses are Smi but others are Integer32.
Representation HValue::RepresentationFromUseRequirements() {
  Representation rep = Representation::None();
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (rep.IsNone()) {
      rep = use_rep;
      continue;
    }
    if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
    if (rep.generalize(use_rep).IsInteger32()) {
      rep = Representation::Integer32();
      continue;
    }
    // Conflicting requirements: no single representation fits.
    return Representation::None();
  }
  return rep;
}


// True if some use requires a representation other than None/Smi/Tagged
// (i.e. a genuinely non-Smi numeric representation).
bool HValue::HasNonSmiUse() {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (!use_rep.IsNone() &&
        !use_rep.IsSmi() &&
        !use_rep.IsTagged()) {
      return true;
    }
  }
  return false;
}


// Node-specific verification code is only included in debug mode.
#ifdef DEBUG

// Each phi operand must be defined in (or dominate) the corresponding
// predecessor block.
void HPhi::Verify() {
  ASSERT(OperandCount() == block()->predecessors()->length());
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    HBasicBlock* defining_block = value->block();
    HBasicBlock* predecessor_block = block()->predecessors()->at(i);
    ASSERT(defining_block == predecessor_block ||
           defining_block->Dominates(predecessor_block));
  }
}


void HSimulate::Verify() {
  HInstruction::Verify();
  ASSERT(HasAstId());
}


void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  ASSERT(HasNoUses());
}


void HCheckFunction::Verify() {
  HInstruction::Verify();
  ASSERT(HasNoUses());
}

#endif


// Access into the FixedArray header; the length field gets its dedicated
// access so GVN can track it separately.
HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
  ASSERT(offset >= 0);
  ASSERT(offset < FixedArray::kHeaderSize);
  if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
  return HObjectAccess(kInobject, offset);
}


// In-object access at the given offset; elements and map pointers are
// classified into their own portions for GVN purposes.
HObjectAccess HObjectAccess::ForJSObjectOffset(int offset,
    Representation representation) {
  ASSERT(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  return HObjectAccess(portion, offset, representation);
}


// Like ForJSObjectOffset, but additionally classifies the JSArray length
// field as an array-length access.
HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
  ASSERT(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSArray::kLengthOffset) {
    portion = kArrayLengths;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  return HObjectAccess(portion, offset);
}


// Access into the out-of-object properties backing store.
HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
  ASSERT(offset >= 0);
  return HObjectAccess(kBackingStore, offset, representation);
}


// Builds the access for a named property found via LookupResult, for either
// an existing field or a field added by a map transition.  Negative field
// indices denote in-object slots counted back from the instance size;
// non-negative ones index the properties backing store.
HObjectAccess HObjectAccess::ForField(Handle<Map> map,
    LookupResult *lookup, Handle<String> name) {
  ASSERT(lookup->IsField() || lookup->IsTransitionToField(*map));
  int index;
  Representation representation;
  if (lookup->IsField()) {
    index = lookup->GetLocalFieldIndexFromMap(*map);
    representation = lookup->representation();
  } else {
    // Transition case: read the field's details from the transition map's
    // last added descriptor.
    Map* transition = lookup->GetTransitionMapFromMap(*map);
    int descriptor = transition->LastAdded();
    index = transition->instance_descriptors()->GetFieldIndex(descriptor) -
        map->inobject_properties();
    PropertyDetails details =
        transition->instance_descriptors()->GetDetails(descriptor);
    representation = details.representation();
  }
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    int offset = (index * kPointerSize) + map->instance_size();
    return HObjectAccess(kInobject, offset, representation);
  } else {
    // Non-negative property indices are in the properties array.
    int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
    return HObjectAccess(kBackingStore, offset, representation, name);
  }
}


// Access to a Cell's value slot, named for readable tracing output.
HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
  return HObjectAccess(
      kInobject, Cell::kValueOffset, Representation::Tagged(),
      Handle<String>(isolate->heap()->cell_value_string()));
}


void HObjectAccess::SetGVNFlags(HValue *instr, bool is_store) {
  // set the appropriate GVN flags for a given load or store instruction
  if (is_store) {
    // track dominating allocations in order to eliminate write barriers
    instr->SetGVNFlag(kDependsOnNewSpacePromotion);
    instr->SetFlag(HValue::kTrackSideEffectDominators);
  } else {
    // try to GVN loads, but don't hoist above map changes
    instr->SetFlag(HValue::kUseGVN);
    instr->SetGVNFlag(kDependsOnMaps);
  }

  // Each portion maps to a matched changes/depends-on flag pair.
  switch (portion()) {
    case kArrayLengths:
      instr->SetGVNFlag(is_store
          ? kChangesArrayLengths : kDependsOnArrayLengths);
      break;
    case kStringLengths:
      instr->SetGVNFlag(is_store
          ? kChangesStringLengths : kDependsOnStringLengths);
      break;
    case kInobject:
      instr->SetGVNFlag(is_store
          ? kChangesInobjectFields : kDependsOnInobjectFields);
      break;
    case kDouble:
      instr->SetGVNFlag(is_store
          ? kChangesDoubleFields : kDependsOnDoubleFields);
      break;
    case kBackingStore:
      instr->SetGVNFlag(is_store
          ? kChangesBackingStoreFields : kDependsOnBackingStoreFields);
      break;
    case kElementsPointer:
      instr->SetGVNFlag(is_store
          ? kChangesElementsPointer : kDependsOnElementsPointer);
      break;
    case kMaps:
      instr->SetGVNFlag(is_store
          ? kChangesMaps : kDependsOnMaps);
      break;
    case kExternalMemory:
      instr->SetGVNFlag(is_store
          ? kChangesExternalMemory : kDependsOnExternalMemory);
      break;
  }
}


// Prints ".name[portion]@offset" for tracing, e.g. ".%map@0".
void HObjectAccess::PrintTo(StringStream* stream) {
  stream->Add(".");

  switch (portion()) {
    case kArrayLengths:
    case kStringLengths:
      stream->Add("%length");
      break;
    case kElementsPointer:
      stream->Add("%elements");
      break;
    case kMaps:
      stream->Add("%map");
      break;
    case kDouble:  // fall through
    case kInobject:
      if (!name_.is_null()) stream->Add(*String::cast(*name_)->ToCString());
      stream->Add("[in-object]");
      break;
    case kBackingStore:
      if (!name_.is_null()) stream->Add(*String::cast(*name_)->ToCString());
      stream->Add("[backing-store]");
      break;
    case kExternalMemory:
      stream->Add("[external-memory]");
      break;
  }

  stream->Add("@%d", offset());
}

} }  // namespace v8::internal