// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/double.h"
#include "src/factory.h"
#include "src/hydrogen-infer-representation.h"
#include "src/property-details-inl.h"

// Pull in the Lithium (low-level IR) chunk builder for the configured
// target architecture; exactly one of these headers is used per build.
#if V8_TARGET_ARCH_IA32
#include "src/ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "src/x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM64
#include "src/arm64/lithium-arm64.h"
#elif V8_TARGET_ARCH_ARM
#include "src/arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "src/mips/lithium-mips.h"
#elif V8_TARGET_ARCH_X87
#include "src/x87/lithium-x87.h"
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

// Stamp out CompileToLithium() for every concrete Hydrogen instruction;
// each one simply dispatches to the matching Do<Type>() method on the
// Lithium chunk builder.
#define DEFINE_COMPILE(type)                                          \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) {   \
    return builder->Do##type(this);                                   \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE


// The isolate is reached through the owning basic block; a value must be
// inserted into a block before this may be called.
Isolate* HValue::isolate() const {
  ASSERT(block() != NULL);
  return block()->isolate();
}


// Pin this value to representation |r| if it is still flexible; once
// pinned, the inference phase will no longer change it.
void HValue::AssumeRepresentation(Representation r) {
  if (CheckFlag(kFlexibleRepresentation)) {
    ChangeRepresentation(r);
    // The representation of the value is dictated by type feedback and
    // will not be changed later.
    ClearFlag(kFlexibleRepresentation);
  }
}


// One inference step: generalize from the representations of inputs,
// then from uses.  A Smi result that has a non-Smi use is widened to
// Integer32.
void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }
}


// Tally the representation each use observes for this value and pick the
// most general one present (Tagged > Double > Integer32 > Smi).
Representation HValue::RepresentationFromUses() {
  if (HasNoUses()) return Representation::None();

  // Array of use counts for each representation.
  int use_count[Representation::kNumRepresentations] = { 0 };

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    Representation rep = use->observed_input_representation(it.index());
    if (rep.IsNone()) continue;
    if (FLAG_trace_representation) {
      PrintF("#%d %s is used by #%d %s as %s%s\n",
             id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
             (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
    }
    use_count[rep.kind()] += 1;
  }
  // Phis also accumulate counts from uses reachable through other phis.
  if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
  int tagged_count = use_count[Representation::kTagged];
  int double_count = use_count[Representation::kDouble];
  int int32_count = use_count[Representation::kInteger32];
  int smi_count = use_count[Representation::kSmi];

  if (tagged_count > 0) return Representation::Tagged();
  if (double_count > 0) return Representation::Double();
  if (int32_count > 0) return Representation::Integer32();
  if (smi_count > 0) return Representation::Smi();

  return Representation::None();
}


// Widen this value's representation to |new_rep| if it is strictly more
// general, then re-enqueue dependents so the change propagates.
// Values flagged kCannotBeTagged refuse a Tagged representation.
void HValue::UpdateRepresentation(Representation new_rep,
                                  HInferRepresentationPhase* h_infer,
                                  const char* reason) {
  Representation r = representation();
  if (new_rep.is_more_general_than(r)) {
    if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
    if (FLAG_trace_representation) {
      PrintF("Changing #%d %s representation %s -> %s based on %s\n",
             id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
    }
    ChangeRepresentation(new_rep);
    AddDependantsToWorklist(h_infer);
  }
}


// Both uses and operands may need to be re-examined after a
// representation change.
void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    h_infer->AddToWorklist(it.value());
  }
  for (int i = 0; i < OperandCount(); ++i) {
    h_infer->AddToWorklist(OperandAt(i));
  }
}


// Clamp a 64-bit intermediate result to the value range of |r| (Smi or
// Integer32), setting *overflow when clamping was necessary.
static int32_t ConvertAndSetOverflow(Representation r,
                                     int64_t result,
                                     bool* overflow) {
  if (r.IsSmi()) {
    if (result > Smi::kMaxValue) {
      *overflow = true;
      return Smi::kMaxValue;
    }
    if (result < Smi::kMinValue) {
      *overflow = true;
      return Smi::kMinValue;
    }
  } else {
    if (result > kMaxInt) {
      *overflow = true;
      return kMaxInt;
    }
    if (result < kMinInt) {
      *overflow = true;
      return kMinInt;
    }
  }
  return static_cast<int32_t>(result);
}
// Saturating addition: compute in 64 bits, then clamp to the value range
// of representation |r|, flagging overflow.
static int32_t AddWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// Saturating subtraction; see AddWithoutOverflow.
static int32_t SubWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// Saturating multiplication; see AddWithoutOverflow.
static int32_t MulWithoutOverflow(const Representation& r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// Returns a conservative bit mask covering every value in the range:
// the exact value for a singleton range, an all-ones mask up to the
// highest bit of upper_ for non-negative ranges, and all 32 bits
// otherwise.
int32_t Range::Mask() const {
  if (lower_ == upper_) return lower_;
  if (lower_ >= 0) {
    int32_t res = 1;
    while (res < upper_) {
      res = (res << 1) | 1;
    }
    return res;
  }
  return 0xffffffff;
}


// Shift both bounds by a constant.  Saturates at the int32 limits.
void Range::AddConstant(int32_t value) {
  if (value == 0) return;
  bool may_overflow = false;  // Overflow is ignored here.
  Representation r = Representation::Integer32();
  lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
#ifdef DEBUG
  Verify();
#endif
}


// Narrow this range to its intersection with |other|; -0 survives only
// if both ranges allow it.
void Range::Intersect(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  bool b = CanBeMinusZero() && other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


// Widen this range to its union with |other|; -0 survives if either
// range allows it.
void Range::Union(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  bool b = CanBeMinusZero() || other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


// Range of Max(x, y) for x in this range, y in |other|.
void Range::CombinedMax(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


// Range of Min(x, y) for x in this range, y in |other|.
void Range::CombinedMin(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


// Arithmetic shift right by a constant (shift count masked to 5 bits,
// matching the machine semantics of int32 shifts).
void Range::Sar(int32_t value) {
  int32_t bits = value & 0x1F;
  lower_ = lower_ >> bits;
  upper_ = upper_ >> bits;
  set_can_be_minus_zero(false);
}


// Shift left by a constant.  If shifting back does not recover the
// original bounds, the shift lost bits, so give up and widen to the
// full int32 range.
void Range::Shl(int32_t value) {
  int32_t bits = value & 0x1F;
  int old_lower = lower_;
  int old_upper = upper_;
  lower_ = lower_ << bits;
  upper_ = upper_ << bits;
  if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
    upper_ = kMaxInt;
    lower_ = kMinInt;
  }
  set_can_be_minus_zero(false);
}


// Add |other| to this range elementwise; returns whether either bound
// may have overflowed representation |r| (bounds saturate in that case).
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}
// Subtract |other| from this range: new bounds are
// [lower - other.upper, upper - other.lower].  Returns whether either
// bound may have overflowed representation |r|.
bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}


// Restore the lower_ <= upper_ invariant after an operation that may
// have crossed the bounds (e.g. saturation).
void Range::KeepOrder() {
  if (lower_ > upper_) {
    int32_t tmp = lower_;
    lower_ = upper_;
    upper_ = tmp;
  }
}


#ifdef DEBUG
void Range::Verify() const {
  ASSERT(lower_ <= upper_);
}
#endif


// Multiply two ranges.  Since signs may differ, all four corner
// products are needed to find the tightest enclosing bounds.
bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  lower_ = Min(Min(v1, v2), Min(v3, v4));
  upper_ = Max(Max(v1, v2), Max(v3, v4));
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}


// Ordering is approximated by block id: defined after == lives in a
// block with a larger id.
bool HValue::IsDefinedAfter(HBasicBlock* other) const {
  return block()->block_id() > other->block_id();
}


HUseListNode* HUseListNode::tail() {
  // Skip and remove dead items in the use list.
  while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
    tail_ = tail_->tail_;
  }
  return tail_;
}


// True iff every non-simulate use of this value has flag |f| set.
// Vacuously true when there are no uses.
bool HValue::CheckUsesForFlag(Flag f) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
  }
  return true;
}


// As above, but on failure also reports the first offending use
// through |value|.
bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) {
      *value = it.value();
      return false;
    }
  }
  return true;
}


// True iff there is at least one non-simulate use and every such use
// has flag |f| (the loop bails out on the first use without the flag,
// so the flag check doubles as the "none without" test).
bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
  bool return_value = false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
    return_value = true;
  }
  return return_value;
}


// The iterator is primed immediately so value()/index() are valid for
// the first node.
HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
  Advance();
}


void HUseIterator::Advance() {
  current_ = next_;
  if (current_ != NULL) {
    next_ = current_->tail();  // tail() lazily prunes dead uses.
    value_ = current_->value();
    index_ = current_->index();
  }
}


int HValue::UseCount() const {
  int count = 0;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
  return count;
}


// Unlink and return the use node recording that |value| uses this value
// as operand |index|; returns NULL if no such node exists.
HUseListNode* HValue::RemoveUse(HValue* value, int index) {
  HUseListNode* previous = NULL;
  HUseListNode* current = use_list_;
  while (current != NULL) {
    if (current->value() == value && current->index() == index) {
      if (previous == NULL) {
        use_list_ = current->tail();
      } else {
        previous->set_tail(current->tail());
      }
      break;
    }

    previous = current;
    current = current->tail();
  }

#ifdef DEBUG
  // Do not reuse use list nodes in debug mode, zap them.
  if (current != NULL) {
    HUseListNode* temp =
        new(block()->zone())
        HUseListNode(current->value(), current->index(), NULL);
    current->Zap();
    current = temp;
  }
#endif
  return current;
}


// Structural equality used by GVN: same opcode, representation, type,
// flags, and operand ids, plus instruction-specific DataEquals().
bool HValue::Equals(HValue* other) {
  if (other->opcode() != opcode()) return false;
  if (!other->representation().Equals(representation())) return false;
  if (!other->type_.Equals(type_)) return false;
  if (other->flags() != flags()) return false;
  if (OperandCount() != other->OperandCount()) return false;
  for (int i = 0; i < OperandCount(); ++i) {
    if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
  }
  bool result = DataEquals(other);
  // Equal values must hash equally for the GVN table to be sound.
  ASSERT(!result || Hashcode() == other->Hashcode());
  return result;
}


// Hash over opcode and operand ids, consistent with Equals() above.
intptr_t HValue::Hashcode() {
  intptr_t result = opcode();
  int count = OperandCount();
  for (int i = 0; i < count; ++i) {
    result = result * 19 + OperandAt(i)->id() + (result >> 7);
  }
  return result;
}


const char* HValue::Mnemonic() const {
  switch (opcode()) {
#define MAKE_CASE(type) case k##type: return #type;
    HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
#undef MAKE_CASE
    case kPhi: return "Phi";
    default: return "";
  }
}


// A value in an unreachable block can be replaced by dummy uses during
// unreachable-code elimination, except for the structural/bookkeeping
// instructions listed here.
bool HValue::CanReplaceWithDummyUses() {
  return FLAG_unreachable_code_elimination &&
      !(block()->IsReachable() ||
        IsBlockEntry() ||
        IsControlInstruction() ||
        IsArgumentsObject() ||
        IsCapturedObject() ||
        IsSimulate() ||
        IsEnterInlined() ||
        IsLeaveInlined());
}


bool HValue::IsInteger32Constant() {
  return IsConstant() && HConstant::cast(this)->HasInteger32Value();
}


// Precondition: IsInteger32Constant() holds.
int32_t HValue::GetInteger32Constant() {
  return HConstant::cast(this)->Integer32Value();
}


bool HValue::EqualsInteger32Constant(int32_t value) {
  return IsInteger32Constant() && GetInteger32Constant() == value;
}
// Set operand |index| and keep the use lists consistent (RegisterUse
// moves the use node from the old operand to the new one).
void HValue::SetOperandAt(int index, HValue* value) {
  RegisterUse(index, value);
  InternalSetOperandAt(index, value);
}


void HValue::DeleteAndReplaceWith(HValue* other) {
  // We replace all uses first, so Delete can assert that there are none.
  if (other != NULL) ReplaceAllUsesWith(other);
  Kill();
  DeleteFromGraph();
}


// Redirect every use of this value to |other|, splicing each use node
// onto |other|'s use list.
void HValue::ReplaceAllUsesWith(HValue* other) {
  while (use_list_ != NULL) {
    HUseListNode* list_node = use_list_;
    HValue* value = list_node->value();
    ASSERT(!value->block()->IsStartBlock());
    value->InternalSetOperandAt(list_node->index(), other);
    use_list_ = list_node->tail();
    list_node->set_tail(other->use_list_);
    other->use_list_ = list_node;
  }
}


void HValue::Kill() {
  // Instead of going through the entire use list of each operand, we only
  // check the first item in each use list and rely on the tail() method to
  // skip dead items, removing them lazily next time we traverse the list.
  SetFlag(kIsDead);
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* operand = OperandAt(i);
    if (operand == NULL) continue;
    HUseListNode* first = operand->use_list_;
    if (first != NULL && first->value()->CheckFlag(kIsDead)) {
      operand->use_list_ = first->tail();
    }
  }
}


// Attach to (or detach from) a basic block; the value gets its id the
// first time it is attached.
void HValue::SetBlock(HBasicBlock* block) {
  ASSERT(block_ == NULL || block == NULL);
  block_ = block;
  if (id_ == kNoNumber && block != NULL) {
    id_ = block->graph()->GetNextValueID(this);
  }
}


// Only print the type when it adds information beyond the
// representation (i.e. tagged with a type narrower than Tagged).
void HValue::PrintTypeTo(StringStream* stream) {
  if (!representation().IsTagged() || type().Equals(HType::Tagged())) return;
  stream->Add(" type:%s", type().ToString());
}


// Print the GVN "changes" flags; "*" abbreviates the full side-effect
// set.
void HValue::PrintChangesTo(StringStream* stream) {
  GVNFlagSet changes_flags = ChangesFlags();
  if (changes_flags.IsEmpty()) return;
  stream->Add(" changes[");
  if (changes_flags == AllSideEffectsFlagSet()) {
    stream->Add("*");
  } else {
    bool add_comma = false;
#define PRINT_DO(Type)                      \
    if (changes_flags.Contains(k##Type)) {  \
      if (add_comma) stream->Add(",");      \
      add_comma = true;                     \
      stream->Add(#Type);                   \
    }
    GVN_TRACKED_FLAG_LIST(PRINT_DO);
    GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
  }
  stream->Add("]");
}


// e.g. "t12" / "i7": representation mnemonic followed by the value id.
void HValue::PrintNameTo(StringStream* stream) {
  stream->Add("%s%d", representation_.Mnemonic(), id());
}


bool HValue::HasMonomorphicJSObjectType() {
  return !GetMonomorphicJSObjectMap().is_null();
}


// Recompute the inferred type; returns true when it changed.
bool HValue::UpdateInferredType() {
  HType type = CalculateInferredType();
  bool result = (!type.Equals(type_));
  type_ = type;
  return result;
}


// Move the use record for operand |index| from the old operand's use
// list to |new_value|'s, reusing the removed node when possible.
void HValue::RegisterUse(int index, HValue* new_value) {
  HValue* old_value = OperandAt(index);
  if (old_value == new_value) return;

  HUseListNode* removed = NULL;
  if (old_value != NULL) {
    removed = old_value->RemoveUse(this, index);
  }

  if (new_value != NULL) {
    if (removed == NULL) {
      new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
          this, index, new_value->use_list_);
    } else {
      removed->set_tail(new_value->use_list_);
      new_value->use_list_ = removed;
    }
  }
}


// Push a new range onto this value's range stack (ranges nest so they
// can be popped again by RemoveLastAddedRange).
void HValue::AddNewRange(Range* r, Zone* zone) {
  if (!HasRange()) ComputeInitialRange(zone);
  if (!HasRange()) range_ = new(zone) Range();
  ASSERT(HasRange());
  r->StackUpon(range_);
  range_ = r;
}


void HValue::RemoveLastAddedRange() {
  ASSERT(HasRange());
  ASSERT(range_->next() != NULL);
  range_ = range_->next();
}


void HValue::ComputeInitialRange(Zone* zone) {
  ASSERT(!HasRange());
  range_ = InferRange(zone);
  ASSERT(HasRange());
}


// "<inlining_id:position>" when positions are tracked, else the raw
// encoded position; "<?>" when unknown.
void HSourcePosition::PrintTo(FILE* out) {
  if (IsUnknown()) {
    PrintF(out, "<?>");
  } else {
    if (FLAG_hydrogen_track_positions) {
      PrintF(out, "<%d:%d>", inlining_id(), position());
    } else {
      PrintF(out, "<0:%d>", raw());
    }
  }
}


void HInstruction::PrintTo(StringStream* stream) {
  PrintMnemonicTo(stream);
  PrintDataTo(stream);
  PrintChangesTo(stream);
  PrintTypeTo(stream);
  if (CheckFlag(HValue::kHasNoObservableSideEffects)) {
    stream->Add(" [noOSE]");
  }
  if (CheckFlag(HValue::kIsDead)) {
    stream->Add(" [dead]");
  }
}


// Default instruction payload: space-separated operand names.
void HInstruction::PrintDataTo(StringStream *stream) {
  for (int i = 0; i < OperandCount(); ++i) {
    if (i > 0) stream->Add(" ");
    OperandAt(i)->PrintNameTo(stream);
  }
}


void HInstruction::PrintMnemonicTo(StringStream* stream) {
  stream->Add("%s ", Mnemonic());
}


// Remove this instruction from its block's doubly-linked instruction
// list without deleting it.
void HInstruction::Unlink() {
  ASSERT(IsLinked());
  ASSERT(!IsControlInstruction());  // Must never move control instructions.
  ASSERT(!IsBlockEntry());  // Doesn't make sense to delete these.
  ASSERT(previous_ != NULL);
  previous_->next_ = next_;
  if (next_ == NULL) {
    ASSERT(block()->last() == this);
    block()->set_last(previous_);
  } else {
    next_->previous_ = previous_;
  }
  clear_block();
}


// Splice this instruction into |next|'s block immediately before
// |next|, inheriting its source position if this one has none.
void HInstruction::InsertBefore(HInstruction* next) {
  ASSERT(!IsLinked());
  ASSERT(!next->IsBlockEntry());
  ASSERT(!IsControlInstruction());
  ASSERT(!next->block()->IsStartBlock());
  ASSERT(next->previous_ != NULL);
  HInstruction* prev = next->previous();
  prev->next_ = this;
  next->previous_ = this;
  next_ = next;
  previous_ = prev;
  SetBlock(next->block());
  if (!has_position() && next->has_position()) {
    set_position(next->position());
  }
}


// Splice this instruction in immediately after |previous|, with two
// adjustments: insertions into a finished start block are redirected to
// its successor, and insertions after a side-effecting instruction land
// after its simulate.
void HInstruction::InsertAfter(HInstruction* previous) {
  ASSERT(!IsLinked());
  ASSERT(!previous->IsControlInstruction());
  ASSERT(!IsControlInstruction() || previous->next_ == NULL);
  HBasicBlock* block = previous->block();
  // Never insert anything except constants into the start block after finishing
  // it.
  if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
    ASSERT(block->end()->SecondSuccessor() == NULL);
    InsertAfter(block->end()->FirstSuccessor()->first());
    return;
  }

  // If we're inserting after an instruction with side-effects that is
  // followed by a simulate instruction, we need to insert after the
  // simulate instruction instead.
  HInstruction* next = previous->next_;
  if (previous->HasObservableSideEffects() && next != NULL) {
    ASSERT(next->IsSimulate());
    previous = next;
    next = previous->next_;
  }

  previous_ = previous;
  next_ = next;
  SetBlock(block);
  previous->next_ = this;
  if (next != NULL) next->previous_ = this;
  if (block->last() == previous) {
    block->set_last(this);
  }
  if (!has_position() && previous->has_position()) {
    set_position(previous->position());
  }
}


// Dominance test: different blocks defer to block dominance; within one
// block, this instruction dominates |other| iff it precedes it.
bool HInstruction::Dominates(HInstruction* other) {
  if (block() != other->block()) {
    return block()->Dominates(other->block());
  }
  // Both instructions are in the same basic block. This instruction
  // should precede the other one in order to dominate it.
  for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
    if (instr == other) {
      return true;
    }
  }
  return false;
}


#ifdef DEBUG
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        HInstruction* cur = this->previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        ASSERT(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      ASSERT(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    ASSERT(next()->IsSimulate());
  }

  // Verify that instructions that can be eliminated by GVN have overridden
  // HValue::DataEquals. The default implementation is UNREACHABLE. We
  // don't actually care whether DataEquals returns true or false here.
  if (CheckFlag(kUseGVN)) DataEquals(this);

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      ASSERT(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif


// Exhaustive per-opcode classification of whether an instruction may
// trigger a deoptimization.  Every concrete opcode is listed so that
// adding a new instruction without classifying it breaks the build.
bool HInstruction::CanDeoptimize() {
  // TODO(titzer): make this a virtual method?
  switch (opcode()) {
    case HValue::kAbnormalExit:
    case HValue::kAccessArgumentsAt:
    case HValue::kAllocate:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBlockEntry:
    case HValue::kBoundsCheckBaseIndexInformation:
    case HValue::kCallFunction:
    case HValue::kCallNew:
    case HValue::kCallNewArray:
    case HValue::kCallStub:
    case HValue::kCallWithDescriptor:
    case HValue::kCapturedObject:
    case HValue::kClassOfTestAndBranch:
    case HValue::kCompareGeneric:
    case HValue::kCompareHoleAndBranch:
    case HValue::kCompareMap:
    case HValue::kCompareMinusZeroAndBranch:
    case HValue::kCompareNumericAndBranch:
    case HValue::kCompareObjectEqAndBranch:
    case HValue::kConstant:
    case HValue::kConstructDouble:
    case HValue::kContext:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDoubleBits:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForceRepresentation:
    case HValue::kGetCachedArrayIndex:
    case HValue::kGoto:
    case HValue::kHasCachedArrayIndexAndBranch:
    case HValue::kHasInstanceTypeAndBranch:
    case HValue::kInnerAllocatedObject:
    case HValue::kInstanceOf:
    case HValue::kInstanceOfKnownGlobal:
    case HValue::kIsConstructCallAndBranch:
    case HValue::kIsObjectAndBranch:
    case HValue::kIsSmiAndBranch:
    case HValue::kIsStringAndBranch:
    case HValue::kIsUndetectableAndBranch:
    case HValue::kLeaveInlined:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadGlobalGeneric:
    case HValue::kLoadNamedField:
    case HValue::kLoadNamedGeneric:
    case HValue::kLoadRoot:
    case HValue::kMapEnumLength:
    case HValue::kMathMinMax:
    case HValue::kParameter:
    case HValue::kPhi:
    case HValue::kPushArguments:
    case HValue::kRegExpLiteral:
    case HValue::kReturn:
    case HValue::kSeqStringGetChar:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreFrameContext:
    case HValue::kStoreKeyed:
    case HValue::kStoreNamedField:
    case HValue::kStoreNamedGeneric:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kThisFunction:
    case HValue::kTypeofIsAndBranch:
    case HValue::kUnknownOSRValue:
    case HValue::kUseConst:
      return false;

    case HValue::kAdd:
    case HValue::kAllocateBlockContext:
    case HValue::kApplyArguments:
    case HValue::kBitwise:
    case HValue::kBoundsCheck:
    case HValue::kBranch:
    case HValue::kCallJSFunction:
    case HValue::kCallRuntime:
    case HValue::kChange:
    case HValue::kCheckHeapObject:
    case HValue::kCheckInstanceType:
    case HValue::kCheckMapValue:
    case HValue::kCheckMaps:
    case HValue::kCheckSmi:
    case HValue::kCheckValue:
    case HValue::kClampToUint8:
    case HValue::kDateField:
    case HValue::kDeoptimize:
    case HValue::kDiv:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kFunctionLiteral:
    case HValue::kInvokeFunction:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadGlobalCell:
    case HValue::kLoadKeyed:
    case HValue::kLoadKeyedGeneric:
    case HValue::kMathFloorOfDiv:
    case HValue::kMod:
    case HValue::kMul:
    case HValue::kOsrEntry:
    case HValue::kPower:
    case HValue::kRor:
    case HValue::kSar:
    case HValue::kSeqStringSetChar:
    case HValue::kShl:
    case HValue::kShr:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreContextSlot:
    case HValue::kStoreGlobalCell:
    case HValue::kStoreKeyedGeneric:
    case HValue::kStringAdd:
    case HValue::kStringCompareAndBranch:
    case HValue::kSub:
    case HValue::kToFastProperties:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kWrapReceiver:
      return true;
  }
  UNREACHABLE();
  return true;
}


void HDummyUse::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
}


void HEnvironmentMarker::PrintDataTo(StringStream* stream) {
  stream->Add("%s var[%d]", kind() == BIND ?
      "bind" : "lookup", index());
}


void HUnaryCall::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" ");
  stream->Add("#%d", argument_count());
}


void HCallJSFunction::PrintDataTo(StringStream* stream) {
  function()->PrintNameTo(stream);
  stream->Add(" ");
  stream->Add("#%d", argument_count());
}


// Factory for HCallJSFunction.  When the callee is a known constant
// JSFunction with compiled (or optimized) code, record that its
// prologue performs the stack check so the caller can skip emitting
// one.
HCallJSFunction* HCallJSFunction::New(
    Zone* zone,
    HValue* context,
    HValue* function,
    int argument_count,
    bool pass_argument_count) {
  bool has_stack_check = false;
  if (function->IsConstant()) {
    HConstant* fun_const = HConstant::cast(function);
    Handle<JSFunction> jsfun =
        Handle<JSFunction>::cast(fun_const->handle(zone->isolate()));
    has_stack_check = !jsfun.is_null() &&
        (jsfun->code()->kind() == Code::FUNCTION ||
         jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
  }

  return new(zone) HCallJSFunction(
      function, argument_count, pass_argument_count,
      has_stack_check);
}


void HBinaryCall::PrintDataTo(StringStream* stream) {
  first()->PrintNameTo(stream);
  stream->Add(" ");
  second()->PrintNameTo(stream);
  stream->Add(" ");
  stream->Add("#%d", argument_count());
}


// Fold this check's symbolic (base + offset) >> scale decomposition
// into explicit Add/Sar instructions inserted before the check, then
// make the check operate on the materialized index directly.
void HBoundsCheck::ApplyIndexChange() {
  if (skip_check()) return;

  DecompositionResult decomposition;
  bool index_is_decomposable = index()->TryDecompose(&decomposition);
  if (index_is_decomposable) {
    ASSERT(decomposition.base() == base());
    // Nothing to do if the decomposition matches what is already
    // recorded on the check.
    if (decomposition.offset() == offset() &&
        decomposition.scale() == scale()) return;
  } else {
    return;
  }

  ReplaceAllUsesWith(index());

  HValue* current_index = decomposition.base();
  int actual_offset = decomposition.offset() + offset();
  int actual_scale = decomposition.scale() + scale();

  Zone* zone = block()->graph()->zone();
  HValue* context = block()->graph()->GetInvalidContext();
  if (actual_offset != 0) {
    HConstant* add_offset = HConstant::New(zone, context, actual_offset);
    add_offset->InsertBefore(this);
    HInstruction* add = HAdd::New(zone, context,
                                  current_index, add_offset);
    add->InsertBefore(this);
    add->AssumeRepresentation(index()->representation());
    add->ClearFlag(kCanOverflow);
    current_index = add;
  }

  if (actual_scale != 0) {
    HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
    sar_scale->InsertBefore(this);
    HInstruction* sar = HSar::New(zone, context,
                                  current_index, sar_scale);
    sar->InsertBefore(this);
    sar->AssumeRepresentation(index()->representation());
    current_index = sar;
  }

  SetOperandAt(0, current_index);

  // The symbolic decomposition has been materialized; reset it.
  base_ = NULL;
  offset_ = 0;
  scale_ = 0;
}


void HBoundsCheck::PrintDataTo(StringStream* stream) {
  index()->PrintNameTo(stream);
  stream->Add(" ");
  length()->PrintNameTo(stream);
  if (base() != NULL && (offset() != 0 || scale() != 0)) {
    stream->Add(" base: ((");
    if (base() != index()) {
      index()->PrintNameTo(stream);
    } else {
      stream->Add("index");
    }
    stream->Add(" + %d) >> %d)", offset(), scale());
  }
  if (skip_check()) {
    stream->Add(" [DISABLED]");
  }
}


// The check's representation is the generalization of index and length
// (treating tagged-but-Smi-typed values as Smi), capped at Integer32.
void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  HValue* actual_index = index()->ActualValue();
  HValue* actual_length = length()->ActualValue();
  Representation index_rep = actual_index->representation();
  Representation length_rep = actual_length->representation();
  if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
    index_rep = Representation::Smi();
  }
  if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
    length_rep = Representation::Smi();
  }
  Representation r = index_rep.generalize(length_rep);
  if (r.is_more_general_than(Representation::Integer32())) {
    r = Representation::Integer32();
  }
  UpdateRepresentation(r, h_infer, "boundscheck");
}


// A passed bounds check constrains its result to [0, length) — or
// [0, length] when equality is allowed — intersected with the index's
// own range.
Range* HBoundsCheck::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32() && length()->HasRange()) {
    int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
    int lower = 0;

    Range* result = new(zone) Range(lower, upper);
    if (index()->HasRange()) {
      result->Intersect(index()->range());
    }

    // In case of Smi representation, clamp result to Smi::kMaxValue.
    if (r.IsSmi()) result->ClampToSmi();
    return result;
  }
  return HValue::InferRange(zone);
}


void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
  stream->Add("base: ");
  base_index()->PrintNameTo(stream);
  // NOTE(review): both labels print base_index(); presumably "check:"
  // was meant to print the associated bounds check — confirm upstream.
  stream->Add(", check: ");
  base_index()->PrintNameTo(stream);
}


void HCallWithDescriptor::PrintDataTo(StringStream* stream) {
  for (int i = 0; i < OperandCount(); i++) {
    OperandAt(i)->PrintNameTo(stream);
    stream->Add(" ");
  }
  stream->Add("#%d", argument_count());
}


void HCallNewArray::PrintDataTo(StringStream* stream) {
  stream->Add(ElementsKindToString(elements_kind()));
  stream->Add(" ");
  HBinaryCall::PrintDataTo(stream);
}


void HCallRuntime::PrintDataTo(StringStream* stream) {
  stream->Add("%o ", *name());
  if (save_doubles() == kSaveFPRegs) {
    stream->Add("[save doubles] ");
  }
  stream->Add("#%d", argument_count());
}


void HClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add("class_of_test(");
  value()->PrintNameTo(stream);
  stream->Add(", \"%o\")", *class_name());
}


void HWrapReceiver::PrintDataTo(StringStream* stream) {
  receiver()->PrintNameTo(stream);
  stream->Add(" ");
  function()->PrintNameTo(stream);
}


void HAccessArgumentsAt::PrintDataTo(StringStream* stream) {
  arguments()->PrintNameTo(stream);
  stream->Add("[");
  index()->PrintNameTo(stream);
  stream->Add("], length ");
  length()->PrintNameTo(stream);
}


void HAllocateBlockContext::PrintDataTo(StringStream* stream) {
  context()->PrintNameTo(stream);
  stream->Add(" ");
  function()->PrintNameTo(stream);
}


// " goto (B1, B2, ...)": the ids of all successor blocks.
void HControlInstruction::PrintDataTo(StringStream* stream) {
  stream->Add(" goto (");
  bool first_block = true;
  for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
    stream->Add(first_block ? "B%d" : ", B%d", it.Current()->block_id());
    first_block = false;
  }
  stream->Add(")");
}


void HUnaryControlInstruction::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  HControlInstruction::PrintDataTo(stream);
}


void HReturn::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" (pop ");
  parameter_count()->PrintNameTo(stream);
  stream->Add(" values)");
}


// Map the ToBoolean type-feedback set onto an input representation:
// any tagged-only type forces Tagged; HEAP_NUMBER allows Double
// (but UNDEFINED without HEAP_NUMBER still needs Tagged); SMI alone
// allows Smi.
Representation HBranch::observed_input_representation(int index) {
  static const ToBooleanStub::Types tagged_types(
      ToBooleanStub::NULL_TYPE |
      ToBooleanStub::SPEC_OBJECT |
      ToBooleanStub::STRING |
      ToBooleanStub::SYMBOL);
  if (expected_input_types_.ContainsAnyOf(tagged_types)) {
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
    if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
      return Representation::Double();
    }
    return Representation::Tagged();
  }
  if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
    return Representation::Double();
  }
  if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
    return Representation::Smi();
  }
  return Representation::None();
}


// If the condition is a constant that is only materialized at its uses,
// the taken successor is known statically.
bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
  HValue* value = this->value();
  if (value->EmitAtUses()) {
    ASSERT(value->IsConstant());
    ASSERT(!value->representation().IsDouble());
    *block = HConstant::cast(value)->BooleanValue()
        ? FirstSuccessor()
        : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


void HBranch::PrintDataTo(StringStream* stream) {
  HUnaryControlInstruction::PrintDataTo(stream);
  stream->Add(" ");
  expected_input_types().Print(stream);
}


void HCompareMap::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" (%p)", *map().handle());
  HControlInstruction::PrintDataTo(stream);
  if (known_successor_index() == 0) {
    stream->Add(" [true]");
  } else if (known_successor_index() == 1) {
    stream->Add(" [false]");
  }
}


const char* HUnaryMathOperation::OpName() const {
  switch (op()) {
    case kMathFloor: return "floor";
    case kMathRound: return "round";
    case kMathAbs: return "abs";
    case kMathLog: return "log";
    case kMathExp: return "exp";
    case kMathSqrt: return "sqrt";
    case kMathPowHalf: return "pow-half";
    case kMathClz32: return "clz32";
    default:
      UNREACHABLE();
      return NULL;
  }
}


Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  // clz32 always yields a leading-zero count in [0, 32].
  if (op() == kMathClz32) return new(zone) Range(0, 32);
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, on which the
      // instruction deopts. Hence clamp it to kMaxInt.
1259 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper); 1260 int abs_lower = lower == kMinInt ? kMaxInt : abs(lower); 1261 Range* result = 1262 new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper), 1263 Max(abs_lower, abs_upper)); 1264 // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to 1265 // Smi::kMaxValue. 1266 if (r.IsSmi()) result->ClampToSmi(); 1267 return result; 1268 } 1269 } 1270 return HValue::InferRange(zone); 1271 } 1272 1273 1274 void HUnaryMathOperation::PrintDataTo(StringStream* stream) { 1275 const char* name = OpName(); 1276 stream->Add("%s ", name); 1277 value()->PrintNameTo(stream); 1278 } 1279 1280 1281 void HUnaryOperation::PrintDataTo(StringStream* stream) { 1282 value()->PrintNameTo(stream); 1283 } 1284 1285 1286 void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) { 1287 value()->PrintNameTo(stream); 1288 switch (from_) { 1289 case FIRST_JS_RECEIVER_TYPE: 1290 if (to_ == LAST_TYPE) stream->Add(" spec_object"); 1291 break; 1292 case JS_REGEXP_TYPE: 1293 if (to_ == JS_REGEXP_TYPE) stream->Add(" reg_exp"); 1294 break; 1295 case JS_ARRAY_TYPE: 1296 if (to_ == JS_ARRAY_TYPE) stream->Add(" array"); 1297 break; 1298 case JS_FUNCTION_TYPE: 1299 if (to_ == JS_FUNCTION_TYPE) stream->Add(" function"); 1300 break; 1301 default: 1302 break; 1303 } 1304 } 1305 1306 1307 void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) { 1308 value()->PrintNameTo(stream); 1309 stream->Add(" == %o", *type_literal_.handle()); 1310 HControlInstruction::PrintDataTo(stream); 1311 } 1312 1313 1314 static String* TypeOfString(HConstant* constant, Isolate* isolate) { 1315 Heap* heap = isolate->heap(); 1316 if (constant->HasNumberValue()) return heap->number_string(); 1317 if (constant->IsUndetectable()) return heap->undefined_string(); 1318 if (constant->HasStringValue()) return heap->string_string(); 1319 switch (constant->GetInstanceType()) { 1320 case ODDBALL_TYPE: { 1321 Unique<Object> unique = constant->GetUnique(); 1322 if 
(unique.IsKnownGlobal(heap->true_value()) || 1323 unique.IsKnownGlobal(heap->false_value())) { 1324 return heap->boolean_string(); 1325 } 1326 if (unique.IsKnownGlobal(heap->null_value())) { 1327 return FLAG_harmony_typeof ? heap->null_string() 1328 : heap->object_string(); 1329 } 1330 ASSERT(unique.IsKnownGlobal(heap->undefined_value())); 1331 return heap->undefined_string(); 1332 } 1333 case SYMBOL_TYPE: 1334 return heap->symbol_string(); 1335 case JS_FUNCTION_TYPE: 1336 case JS_FUNCTION_PROXY_TYPE: 1337 return heap->function_string(); 1338 default: 1339 return heap->object_string(); 1340 } 1341 } 1342 1343 1344 bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) { 1345 if (FLAG_fold_constants && value()->IsConstant()) { 1346 HConstant* constant = HConstant::cast(value()); 1347 String* type_string = TypeOfString(constant, isolate()); 1348 bool same_type = type_literal_.IsKnownGlobal(type_string); 1349 *block = same_type ? FirstSuccessor() : SecondSuccessor(); 1350 return true; 1351 } else if (value()->representation().IsSpecialization()) { 1352 bool number_type = 1353 type_literal_.IsKnownGlobal(isolate()->heap()->number_string()); 1354 *block = number_type ? 
FirstSuccessor() : SecondSuccessor(); 1355 return true; 1356 } 1357 *block = NULL; 1358 return false; 1359 } 1360 1361 1362 void HCheckMapValue::PrintDataTo(StringStream* stream) { 1363 value()->PrintNameTo(stream); 1364 stream->Add(" "); 1365 map()->PrintNameTo(stream); 1366 } 1367 1368 1369 HValue* HCheckMapValue::Canonicalize() { 1370 if (map()->IsConstant()) { 1371 HConstant* c_map = HConstant::cast(map()); 1372 return HCheckMaps::CreateAndInsertAfter( 1373 block()->graph()->zone(), value(), c_map->MapValue(), 1374 c_map->HasStableMapValue(), this); 1375 } 1376 return this; 1377 } 1378 1379 1380 void HForInPrepareMap::PrintDataTo(StringStream* stream) { 1381 enumerable()->PrintNameTo(stream); 1382 } 1383 1384 1385 void HForInCacheArray::PrintDataTo(StringStream* stream) { 1386 enumerable()->PrintNameTo(stream); 1387 stream->Add(" "); 1388 map()->PrintNameTo(stream); 1389 stream->Add("[%d]", idx_); 1390 } 1391 1392 1393 void HLoadFieldByIndex::PrintDataTo(StringStream* stream) { 1394 object()->PrintNameTo(stream); 1395 stream->Add(" "); 1396 index()->PrintNameTo(stream); 1397 } 1398 1399 1400 static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) { 1401 if (!l->EqualsInteger32Constant(~0)) return false; 1402 *negated = r; 1403 return true; 1404 } 1405 1406 1407 static bool MatchNegationViaXor(HValue* instr, HValue** negated) { 1408 if (!instr->IsBitwise()) return false; 1409 HBitwise* b = HBitwise::cast(instr); 1410 return (b->op() == Token::BIT_XOR) && 1411 (MatchLeftIsOnes(b->left(), b->right(), negated) || 1412 MatchLeftIsOnes(b->right(), b->left(), negated)); 1413 } 1414 1415 1416 static bool MatchDoubleNegation(HValue* instr, HValue** arg) { 1417 HValue* negated; 1418 return MatchNegationViaXor(instr, &negated) && 1419 MatchNegationViaXor(negated, arg); 1420 } 1421 1422 1423 HValue* HBitwise::Canonicalize() { 1424 if (!representation().IsSmiOrInteger32()) return this; 1425 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x. 
1426 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0; 1427 if (left()->EqualsInteger32Constant(nop_constant) && 1428 !right()->CheckFlag(kUint32)) { 1429 return right(); 1430 } 1431 if (right()->EqualsInteger32Constant(nop_constant) && 1432 !left()->CheckFlag(kUint32)) { 1433 return left(); 1434 } 1435 // Optimize double negation, a common pattern used for ToInt32(x). 1436 HValue* arg; 1437 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) { 1438 return arg; 1439 } 1440 return this; 1441 } 1442 1443 1444 Representation HAdd::RepresentationFromInputs() { 1445 Representation left_rep = left()->representation(); 1446 if (left_rep.IsExternal()) { 1447 return Representation::External(); 1448 } 1449 return HArithmeticBinaryOperation::RepresentationFromInputs(); 1450 } 1451 1452 1453 Representation HAdd::RequiredInputRepresentation(int index) { 1454 if (index == 2) { 1455 Representation left_rep = left()->representation(); 1456 if (left_rep.IsExternal()) { 1457 return Representation::Integer32(); 1458 } 1459 } 1460 return HArithmeticBinaryOperation::RequiredInputRepresentation(index); 1461 } 1462 1463 1464 static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) { 1465 return arg1->representation().IsSpecialization() && 1466 arg2->EqualsInteger32Constant(identity); 1467 } 1468 1469 1470 HValue* HAdd::Canonicalize() { 1471 // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0 1472 if (IsIdentityOperation(left(), right(), 0) && 1473 !left()->representation().IsDouble()) { // Left could be -0. 1474 return left(); 1475 } 1476 if (IsIdentityOperation(right(), left(), 0) && 1477 !left()->representation().IsDouble()) { // Right could be -0. 
1478 return right(); 1479 } 1480 return this; 1481 } 1482 1483 1484 HValue* HSub::Canonicalize() { 1485 if (IsIdentityOperation(left(), right(), 0)) return left(); 1486 return this; 1487 } 1488 1489 1490 HValue* HMul::Canonicalize() { 1491 if (IsIdentityOperation(left(), right(), 1)) return left(); 1492 if (IsIdentityOperation(right(), left(), 1)) return right(); 1493 return this; 1494 } 1495 1496 1497 bool HMul::MulMinusOne() { 1498 if (left()->EqualsInteger32Constant(-1) || 1499 right()->EqualsInteger32Constant(-1)) { 1500 return true; 1501 } 1502 1503 return false; 1504 } 1505 1506 1507 HValue* HMod::Canonicalize() { 1508 return this; 1509 } 1510 1511 1512 HValue* HDiv::Canonicalize() { 1513 if (IsIdentityOperation(left(), right(), 1)) return left(); 1514 return this; 1515 } 1516 1517 1518 HValue* HChange::Canonicalize() { 1519 return (from().Equals(to())) ? value() : this; 1520 } 1521 1522 1523 HValue* HWrapReceiver::Canonicalize() { 1524 if (HasNoUses()) return NULL; 1525 if (receiver()->type().IsJSObject()) { 1526 return receiver(); 1527 } 1528 return this; 1529 } 1530 1531 1532 void HTypeof::PrintDataTo(StringStream* stream) { 1533 value()->PrintNameTo(stream); 1534 } 1535 1536 1537 HInstruction* HForceRepresentation::New(Zone* zone, HValue* context, 1538 HValue* value, Representation representation) { 1539 if (FLAG_fold_constants && value->IsConstant()) { 1540 HConstant* c = HConstant::cast(value); 1541 if (c->HasNumberValue()) { 1542 double double_res = c->DoubleValue(); 1543 if (representation.IsDouble()) { 1544 return HConstant::New(zone, context, double_res); 1545 1546 } else if (representation.CanContainDouble(double_res)) { 1547 return HConstant::New(zone, context, 1548 static_cast<int32_t>(double_res), 1549 representation); 1550 } 1551 } 1552 } 1553 return new(zone) HForceRepresentation(value, representation); 1554 } 1555 1556 1557 void HForceRepresentation::PrintDataTo(StringStream* stream) { 1558 stream->Add("%s ", representation().Mnemonic()); 
1559 value()->PrintNameTo(stream); 1560 } 1561 1562 1563 void HChange::PrintDataTo(StringStream* stream) { 1564 HUnaryOperation::PrintDataTo(stream); 1565 stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic()); 1566 1567 if (CanTruncateToSmi()) stream->Add(" truncating-smi"); 1568 if (CanTruncateToInt32()) stream->Add(" truncating-int32"); 1569 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?"); 1570 if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan"); 1571 } 1572 1573 1574 HValue* HUnaryMathOperation::Canonicalize() { 1575 if (op() == kMathRound || op() == kMathFloor) { 1576 HValue* val = value(); 1577 if (val->IsChange()) val = HChange::cast(val)->value(); 1578 if (val->representation().IsSmiOrInteger32()) { 1579 if (val->representation().Equals(representation())) return val; 1580 return Prepend(new(block()->zone()) HChange( 1581 val, representation(), false, false)); 1582 } 1583 } 1584 if (op() == kMathFloor && value()->IsDiv() && value()->UseCount() == 1) { 1585 HDiv* hdiv = HDiv::cast(value()); 1586 1587 HValue* left = hdiv->left(); 1588 if (left->representation().IsInteger32()) { 1589 // A value with an integer representation does not need to be transformed. 1590 } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) { 1591 // A change from an integer32 can be replaced by the integer32 value. 1592 left = HChange::cast(left)->value(); 1593 } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) { 1594 left = Prepend(new(block()->zone()) HChange( 1595 left, Representation::Integer32(), false, false)); 1596 } else { 1597 return this; 1598 } 1599 1600 HValue* right = hdiv->right(); 1601 if (right->IsInteger32Constant()) { 1602 right = Prepend(HConstant::cast(right)->CopyToRepresentation( 1603 Representation::Integer32(), right->block()->zone())); 1604 } else if (right->representation().IsInteger32()) { 1605 // A value with an integer representation does not need to be transformed. 
1606 } else if (right->IsChange() && 1607 HChange::cast(right)->from().IsInteger32()) { 1608 // A change from an integer32 can be replaced by the integer32 value. 1609 right = HChange::cast(right)->value(); 1610 } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) { 1611 right = Prepend(new(block()->zone()) HChange( 1612 right, Representation::Integer32(), false, false)); 1613 } else { 1614 return this; 1615 } 1616 1617 return Prepend(HMathFloorOfDiv::New( 1618 block()->zone(), context(), left, right)); 1619 } 1620 return this; 1621 } 1622 1623 1624 HValue* HCheckInstanceType::Canonicalize() { 1625 if ((check_ == IS_SPEC_OBJECT && value()->type().IsJSObject()) || 1626 (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) || 1627 (check_ == IS_STRING && value()->type().IsString())) { 1628 return value(); 1629 } 1630 1631 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) { 1632 if (HConstant::cast(value())->HasInternalizedStringValue()) { 1633 return value(); 1634 } 1635 } 1636 return this; 1637 } 1638 1639 1640 void HCheckInstanceType::GetCheckInterval(InstanceType* first, 1641 InstanceType* last) { 1642 ASSERT(is_interval_check()); 1643 switch (check_) { 1644 case IS_SPEC_OBJECT: 1645 *first = FIRST_SPEC_OBJECT_TYPE; 1646 *last = LAST_SPEC_OBJECT_TYPE; 1647 return; 1648 case IS_JS_ARRAY: 1649 *first = *last = JS_ARRAY_TYPE; 1650 return; 1651 default: 1652 UNREACHABLE(); 1653 } 1654 } 1655 1656 1657 void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) { 1658 ASSERT(!is_interval_check()); 1659 switch (check_) { 1660 case IS_STRING: 1661 *mask = kIsNotStringMask; 1662 *tag = kStringTag; 1663 return; 1664 case IS_INTERNALIZED_STRING: 1665 *mask = kIsNotStringMask | kIsNotInternalizedMask; 1666 *tag = kInternalizedTag; 1667 return; 1668 default: 1669 UNREACHABLE(); 1670 } 1671 } 1672 1673 1674 void HCheckMaps::PrintDataTo(StringStream* stream) { 1675 value()->PrintNameTo(stream); 1676 stream->Add(" [%p", 
*maps()->at(0).handle()); 1677 for (int i = 1; i < maps()->size(); ++i) { 1678 stream->Add(",%p", *maps()->at(i).handle()); 1679 } 1680 stream->Add("]%s", IsStabilityCheck() ? "(stability-check)" : ""); 1681 } 1682 1683 1684 HValue* HCheckMaps::Canonicalize() { 1685 if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) { 1686 HConstant* c_value = HConstant::cast(value()); 1687 if (c_value->HasObjectMap()) { 1688 for (int i = 0; i < maps()->size(); ++i) { 1689 if (c_value->ObjectMap() == maps()->at(i)) { 1690 if (maps()->size() > 1) { 1691 set_maps(new(block()->graph()->zone()) UniqueSet<Map>( 1692 maps()->at(i), block()->graph()->zone())); 1693 } 1694 MarkAsStabilityCheck(); 1695 break; 1696 } 1697 } 1698 } 1699 } 1700 return this; 1701 } 1702 1703 1704 void HCheckValue::PrintDataTo(StringStream* stream) { 1705 value()->PrintNameTo(stream); 1706 stream->Add(" "); 1707 object().handle()->ShortPrint(stream); 1708 } 1709 1710 1711 HValue* HCheckValue::Canonicalize() { 1712 return (value()->IsConstant() && 1713 HConstant::cast(value())->EqualsUnique(object_)) ? 
NULL : this; 1714 } 1715 1716 1717 const char* HCheckInstanceType::GetCheckName() { 1718 switch (check_) { 1719 case IS_SPEC_OBJECT: return "object"; 1720 case IS_JS_ARRAY: return "array"; 1721 case IS_STRING: return "string"; 1722 case IS_INTERNALIZED_STRING: return "internalized_string"; 1723 } 1724 UNREACHABLE(); 1725 return ""; 1726 } 1727 1728 1729 void HCheckInstanceType::PrintDataTo(StringStream* stream) { 1730 stream->Add("%s ", GetCheckName()); 1731 HUnaryOperation::PrintDataTo(stream); 1732 } 1733 1734 1735 void HCallStub::PrintDataTo(StringStream* stream) { 1736 stream->Add("%s ", 1737 CodeStub::MajorName(major_key_, false)); 1738 HUnaryCall::PrintDataTo(stream); 1739 } 1740 1741 1742 void HUnknownOSRValue::PrintDataTo(StringStream *stream) { 1743 const char* type = "expression"; 1744 if (environment_->is_local_index(index_)) type = "local"; 1745 if (environment_->is_special_index(index_)) type = "special"; 1746 if (environment_->is_parameter_index(index_)) type = "parameter"; 1747 stream->Add("%s @ %d", type, index_); 1748 } 1749 1750 1751 void HInstanceOf::PrintDataTo(StringStream* stream) { 1752 left()->PrintNameTo(stream); 1753 stream->Add(" "); 1754 right()->PrintNameTo(stream); 1755 stream->Add(" "); 1756 context()->PrintNameTo(stream); 1757 } 1758 1759 1760 Range* HValue::InferRange(Zone* zone) { 1761 Range* result; 1762 if (representation().IsSmi() || type().IsSmi()) { 1763 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue); 1764 result->set_can_be_minus_zero(false); 1765 } else { 1766 result = new(zone) Range(); 1767 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32)); 1768 // TODO(jkummerow): The range cannot be minus zero when the upper type 1769 // bound is Integer32. 
1770 } 1771 return result; 1772 } 1773 1774 1775 Range* HChange::InferRange(Zone* zone) { 1776 Range* input_range = value()->range(); 1777 if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) && 1778 (to().IsSmi() || 1779 (to().IsTagged() && 1780 input_range != NULL && 1781 input_range->IsInSmiRange()))) { 1782 set_type(HType::Smi()); 1783 ClearChangesFlag(kNewSpacePromotion); 1784 } 1785 if (to().IsSmiOrTagged() && 1786 input_range != NULL && 1787 input_range->IsInSmiRange() && 1788 (!SmiValuesAre32Bits() || 1789 !value()->CheckFlag(HValue::kUint32) || 1790 input_range->upper() != kMaxInt)) { 1791 // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32] 1792 // interval, so we treat kMaxInt as a sentinel for this entire interval. 1793 ClearFlag(kCanOverflow); 1794 } 1795 Range* result = (input_range != NULL) 1796 ? input_range->Copy(zone) 1797 : HValue::InferRange(zone); 1798 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() || 1799 !(CheckFlag(kAllUsesTruncatingToInt32) || 1800 CheckFlag(kAllUsesTruncatingToSmi))); 1801 if (to().IsSmi()) result->ClampToSmi(); 1802 return result; 1803 } 1804 1805 1806 Range* HConstant::InferRange(Zone* zone) { 1807 if (has_int32_value_) { 1808 Range* result = new(zone) Range(int32_value_, int32_value_); 1809 result->set_can_be_minus_zero(false); 1810 return result; 1811 } 1812 return HValue::InferRange(zone); 1813 } 1814 1815 1816 HSourcePosition HPhi::position() const { 1817 return block()->first()->position(); 1818 } 1819 1820 1821 Range* HPhi::InferRange(Zone* zone) { 1822 Representation r = representation(); 1823 if (r.IsSmiOrInteger32()) { 1824 if (block()->IsLoopHeader()) { 1825 Range* range = r.IsSmi() 1826 ? 
new(zone) Range(Smi::kMinValue, Smi::kMaxValue) 1827 : new(zone) Range(kMinInt, kMaxInt); 1828 return range; 1829 } else { 1830 Range* range = OperandAt(0)->range()->Copy(zone); 1831 for (int i = 1; i < OperandCount(); ++i) { 1832 range->Union(OperandAt(i)->range()); 1833 } 1834 return range; 1835 } 1836 } else { 1837 return HValue::InferRange(zone); 1838 } 1839 } 1840 1841 1842 Range* HAdd::InferRange(Zone* zone) { 1843 Representation r = representation(); 1844 if (r.IsSmiOrInteger32()) { 1845 Range* a = left()->range(); 1846 Range* b = right()->range(); 1847 Range* res = a->Copy(zone); 1848 if (!res->AddAndCheckOverflow(r, b) || 1849 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) || 1850 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) { 1851 ClearFlag(kCanOverflow); 1852 } 1853 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) && 1854 !CheckFlag(kAllUsesTruncatingToInt32) && 1855 a->CanBeMinusZero() && b->CanBeMinusZero()); 1856 return res; 1857 } else { 1858 return HValue::InferRange(zone); 1859 } 1860 } 1861 1862 1863 Range* HSub::InferRange(Zone* zone) { 1864 Representation r = representation(); 1865 if (r.IsSmiOrInteger32()) { 1866 Range* a = left()->range(); 1867 Range* b = right()->range(); 1868 Range* res = a->Copy(zone); 1869 if (!res->SubAndCheckOverflow(r, b) || 1870 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) || 1871 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) { 1872 ClearFlag(kCanOverflow); 1873 } 1874 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) && 1875 !CheckFlag(kAllUsesTruncatingToInt32) && 1876 a->CanBeMinusZero() && b->CanBeZero()); 1877 return res; 1878 } else { 1879 return HValue::InferRange(zone); 1880 } 1881 } 1882 1883 1884 Range* HMul::InferRange(Zone* zone) { 1885 Representation r = representation(); 1886 if (r.IsSmiOrInteger32()) { 1887 Range* a = left()->range(); 1888 Range* b = right()->range(); 1889 Range* res = a->Copy(zone); 1890 if (!res->MulAndCheckOverflow(r, b) || 
1891 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) || 1892 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) && 1893 MulMinusOne())) { 1894 // Truncated int multiplication is too precise and therefore not the 1895 // same as converting to Double and back. 1896 // Handle truncated integer multiplication by -1 special. 1897 ClearFlag(kCanOverflow); 1898 } 1899 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) && 1900 !CheckFlag(kAllUsesTruncatingToInt32) && 1901 ((a->CanBeZero() && b->CanBeNegative()) || 1902 (a->CanBeNegative() && b->CanBeZero()))); 1903 return res; 1904 } else { 1905 return HValue::InferRange(zone); 1906 } 1907 } 1908 1909 1910 Range* HDiv::InferRange(Zone* zone) { 1911 if (representation().IsInteger32()) { 1912 Range* a = left()->range(); 1913 Range* b = right()->range(); 1914 Range* result = new(zone) Range(); 1915 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) && 1916 (a->CanBeMinusZero() || 1917 (a->CanBeZero() && b->CanBeNegative()))); 1918 if (!a->Includes(kMinInt) || !b->Includes(-1)) { 1919 ClearFlag(kCanOverflow); 1920 } 1921 1922 if (!b->CanBeZero()) { 1923 ClearFlag(kCanBeDivByZero); 1924 } 1925 return result; 1926 } else { 1927 return HValue::InferRange(zone); 1928 } 1929 } 1930 1931 1932 Range* HMathFloorOfDiv::InferRange(Zone* zone) { 1933 if (representation().IsInteger32()) { 1934 Range* a = left()->range(); 1935 Range* b = right()->range(); 1936 Range* result = new(zone) Range(); 1937 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) && 1938 (a->CanBeMinusZero() || 1939 (a->CanBeZero() && b->CanBeNegative()))); 1940 if (!a->Includes(kMinInt)) { 1941 ClearFlag(kLeftCanBeMinInt); 1942 } 1943 1944 if (!a->CanBeNegative()) { 1945 ClearFlag(HValue::kLeftCanBeNegative); 1946 } 1947 1948 if (!a->CanBePositive()) { 1949 ClearFlag(HValue::kLeftCanBePositive); 1950 } 1951 1952 if (!a->Includes(kMinInt) || !b->Includes(-1)) { 1953 ClearFlag(kCanOverflow); 1954 } 1955 1956 if 
(!b->CanBeZero()) { 1957 ClearFlag(kCanBeDivByZero); 1958 } 1959 return result; 1960 } else { 1961 return HValue::InferRange(zone); 1962 } 1963 } 1964 1965 1966 Range* HMod::InferRange(Zone* zone) { 1967 if (representation().IsInteger32()) { 1968 Range* a = left()->range(); 1969 Range* b = right()->range(); 1970 1971 // The magnitude of the modulus is bounded by the right operand. Note that 1972 // apart for the cases involving kMinInt, the calculation below is the same 1973 // as Max(Abs(b->lower()), Abs(b->upper())) - 1. 1974 int32_t positive_bound = -(Min(NegAbs(b->lower()), NegAbs(b->upper())) + 1); 1975 1976 // The result of the modulo operation has the sign of its left operand. 1977 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative(); 1978 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0, 1979 a->CanBePositive() ? positive_bound : 0); 1980 1981 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) && 1982 left_can_be_negative); 1983 1984 if (!a->CanBeNegative()) { 1985 ClearFlag(HValue::kLeftCanBeNegative); 1986 } 1987 1988 if (!a->Includes(kMinInt) || !b->Includes(-1)) { 1989 ClearFlag(HValue::kCanOverflow); 1990 } 1991 1992 if (!b->CanBeZero()) { 1993 ClearFlag(HValue::kCanBeDivByZero); 1994 } 1995 return result; 1996 } else { 1997 return HValue::InferRange(zone); 1998 } 1999 } 2000 2001 2002 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) { 2003 if (phi->block()->loop_information() == NULL) return NULL; 2004 if (phi->OperandCount() != 2) return NULL; 2005 int32_t candidate_increment; 2006 2007 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0)); 2008 if (candidate_increment != 0) { 2009 return new(phi->block()->graph()->zone()) 2010 InductionVariableData(phi, phi->OperandAt(1), candidate_increment); 2011 } 2012 2013 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1)); 2014 if (candidate_increment != 0) { 2015 return new(phi->block()->graph()->zone()) 2016 
InductionVariableData(phi, phi->OperandAt(0), candidate_increment); 2017 } 2018 2019 return NULL; 2020 } 2021 2022 2023 /* 2024 * This function tries to match the following patterns (and all the relevant 2025 * variants related to |, & and + being commutative): 2026 * base | constant_or_mask 2027 * base & constant_and_mask 2028 * (base + constant_offset) & constant_and_mask 2029 * (base - constant_offset) & constant_and_mask 2030 */ 2031 void InductionVariableData::DecomposeBitwise( 2032 HValue* value, 2033 BitwiseDecompositionResult* result) { 2034 HValue* base = IgnoreOsrValue(value); 2035 result->base = value; 2036 2037 if (!base->representation().IsInteger32()) return; 2038 2039 if (base->IsBitwise()) { 2040 bool allow_offset = false; 2041 int32_t mask = 0; 2042 2043 HBitwise* bitwise = HBitwise::cast(base); 2044 if (bitwise->right()->IsInteger32Constant()) { 2045 mask = bitwise->right()->GetInteger32Constant(); 2046 base = bitwise->left(); 2047 } else if (bitwise->left()->IsInteger32Constant()) { 2048 mask = bitwise->left()->GetInteger32Constant(); 2049 base = bitwise->right(); 2050 } else { 2051 return; 2052 } 2053 if (bitwise->op() == Token::BIT_AND) { 2054 result->and_mask = mask; 2055 allow_offset = true; 2056 } else if (bitwise->op() == Token::BIT_OR) { 2057 result->or_mask = mask; 2058 } else { 2059 return; 2060 } 2061 2062 result->context = bitwise->context(); 2063 2064 if (allow_offset) { 2065 if (base->IsAdd()) { 2066 HAdd* add = HAdd::cast(base); 2067 if (add->right()->IsInteger32Constant()) { 2068 base = add->left(); 2069 } else if (add->left()->IsInteger32Constant()) { 2070 base = add->right(); 2071 } 2072 } else if (base->IsSub()) { 2073 HSub* sub = HSub::cast(base); 2074 if (sub->right()->IsInteger32Constant()) { 2075 base = sub->left(); 2076 } 2077 } 2078 } 2079 2080 result->base = base; 2081 } 2082 } 2083 2084 2085 void InductionVariableData::AddCheck(HBoundsCheck* check, 2086 int32_t upper_limit) { 2087 ASSERT(limit_validity() != NULL); 2088 
if (limit_validity() != check->block() && 2089 !limit_validity()->Dominates(check->block())) return; 2090 if (!phi()->block()->current_loop()->IsNestedInThisLoop( 2091 check->block()->current_loop())) return; 2092 2093 ChecksRelatedToLength* length_checks = checks(); 2094 while (length_checks != NULL) { 2095 if (length_checks->length() == check->length()) break; 2096 length_checks = length_checks->next(); 2097 } 2098 if (length_checks == NULL) { 2099 length_checks = new(check->block()->zone()) 2100 ChecksRelatedToLength(check->length(), checks()); 2101 checks_ = length_checks; 2102 } 2103 2104 length_checks->AddCheck(check, upper_limit); 2105 } 2106 2107 2108 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() { 2109 if (checks() != NULL) { 2110 InductionVariableCheck* c = checks(); 2111 HBasicBlock* current_block = c->check()->block(); 2112 while (c != NULL && c->check()->block() == current_block) { 2113 c->set_upper_limit(current_upper_limit_); 2114 c = c->next(); 2115 } 2116 } 2117 } 2118 2119 2120 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock( 2121 Token::Value token, 2122 int32_t mask, 2123 HValue* index_base, 2124 HValue* context) { 2125 ASSERT(first_check_in_block() != NULL); 2126 HValue* previous_index = first_check_in_block()->index(); 2127 ASSERT(context != NULL); 2128 2129 Zone* zone = index_base->block()->graph()->zone(); 2130 set_added_constant(HConstant::New(zone, context, mask)); 2131 if (added_index() != NULL) { 2132 added_constant()->InsertBefore(added_index()); 2133 } else { 2134 added_constant()->InsertBefore(first_check_in_block()); 2135 } 2136 2137 if (added_index() == NULL) { 2138 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index()); 2139 HInstruction* new_index = HBitwise::New(zone, context, token, index_base, 2140 added_constant()); 2141 ASSERT(new_index->IsBitwise()); 2142 new_index->ClearAllSideEffects(); 2143 
new_index->AssumeRepresentation(Representation::Integer32()); 2144 set_added_index(HBitwise::cast(new_index)); 2145 added_index()->InsertBefore(first_check_in_block()); 2146 } 2147 ASSERT(added_index()->op() == token); 2148 2149 added_index()->SetOperandAt(1, index_base); 2150 added_index()->SetOperandAt(2, added_constant()); 2151 first_check_in_block()->SetOperandAt(0, added_index()); 2152 if (previous_index->UseCount() == 0) { 2153 previous_index->DeleteAndReplaceWith(NULL); 2154 } 2155 } 2156 2157 void InductionVariableData::ChecksRelatedToLength::AddCheck( 2158 HBoundsCheck* check, 2159 int32_t upper_limit) { 2160 BitwiseDecompositionResult decomposition; 2161 InductionVariableData::DecomposeBitwise(check->index(), &decomposition); 2162 2163 if (first_check_in_block() == NULL || 2164 first_check_in_block()->block() != check->block()) { 2165 CloseCurrentBlock(); 2166 2167 first_check_in_block_ = check; 2168 set_added_index(NULL); 2169 set_added_constant(NULL); 2170 current_and_mask_in_block_ = decomposition.and_mask; 2171 current_or_mask_in_block_ = decomposition.or_mask; 2172 current_upper_limit_ = upper_limit; 2173 2174 InductionVariableCheck* new_check = new(check->block()->graph()->zone()) 2175 InductionVariableCheck(check, checks_, upper_limit); 2176 checks_ = new_check; 2177 return; 2178 } 2179 2180 if (upper_limit > current_upper_limit()) { 2181 current_upper_limit_ = upper_limit; 2182 } 2183 2184 if (decomposition.and_mask != 0 && 2185 current_or_mask_in_block() == 0) { 2186 if (current_and_mask_in_block() == 0 || 2187 decomposition.and_mask > current_and_mask_in_block()) { 2188 UseNewIndexInCurrentBlock(Token::BIT_AND, 2189 decomposition.and_mask, 2190 decomposition.base, 2191 decomposition.context); 2192 current_and_mask_in_block_ = decomposition.and_mask; 2193 } 2194 check->set_skip_check(); 2195 } 2196 if (current_and_mask_in_block() == 0) { 2197 if (decomposition.or_mask > current_or_mask_in_block()) { 2198 UseNewIndexInCurrentBlock(Token::BIT_OR, 
2199 decomposition.or_mask, 2200 decomposition.base, 2201 decomposition.context); 2202 current_or_mask_in_block_ = decomposition.or_mask; 2203 } 2204 check->set_skip_check(); 2205 } 2206 2207 if (!check->skip_check()) { 2208 InductionVariableCheck* new_check = new(check->block()->graph()->zone()) 2209 InductionVariableCheck(check, checks_, upper_limit); 2210 checks_ = new_check; 2211 } 2212 } 2213 2214 2215 /* 2216 * This method detects if phi is an induction variable, with phi_operand as 2217 * its "incremented" value (the other operand would be the "base" value). 2218 * 2219 * It cheks is phi_operand has the form "phi + constant". 2220 * If yes, the constant is the increment that the induction variable gets at 2221 * every loop iteration. 2222 * Otherwise it returns 0. 2223 */ 2224 int32_t InductionVariableData::ComputeIncrement(HPhi* phi, 2225 HValue* phi_operand) { 2226 if (!phi_operand->representation().IsInteger32()) return 0; 2227 2228 if (phi_operand->IsAdd()) { 2229 HAdd* operation = HAdd::cast(phi_operand); 2230 if (operation->left() == phi && 2231 operation->right()->IsInteger32Constant()) { 2232 return operation->right()->GetInteger32Constant(); 2233 } else if (operation->right() == phi && 2234 operation->left()->IsInteger32Constant()) { 2235 return operation->left()->GetInteger32Constant(); 2236 } 2237 } else if (phi_operand->IsSub()) { 2238 HSub* operation = HSub::cast(phi_operand); 2239 if (operation->left() == phi && 2240 operation->right()->IsInteger32Constant()) { 2241 return -operation->right()->GetInteger32Constant(); 2242 } 2243 } 2244 2245 return 0; 2246 } 2247 2248 2249 /* 2250 * Swaps the information in "update" with the one contained in "this". 2251 * The swapping is important because this method is used while doing a 2252 * dominator tree traversal, and "update" will retain the old data that 2253 * will be restored while backtracking. 
2254 */ 2255 void InductionVariableData::UpdateAdditionalLimit( 2256 InductionVariableLimitUpdate* update) { 2257 ASSERT(update->updated_variable == this); 2258 if (update->limit_is_upper) { 2259 swap(&additional_upper_limit_, &update->limit); 2260 swap(&additional_upper_limit_is_included_, &update->limit_is_included); 2261 } else { 2262 swap(&additional_lower_limit_, &update->limit); 2263 swap(&additional_lower_limit_is_included_, &update->limit_is_included); 2264 } 2265 } 2266 2267 2268 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask, 2269 int32_t or_mask) { 2270 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway. 2271 const int32_t MAX_LIMIT = 1 << 30; 2272 2273 int32_t result = MAX_LIMIT; 2274 2275 if (limit() != NULL && 2276 limit()->IsInteger32Constant()) { 2277 int32_t limit_value = limit()->GetInteger32Constant(); 2278 if (!limit_included()) { 2279 limit_value--; 2280 } 2281 if (limit_value < result) result = limit_value; 2282 } 2283 2284 if (additional_upper_limit() != NULL && 2285 additional_upper_limit()->IsInteger32Constant()) { 2286 int32_t limit_value = additional_upper_limit()->GetInteger32Constant(); 2287 if (!additional_upper_limit_is_included()) { 2288 limit_value--; 2289 } 2290 if (limit_value < result) result = limit_value; 2291 } 2292 2293 if (and_mask > 0 && and_mask < MAX_LIMIT) { 2294 if (and_mask < result) result = and_mask; 2295 return result; 2296 } 2297 2298 // Add the effect of the or_mask. 2299 result |= or_mask; 2300 2301 return result >= MAX_LIMIT ? 
kNoLimit : result; 2302 } 2303 2304 2305 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) { 2306 if (!v->IsPhi()) return v; 2307 HPhi* phi = HPhi::cast(v); 2308 if (phi->OperandCount() != 2) return v; 2309 if (phi->OperandAt(0)->block()->is_osr_entry()) { 2310 return phi->OperandAt(1); 2311 } else if (phi->OperandAt(1)->block()->is_osr_entry()) { 2312 return phi->OperandAt(0); 2313 } else { 2314 return v; 2315 } 2316 } 2317 2318 2319 InductionVariableData* InductionVariableData::GetInductionVariableData( 2320 HValue* v) { 2321 v = IgnoreOsrValue(v); 2322 if (v->IsPhi()) { 2323 return HPhi::cast(v)->induction_variable_data(); 2324 } 2325 return NULL; 2326 } 2327 2328 2329 /* 2330 * Check if a conditional branch to "current_branch" with token "token" is 2331 * the branch that keeps the induction loop running (and, conversely, will 2332 * terminate it if the "other_branch" is taken). 2333 * 2334 * Three conditions must be met: 2335 * - "current_branch" must be in the induction loop. 2336 * - "other_branch" must be out of the induction loop. 2337 * - "token" and the induction increment must be "compatible": the token should 2338 * be a condition that keeps the execution inside the loop until the limit is 2339 * reached. 
2340 */ 2341 bool InductionVariableData::CheckIfBranchIsLoopGuard( 2342 Token::Value token, 2343 HBasicBlock* current_branch, 2344 HBasicBlock* other_branch) { 2345 if (!phi()->block()->current_loop()->IsNestedInThisLoop( 2346 current_branch->current_loop())) { 2347 return false; 2348 } 2349 2350 if (phi()->block()->current_loop()->IsNestedInThisLoop( 2351 other_branch->current_loop())) { 2352 return false; 2353 } 2354 2355 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) { 2356 return true; 2357 } 2358 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) { 2359 return true; 2360 } 2361 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) { 2362 return true; 2363 } 2364 2365 return false; 2366 } 2367 2368 2369 void InductionVariableData::ComputeLimitFromPredecessorBlock( 2370 HBasicBlock* block, 2371 LimitFromPredecessorBlock* result) { 2372 if (block->predecessors()->length() != 1) return; 2373 HBasicBlock* predecessor = block->predecessors()->at(0); 2374 HInstruction* end = predecessor->last(); 2375 2376 if (!end->IsCompareNumericAndBranch()) return; 2377 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end); 2378 2379 Token::Value token = branch->token(); 2380 if (!Token::IsArithmeticCompareOp(token)) return; 2381 2382 HBasicBlock* other_target; 2383 if (block == branch->SuccessorAt(0)) { 2384 other_target = branch->SuccessorAt(1); 2385 } else { 2386 other_target = branch->SuccessorAt(0); 2387 token = Token::NegateCompareOp(token); 2388 ASSERT(block == branch->SuccessorAt(1)); 2389 } 2390 2391 InductionVariableData* data; 2392 2393 data = GetInductionVariableData(branch->left()); 2394 HValue* limit = branch->right(); 2395 if (data == NULL) { 2396 data = GetInductionVariableData(branch->right()); 2397 token = Token::ReverseCompareOp(token); 2398 limit = branch->left(); 2399 } 2400 2401 if (data != NULL) { 2402 result->variable = data; 2403 result->token = token; 2404 result->limit = limit; 
2405 result->other_target = other_target; 2406 } 2407 } 2408 2409 2410 /* 2411 * Compute the limit that is imposed on an induction variable when entering 2412 * "block" (if any). 2413 * If the limit is the "proper" induction limit (the one that makes the loop 2414 * terminate when the induction variable reaches it) it is stored directly in 2415 * the induction variable data. 2416 * Otherwise the limit is written in "additional_limit" and the method 2417 * returns true. 2418 */ 2419 bool InductionVariableData::ComputeInductionVariableLimit( 2420 HBasicBlock* block, 2421 InductionVariableLimitUpdate* additional_limit) { 2422 LimitFromPredecessorBlock limit; 2423 ComputeLimitFromPredecessorBlock(block, &limit); 2424 if (!limit.LimitIsValid()) return false; 2425 2426 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token, 2427 block, 2428 limit.other_target)) { 2429 limit.variable->limit_ = limit.limit; 2430 limit.variable->limit_included_ = limit.LimitIsIncluded(); 2431 limit.variable->limit_validity_ = block; 2432 limit.variable->induction_exit_block_ = block->predecessors()->at(0); 2433 limit.variable->induction_exit_target_ = limit.other_target; 2434 return false; 2435 } else { 2436 additional_limit->updated_variable = limit.variable; 2437 additional_limit->limit = limit.limit; 2438 additional_limit->limit_is_upper = limit.LimitIsUpper(); 2439 additional_limit->limit_is_included = limit.LimitIsIncluded(); 2440 return true; 2441 } 2442 } 2443 2444 2445 Range* HMathMinMax::InferRange(Zone* zone) { 2446 if (representation().IsSmiOrInteger32()) { 2447 Range* a = left()->range(); 2448 Range* b = right()->range(); 2449 Range* res = a->Copy(zone); 2450 if (operation_ == kMathMax) { 2451 res->CombinedMax(b); 2452 } else { 2453 ASSERT(operation_ == kMathMin); 2454 res->CombinedMin(b); 2455 } 2456 return res; 2457 } else { 2458 return HValue::InferRange(zone); 2459 } 2460 } 2461 2462 2463 void HPushArguments::AddInput(HValue* value) { 2464 inputs_.Add(NULL, 
value->block()->zone()); 2465 SetOperandAt(OperandCount() - 1, value); 2466 } 2467 2468 2469 void HPhi::PrintTo(StringStream* stream) { 2470 stream->Add("["); 2471 for (int i = 0; i < OperandCount(); ++i) { 2472 HValue* value = OperandAt(i); 2473 stream->Add(" "); 2474 value->PrintNameTo(stream); 2475 stream->Add(" "); 2476 } 2477 stream->Add(" uses:%d_%ds_%di_%dd_%dt", 2478 UseCount(), 2479 smi_non_phi_uses() + smi_indirect_uses(), 2480 int32_non_phi_uses() + int32_indirect_uses(), 2481 double_non_phi_uses() + double_indirect_uses(), 2482 tagged_non_phi_uses() + tagged_indirect_uses()); 2483 PrintTypeTo(stream); 2484 stream->Add("]"); 2485 } 2486 2487 2488 void HPhi::AddInput(HValue* value) { 2489 inputs_.Add(NULL, value->block()->zone()); 2490 SetOperandAt(OperandCount() - 1, value); 2491 // Mark phis that may have 'arguments' directly or indirectly as an operand. 2492 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) { 2493 SetFlag(kIsArguments); 2494 } 2495 } 2496 2497 2498 bool HPhi::HasRealUses() { 2499 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 2500 if (!it.value()->IsPhi()) return true; 2501 } 2502 return false; 2503 } 2504 2505 2506 HValue* HPhi::GetRedundantReplacement() { 2507 HValue* candidate = NULL; 2508 int count = OperandCount(); 2509 int position = 0; 2510 while (position < count && candidate == NULL) { 2511 HValue* current = OperandAt(position++); 2512 if (current != this) candidate = current; 2513 } 2514 while (position < count) { 2515 HValue* current = OperandAt(position++); 2516 if (current != this && current != candidate) return NULL; 2517 } 2518 ASSERT(candidate != this); 2519 return candidate; 2520 } 2521 2522 2523 void HPhi::DeleteFromGraph() { 2524 ASSERT(block() != NULL); 2525 block()->RemovePhi(this); 2526 ASSERT(block() == NULL); 2527 } 2528 2529 2530 void HPhi::InitRealUses(int phi_id) { 2531 // Initialize real uses. 
2532 phi_id_ = phi_id; 2533 // Compute a conservative approximation of truncating uses before inferring 2534 // representations. The proper, exact computation will be done later, when 2535 // inserting representation changes. 2536 SetFlag(kTruncatingToSmi); 2537 SetFlag(kTruncatingToInt32); 2538 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 2539 HValue* value = it.value(); 2540 if (!value->IsPhi()) { 2541 Representation rep = value->observed_input_representation(it.index()); 2542 non_phi_uses_[rep.kind()] += 1; 2543 if (FLAG_trace_representation) { 2544 PrintF("#%d Phi is used by real #%d %s as %s\n", 2545 id(), value->id(), value->Mnemonic(), rep.Mnemonic()); 2546 } 2547 if (!value->IsSimulate()) { 2548 if (!value->CheckFlag(kTruncatingToSmi)) { 2549 ClearFlag(kTruncatingToSmi); 2550 } 2551 if (!value->CheckFlag(kTruncatingToInt32)) { 2552 ClearFlag(kTruncatingToInt32); 2553 } 2554 } 2555 } 2556 } 2557 } 2558 2559 2560 void HPhi::AddNonPhiUsesFrom(HPhi* other) { 2561 if (FLAG_trace_representation) { 2562 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n", 2563 id(), other->id(), 2564 other->non_phi_uses_[Representation::kSmi], 2565 other->non_phi_uses_[Representation::kInteger32], 2566 other->non_phi_uses_[Representation::kDouble], 2567 other->non_phi_uses_[Representation::kTagged]); 2568 } 2569 2570 for (int i = 0; i < Representation::kNumRepresentations; i++) { 2571 indirect_uses_[i] += other->non_phi_uses_[i]; 2572 } 2573 } 2574 2575 2576 void HPhi::AddIndirectUsesTo(int* dest) { 2577 for (int i = 0; i < Representation::kNumRepresentations; i++) { 2578 dest[i] += indirect_uses_[i]; 2579 } 2580 } 2581 2582 2583 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) { 2584 while (!list->is_empty()) { 2585 HSimulate* from = list->RemoveLast(); 2586 ZoneList<HValue*>* from_values = &from->values_; 2587 for (int i = 0; i < from_values->length(); ++i) { 2588 if (from->HasAssignedIndexAt(i)) { 2589 int index = from->GetAssignedIndexAt(i); 2590 if 
(HasValueForIndex(index)) continue; 2591 AddAssignedValue(index, from_values->at(i)); 2592 } else { 2593 if (pop_count_ > 0) { 2594 pop_count_--; 2595 } else { 2596 AddPushedValue(from_values->at(i)); 2597 } 2598 } 2599 } 2600 pop_count_ += from->pop_count_; 2601 from->DeleteAndReplaceWith(NULL); 2602 } 2603 } 2604 2605 2606 void HSimulate::PrintDataTo(StringStream* stream) { 2607 stream->Add("id=%d", ast_id().ToInt()); 2608 if (pop_count_ > 0) stream->Add(" pop %d", pop_count_); 2609 if (values_.length() > 0) { 2610 if (pop_count_ > 0) stream->Add(" /"); 2611 for (int i = values_.length() - 1; i >= 0; --i) { 2612 if (HasAssignedIndexAt(i)) { 2613 stream->Add(" var[%d] = ", GetAssignedIndexAt(i)); 2614 } else { 2615 stream->Add(" push "); 2616 } 2617 values_[i]->PrintNameTo(stream); 2618 if (i > 0) stream->Add(","); 2619 } 2620 } 2621 } 2622 2623 2624 void HSimulate::ReplayEnvironment(HEnvironment* env) { 2625 if (done_with_replay_) return; 2626 ASSERT(env != NULL); 2627 env->set_ast_id(ast_id()); 2628 env->Drop(pop_count()); 2629 for (int i = values()->length() - 1; i >= 0; --i) { 2630 HValue* value = values()->at(i); 2631 if (HasAssignedIndexAt(i)) { 2632 env->Bind(GetAssignedIndexAt(i), value); 2633 } else { 2634 env->Push(value); 2635 } 2636 } 2637 done_with_replay_ = true; 2638 } 2639 2640 2641 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values, 2642 HCapturedObject* other) { 2643 for (int i = 0; i < values->length(); ++i) { 2644 HValue* value = values->at(i); 2645 if (value->IsCapturedObject()) { 2646 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) { 2647 values->at(i) = other; 2648 } else { 2649 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other); 2650 } 2651 } 2652 } 2653 } 2654 2655 2656 // Replay captured objects by replacing all captured objects with the 2657 // same capture id in the current and all outer environments. 
2658 void HCapturedObject::ReplayEnvironment(HEnvironment* env) { 2659 ASSERT(env != NULL); 2660 while (env != NULL) { 2661 ReplayEnvironmentNested(env->values(), this); 2662 env = env->outer(); 2663 } 2664 } 2665 2666 2667 void HCapturedObject::PrintDataTo(StringStream* stream) { 2668 stream->Add("#%d ", capture_id()); 2669 HDematerializedObject::PrintDataTo(stream); 2670 } 2671 2672 2673 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target, 2674 Zone* zone) { 2675 ASSERT(return_target->IsInlineReturnTarget()); 2676 return_targets_.Add(return_target, zone); 2677 } 2678 2679 2680 void HEnterInlined::PrintDataTo(StringStream* stream) { 2681 SmartArrayPointer<char> name = function()->debug_name()->ToCString(); 2682 stream->Add("%s, id=%d", name.get(), function()->id().ToInt()); 2683 } 2684 2685 2686 static bool IsInteger32(double value) { 2687 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value)); 2688 return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value); 2689 } 2690 2691 2692 HConstant::HConstant(Handle<Object> object, Representation r) 2693 : HTemplateInstruction<0>(HType::FromValue(object)), 2694 object_(Unique<Object>::CreateUninitialized(object)), 2695 object_map_(Handle<Map>::null()), 2696 has_stable_map_value_(false), 2697 has_smi_value_(false), 2698 has_int32_value_(false), 2699 has_double_value_(false), 2700 has_external_reference_value_(false), 2701 is_not_in_new_space_(true), 2702 boolean_value_(object->BooleanValue()), 2703 is_undetectable_(false), 2704 instance_type_(kUnknownInstanceType) { 2705 if (object->IsHeapObject()) { 2706 Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object); 2707 Isolate* isolate = heap_object->GetIsolate(); 2708 Handle<Map> map(heap_object->map(), isolate); 2709 is_not_in_new_space_ = !isolate->heap()->InNewSpace(*object); 2710 instance_type_ = map->instance_type(); 2711 is_undetectable_ = map->is_undetectable(); 2712 if (map->is_stable()) object_map_ = 
Unique<Map>::CreateImmovable(map); 2713 has_stable_map_value_ = (instance_type_ == MAP_TYPE && 2714 Handle<Map>::cast(heap_object)->is_stable()); 2715 } 2716 if (object->IsNumber()) { 2717 double n = object->Number(); 2718 has_int32_value_ = IsInteger32(n); 2719 int32_value_ = DoubleToInt32(n); 2720 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_); 2721 double_value_ = n; 2722 has_double_value_ = true; 2723 // TODO(titzer): if this heap number is new space, tenure a new one. 2724 } 2725 2726 Initialize(r); 2727 } 2728 2729 2730 HConstant::HConstant(Unique<Object> object, 2731 Unique<Map> object_map, 2732 bool has_stable_map_value, 2733 Representation r, 2734 HType type, 2735 bool is_not_in_new_space, 2736 bool boolean_value, 2737 bool is_undetectable, 2738 InstanceType instance_type) 2739 : HTemplateInstruction<0>(type), 2740 object_(object), 2741 object_map_(object_map), 2742 has_stable_map_value_(has_stable_map_value), 2743 has_smi_value_(false), 2744 has_int32_value_(false), 2745 has_double_value_(false), 2746 has_external_reference_value_(false), 2747 is_not_in_new_space_(is_not_in_new_space), 2748 boolean_value_(boolean_value), 2749 is_undetectable_(is_undetectable), 2750 instance_type_(instance_type) { 2751 ASSERT(!object.handle().is_null()); 2752 ASSERT(!type.IsTaggedNumber() || type.IsNone()); 2753 Initialize(r); 2754 } 2755 2756 2757 HConstant::HConstant(int32_t integer_value, 2758 Representation r, 2759 bool is_not_in_new_space, 2760 Unique<Object> object) 2761 : object_(object), 2762 object_map_(Handle<Map>::null()), 2763 has_stable_map_value_(false), 2764 has_smi_value_(Smi::IsValid(integer_value)), 2765 has_int32_value_(true), 2766 has_double_value_(true), 2767 has_external_reference_value_(false), 2768 is_not_in_new_space_(is_not_in_new_space), 2769 boolean_value_(integer_value != 0), 2770 is_undetectable_(false), 2771 int32_value_(integer_value), 2772 double_value_(FastI2D(integer_value)), 2773 instance_type_(kUnknownInstanceType) { 
2774 // It's possible to create a constant with a value in Smi-range but stored 2775 // in a (pre-existing) HeapNumber. See crbug.com/349878. 2776 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null(); 2777 bool is_smi = has_smi_value_ && !could_be_heapobject; 2778 set_type(is_smi ? HType::Smi() : HType::TaggedNumber()); 2779 Initialize(r); 2780 } 2781 2782 2783 HConstant::HConstant(double double_value, 2784 Representation r, 2785 bool is_not_in_new_space, 2786 Unique<Object> object) 2787 : object_(object), 2788 object_map_(Handle<Map>::null()), 2789 has_stable_map_value_(false), 2790 has_int32_value_(IsInteger32(double_value)), 2791 has_double_value_(true), 2792 has_external_reference_value_(false), 2793 is_not_in_new_space_(is_not_in_new_space), 2794 boolean_value_(double_value != 0 && !std::isnan(double_value)), 2795 is_undetectable_(false), 2796 int32_value_(DoubleToInt32(double_value)), 2797 double_value_(double_value), 2798 instance_type_(kUnknownInstanceType) { 2799 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_); 2800 // It's possible to create a constant with a value in Smi-range but stored 2801 // in a (pre-existing) HeapNumber. See crbug.com/349878. 2802 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null(); 2803 bool is_smi = has_smi_value_ && !could_be_heapobject; 2804 set_type(is_smi ? 
HType::Smi() : HType::TaggedNumber()); 2805 Initialize(r); 2806 } 2807 2808 2809 HConstant::HConstant(ExternalReference reference) 2810 : HTemplateInstruction<0>(HType::Any()), 2811 object_(Unique<Object>(Handle<Object>::null())), 2812 object_map_(Handle<Map>::null()), 2813 has_stable_map_value_(false), 2814 has_smi_value_(false), 2815 has_int32_value_(false), 2816 has_double_value_(false), 2817 has_external_reference_value_(true), 2818 is_not_in_new_space_(true), 2819 boolean_value_(true), 2820 is_undetectable_(false), 2821 external_reference_value_(reference), 2822 instance_type_(kUnknownInstanceType) { 2823 Initialize(Representation::External()); 2824 } 2825 2826 2827 void HConstant::Initialize(Representation r) { 2828 if (r.IsNone()) { 2829 if (has_smi_value_ && SmiValuesAre31Bits()) { 2830 r = Representation::Smi(); 2831 } else if (has_int32_value_) { 2832 r = Representation::Integer32(); 2833 } else if (has_double_value_) { 2834 r = Representation::Double(); 2835 } else if (has_external_reference_value_) { 2836 r = Representation::External(); 2837 } else { 2838 Handle<Object> object = object_.handle(); 2839 if (object->IsJSObject()) { 2840 // Try to eagerly migrate JSObjects that have deprecated maps. 
2841 Handle<JSObject> js_object = Handle<JSObject>::cast(object); 2842 if (js_object->map()->is_deprecated()) { 2843 JSObject::TryMigrateInstance(js_object); 2844 } 2845 } 2846 r = Representation::Tagged(); 2847 } 2848 } 2849 set_representation(r); 2850 SetFlag(kUseGVN); 2851 } 2852 2853 2854 bool HConstant::ImmortalImmovable() const { 2855 if (has_int32_value_) { 2856 return false; 2857 } 2858 if (has_double_value_) { 2859 if (IsSpecialDouble()) { 2860 return true; 2861 } 2862 return false; 2863 } 2864 if (has_external_reference_value_) { 2865 return false; 2866 } 2867 2868 ASSERT(!object_.handle().is_null()); 2869 Heap* heap = isolate()->heap(); 2870 ASSERT(!object_.IsKnownGlobal(heap->minus_zero_value())); 2871 ASSERT(!object_.IsKnownGlobal(heap->nan_value())); 2872 return 2873 #define IMMORTAL_IMMOVABLE_ROOT(name) \ 2874 object_.IsKnownGlobal(heap->name()) || 2875 IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT) 2876 #undef IMMORTAL_IMMOVABLE_ROOT 2877 #define INTERNALIZED_STRING(name, value) \ 2878 object_.IsKnownGlobal(heap->name()) || 2879 INTERNALIZED_STRING_LIST(INTERNALIZED_STRING) 2880 #undef INTERNALIZED_STRING 2881 #define STRING_TYPE(NAME, size, name, Name) \ 2882 object_.IsKnownGlobal(heap->name##_map()) || 2883 STRING_TYPE_LIST(STRING_TYPE) 2884 #undef STRING_TYPE 2885 false; 2886 } 2887 2888 2889 bool HConstant::EmitAtUses() { 2890 ASSERT(IsLinked()); 2891 if (block()->graph()->has_osr() && 2892 block()->graph()->IsStandardConstant(this)) { 2893 // TODO(titzer): this seems like a hack that should be fixed by custom OSR. 
2894 return true; 2895 } 2896 if (UseCount() == 0) return true; 2897 if (IsCell()) return false; 2898 if (representation().IsDouble()) return false; 2899 if (representation().IsExternal()) return false; 2900 return true; 2901 } 2902 2903 2904 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const { 2905 if (r.IsSmi() && !has_smi_value_) return NULL; 2906 if (r.IsInteger32() && !has_int32_value_) return NULL; 2907 if (r.IsDouble() && !has_double_value_) return NULL; 2908 if (r.IsExternal() && !has_external_reference_value_) return NULL; 2909 if (has_int32_value_) { 2910 return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, object_); 2911 } 2912 if (has_double_value_) { 2913 return new(zone) HConstant(double_value_, r, is_not_in_new_space_, object_); 2914 } 2915 if (has_external_reference_value_) { 2916 return new(zone) HConstant(external_reference_value_); 2917 } 2918 ASSERT(!object_.handle().is_null()); 2919 return new(zone) HConstant(object_, 2920 object_map_, 2921 has_stable_map_value_, 2922 r, 2923 type_, 2924 is_not_in_new_space_, 2925 boolean_value_, 2926 is_undetectable_, 2927 instance_type_); 2928 } 2929 2930 2931 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) { 2932 HConstant* res = NULL; 2933 if (has_int32_value_) { 2934 res = new(zone) HConstant(int32_value_, 2935 Representation::Integer32(), 2936 is_not_in_new_space_, 2937 object_); 2938 } else if (has_double_value_) { 2939 res = new(zone) HConstant(DoubleToInt32(double_value_), 2940 Representation::Integer32(), 2941 is_not_in_new_space_, 2942 object_); 2943 } 2944 return Maybe<HConstant*>(res != NULL, res); 2945 } 2946 2947 2948 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) { 2949 HConstant* res = NULL; 2950 Handle<Object> handle = this->handle(zone->isolate()); 2951 if (handle->IsBoolean()) { 2952 res = handle->BooleanValue() ? 
2953 new(zone) HConstant(1) : new(zone) HConstant(0); 2954 } else if (handle->IsUndefined()) { 2955 res = new(zone) HConstant(OS::nan_value()); 2956 } else if (handle->IsNull()) { 2957 res = new(zone) HConstant(0); 2958 } 2959 return Maybe<HConstant*>(res != NULL, res); 2960 } 2961 2962 2963 void HConstant::PrintDataTo(StringStream* stream) { 2964 if (has_int32_value_) { 2965 stream->Add("%d ", int32_value_); 2966 } else if (has_double_value_) { 2967 stream->Add("%f ", FmtElm(double_value_)); 2968 } else if (has_external_reference_value_) { 2969 stream->Add("%p ", reinterpret_cast<void*>( 2970 external_reference_value_.address())); 2971 } else { 2972 handle(Isolate::Current())->ShortPrint(stream); 2973 stream->Add(" "); 2974 if (HasStableMapValue()) { 2975 stream->Add("[stable-map] "); 2976 } 2977 if (HasObjectMap()) { 2978 stream->Add("[map %p] ", *ObjectMap().handle()); 2979 } 2980 } 2981 if (!is_not_in_new_space_) { 2982 stream->Add("[new space] "); 2983 } 2984 } 2985 2986 2987 void HBinaryOperation::PrintDataTo(StringStream* stream) { 2988 left()->PrintNameTo(stream); 2989 stream->Add(" "); 2990 right()->PrintNameTo(stream); 2991 if (CheckFlag(kCanOverflow)) stream->Add(" !"); 2992 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?"); 2993 } 2994 2995 2996 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) { 2997 ASSERT(CheckFlag(kFlexibleRepresentation)); 2998 Representation new_rep = RepresentationFromInputs(); 2999 UpdateRepresentation(new_rep, h_infer, "inputs"); 3000 3001 if (representation().IsSmi() && HasNonSmiUse()) { 3002 UpdateRepresentation( 3003 Representation::Integer32(), h_infer, "use requirements"); 3004 } 3005 3006 if (observed_output_representation_.IsNone()) { 3007 new_rep = RepresentationFromUses(); 3008 UpdateRepresentation(new_rep, h_infer, "uses"); 3009 } else { 3010 new_rep = RepresentationFromOutput(); 3011 UpdateRepresentation(new_rep, h_infer, "output"); 3012 } 3013 } 3014 3015 3016 Representation 
HBinaryOperation::RepresentationFromInputs() { 3017 // Determine the worst case of observed input representations and 3018 // the currently assumed output representation. 3019 Representation rep = representation(); 3020 for (int i = 1; i <= 2; ++i) { 3021 rep = rep.generalize(observed_input_representation(i)); 3022 } 3023 // If any of the actual input representation is more general than what we 3024 // have so far but not Tagged, use that representation instead. 3025 Representation left_rep = left()->representation(); 3026 Representation right_rep = right()->representation(); 3027 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep); 3028 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep); 3029 3030 return rep; 3031 } 3032 3033 3034 bool HBinaryOperation::IgnoreObservedOutputRepresentation( 3035 Representation current_rep) { 3036 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) || 3037 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) && 3038 // Mul in Integer32 mode would be too precise. 3039 (!this->IsMul() || HMul::cast(this)->MulMinusOne()); 3040 } 3041 3042 3043 Representation HBinaryOperation::RepresentationFromOutput() { 3044 Representation rep = representation(); 3045 // Consider observed output representation, but ignore it if it's Double, 3046 // this instruction is not a division, and all its uses are truncating 3047 // to Integer32. 
3048 if (observed_output_representation_.is_more_general_than(rep) && 3049 !IgnoreObservedOutputRepresentation(rep)) { 3050 return observed_output_representation_; 3051 } 3052 return Representation::None(); 3053 } 3054 3055 3056 void HBinaryOperation::AssumeRepresentation(Representation r) { 3057 set_observed_input_representation(1, r); 3058 set_observed_input_representation(2, r); 3059 HValue::AssumeRepresentation(r); 3060 } 3061 3062 3063 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) { 3064 ASSERT(CheckFlag(kFlexibleRepresentation)); 3065 Representation new_rep = RepresentationFromInputs(); 3066 UpdateRepresentation(new_rep, h_infer, "inputs"); 3067 // Do not care about uses. 3068 } 3069 3070 3071 Range* HBitwise::InferRange(Zone* zone) { 3072 if (op() == Token::BIT_XOR) { 3073 if (left()->HasRange() && right()->HasRange()) { 3074 // The maximum value has the high bit, and all bits below, set: 3075 // (1 << high) - 1. 3076 // If the range can be negative, the minimum int is a negative number with 3077 // the high bit, and all bits below, unset: 3078 // -(1 << high). 3079 // If it cannot be negative, conservatively choose 0 as minimum int. 3080 int64_t left_upper = left()->range()->upper(); 3081 int64_t left_lower = left()->range()->lower(); 3082 int64_t right_upper = right()->range()->upper(); 3083 int64_t right_lower = right()->range()->lower(); 3084 3085 if (left_upper < 0) left_upper = ~left_upper; 3086 if (left_lower < 0) left_lower = ~left_lower; 3087 if (right_upper < 0) right_upper = ~right_upper; 3088 if (right_lower < 0) right_lower = ~right_lower; 3089 3090 int high = MostSignificantBit( 3091 static_cast<uint32_t>( 3092 left_upper | left_lower | right_upper | right_lower)); 3093 3094 int64_t limit = 1; 3095 limit <<= high; 3096 int32_t min = (left()->range()->CanBeNegative() || 3097 right()->range()->CanBeNegative()) 3098 ? 
static_cast<int32_t>(-limit) : 0; 3099 return new(zone) Range(min, static_cast<int32_t>(limit - 1)); 3100 } 3101 Range* result = HValue::InferRange(zone); 3102 result->set_can_be_minus_zero(false); 3103 return result; 3104 } 3105 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff); 3106 int32_t left_mask = (left()->range() != NULL) 3107 ? left()->range()->Mask() 3108 : kDefaultMask; 3109 int32_t right_mask = (right()->range() != NULL) 3110 ? right()->range()->Mask() 3111 : kDefaultMask; 3112 int32_t result_mask = (op() == Token::BIT_AND) 3113 ? left_mask & right_mask 3114 : left_mask | right_mask; 3115 if (result_mask >= 0) return new(zone) Range(0, result_mask); 3116 3117 Range* result = HValue::InferRange(zone); 3118 result->set_can_be_minus_zero(false); 3119 return result; 3120 } 3121 3122 3123 Range* HSar::InferRange(Zone* zone) { 3124 if (right()->IsConstant()) { 3125 HConstant* c = HConstant::cast(right()); 3126 if (c->HasInteger32Value()) { 3127 Range* result = (left()->range() != NULL) 3128 ? left()->range()->Copy(zone) 3129 : new(zone) Range(); 3130 result->Sar(c->Integer32Value()); 3131 return result; 3132 } 3133 } 3134 return HValue::InferRange(zone); 3135 } 3136 3137 3138 Range* HShr::InferRange(Zone* zone) { 3139 if (right()->IsConstant()) { 3140 HConstant* c = HConstant::cast(right()); 3141 if (c->HasInteger32Value()) { 3142 int shift_count = c->Integer32Value() & 0x1f; 3143 if (left()->range()->CanBeNegative()) { 3144 // Only compute bounds if the result always fits into an int32. 3145 return (shift_count >= 1) 3146 ? new(zone) Range(0, 3147 static_cast<uint32_t>(0xffffffff) >> shift_count) 3148 : new(zone) Range(); 3149 } else { 3150 // For positive inputs we can use the >> operator. 3151 Range* result = (left()->range() != NULL) 3152 ? 
left()->range()->Copy(zone) 3153 : new(zone) Range(); 3154 result->Sar(c->Integer32Value()); 3155 return result; 3156 } 3157 } 3158 } 3159 return HValue::InferRange(zone); 3160 } 3161 3162 3163 Range* HShl::InferRange(Zone* zone) { 3164 if (right()->IsConstant()) { 3165 HConstant* c = HConstant::cast(right()); 3166 if (c->HasInteger32Value()) { 3167 Range* result = (left()->range() != NULL) 3168 ? left()->range()->Copy(zone) 3169 : new(zone) Range(); 3170 result->Shl(c->Integer32Value()); 3171 return result; 3172 } 3173 } 3174 return HValue::InferRange(zone); 3175 } 3176 3177 3178 Range* HLoadNamedField::InferRange(Zone* zone) { 3179 if (access().representation().IsInteger8()) { 3180 return new(zone) Range(kMinInt8, kMaxInt8); 3181 } 3182 if (access().representation().IsUInteger8()) { 3183 return new(zone) Range(kMinUInt8, kMaxUInt8); 3184 } 3185 if (access().representation().IsInteger16()) { 3186 return new(zone) Range(kMinInt16, kMaxInt16); 3187 } 3188 if (access().representation().IsUInteger16()) { 3189 return new(zone) Range(kMinUInt16, kMaxUInt16); 3190 } 3191 if (access().IsStringLength()) { 3192 return new(zone) Range(0, String::kMaxLength); 3193 } 3194 return HValue::InferRange(zone); 3195 } 3196 3197 3198 Range* HLoadKeyed::InferRange(Zone* zone) { 3199 switch (elements_kind()) { 3200 case EXTERNAL_INT8_ELEMENTS: 3201 return new(zone) Range(kMinInt8, kMaxInt8); 3202 case EXTERNAL_UINT8_ELEMENTS: 3203 case EXTERNAL_UINT8_CLAMPED_ELEMENTS: 3204 return new(zone) Range(kMinUInt8, kMaxUInt8); 3205 case EXTERNAL_INT16_ELEMENTS: 3206 return new(zone) Range(kMinInt16, kMaxInt16); 3207 case EXTERNAL_UINT16_ELEMENTS: 3208 return new(zone) Range(kMinUInt16, kMaxUInt16); 3209 default: 3210 return HValue::InferRange(zone); 3211 } 3212 } 3213 3214 3215 void HCompareGeneric::PrintDataTo(StringStream* stream) { 3216 stream->Add(Token::Name(token())); 3217 stream->Add(" "); 3218 HBinaryOperation::PrintDataTo(stream); 3219 } 3220 3221 3222 void 
HStringCompareAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(token()));
  stream->Add(" ");
  HControlInstruction::PrintDataTo(stream);
}


void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(token()));
  stream->Add(" ");
  left()->PrintNameTo(stream);
  stream->Add(" ");
  right()->PrintNameTo(stream);
  HControlInstruction::PrintDataTo(stream);
}


void HCompareObjectEqAndBranch::PrintDataTo(StringStream* stream) {
  left()->PrintNameTo(stream);
  stream->Add(" ");
  right()->PrintNameTo(stream);
  HControlInstruction::PrintDataTo(stream);
}


// Statically resolves the branch when the successor is already recorded or
// when both inputs are foldable constants; otherwise sets *block to NULL
// and returns false.
bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (known_successor_index() != kNoKnownSuccessorIndex) {
    *block = SuccessorAt(known_successor_index());
    return true;
  }
  if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
    *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


// True when the constant is null or a non-callable spec object; numbers and
// undetectable objects are excluded.
bool ConstantIsObject(HConstant* constant, Isolate* isolate) {
  if (constant->HasNumberValue()) return false;
  if (constant->GetUnique().IsKnownGlobal(isolate->heap()->null_value())) {
    return true;
  }
  if (constant->IsUndetectable()) return false;
  InstanceType type = constant->GetInstanceType();
  return (FIRST_NONCALLABLE_SPEC_OBJECT_TYPE <= type) &&
         (type <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
}


bool HIsObjectAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = ConstantIsObject(HConstant::cast(value()), isolate())
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


// "Is string" resolution: tries the recorded successor, constant folding,
// the static String type, and types that definitely are not strings.
bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (known_successor_index() != kNoKnownSuccessorIndex) {
    *block = SuccessorAt(known_successor_index());
    return true;
  }
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->HasStringValue()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  if (value()->type().IsString()) {
    *block = FirstSuccessor();
    return true;
  }
  if (value()->type().IsSmi() ||
      value()->type().IsNull() ||
      value()->type().IsBoolean() ||
      value()->type().IsUndefined() ||
      value()->type().IsJSObject()) {
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    *block = HConstant::cast(value())->IsUndetectable()
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


// Folds the branch when the constant's instance type is known: first
// successor iff the type lies in the inclusive [from_, to_] window.
bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    InstanceType type = HConstant::cast(value())->GetInstanceType();
    *block = (from_ <= type) && (type <= to_)
        ?
        FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


// The hole comparison simply adopts its input's representation.
void HCompareHoleAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}


// Comparing a Smi/int32 value against itself is statically decidable:
// the reflexive operators (==, ===, <=, >=) take the first successor.
bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (left() == right() &&
      left()->representation().IsSmiOrInteger32()) {
    *block = (token() == Token::EQ ||
              token() == Token::EQ_STRICT ||
              token() == Token::LTE ||
              token() == Token::GTE)
        ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


// Folds the minus-zero test for double constants and for representations
// that cannot encode -0 at all.
bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    HConstant* constant = HConstant::cast(value());
    if (constant->HasDoubleValue()) {
      *block = IsMinusZero(constant->DoubleValue())
          ? FirstSuccessor() : SecondSuccessor();
      return true;
    }
  }
  if (value()->representation().IsSmiOrInteger32()) {
    // A Smi or Integer32 cannot contain minus zero.
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


void HCompareMinusZeroAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}


// Prints the target basic block id.
void HGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", SuccessorAt(0)->block_id());
}


// Chooses Smi/int32 for the comparison when both the observed and the
// actual (untagged) input representations allow it; otherwise double.
void HCompareNumericAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  Representation observed_left = observed_input_representation(0);
  Representation observed_right = observed_input_representation(1);

  Representation rep = Representation::None();
  rep = rep.generalize(observed_left);
  rep = rep.generalize(observed_right);
  if (rep.IsNone() || rep.IsSmiOrInteger32()) {
    if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
    if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
  } else {
    rep = Representation::Double();
  }

  if (rep.IsDouble()) {
    // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
    // and !=) have special handling of undefined, e.g. undefined == undefined
    // is 'true'. Relational comparisons have a different semantic, first
    // calling ToPrimitive() on their arguments. The standard Crankshaft
    // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
    // inputs are doubles caused 'undefined' to be converted to NaN. That's
    // compatible out-of-the box with ordered relational comparisons (<, >, <=,
    // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
    // it is not consistent with the spec. For example, it would cause undefined
    // == undefined (should be true) to be evaluated as NaN == NaN
    // (false). Therefore, any comparisons other than ordered relational
    // comparisons must cause a deopt when one of their arguments is undefined.
    // See also v8:1434
    if (Token::IsOrderedRelationalCompareOp(token_)) {
      SetFlag(kAllowUndefinedAsNaN);
    }
  }
  ChangeRepresentation(rep);
}


void HParameter::PrintDataTo(StringStream* stream) {
  stream->Add("%u", index());
}


// Prints the object, the field access, the tracked map set (if any), and
// the optional load dependency.
void HLoadNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  access_.PrintTo(stream);

  if (maps() != NULL) {
    stream->Add(" [%p", *maps()->at(0).handle());
    for (int i = 1; i < maps()->size(); ++i) {
      stream->Add(",%p", *maps()->at(i).handle());
    }
    stream->Add("]");
  }

  if (HasDependency()) {
    stream->Add(" ");
    dependency()->PrintNameTo(stream);
  }
}


void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(".");
  stream->Add(String::cast(*name())->ToCString().get());
}


// Prints elements (plus kind for external arrays), the key, dehoisting
// offset, dependency, and whether a hole check is required.
void HLoadKeyed::PrintDataTo(StringStream* stream) {
  if (!is_external()) {
    elements()->PrintNameTo(stream);
  } else {
    ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
    elements()->PrintNameTo(stream);
    stream->Add(".");
    stream->Add(ElementsKindToString(elements_kind()));
  }

  stream->Add("[");
  key()->PrintNameTo(stream);
  if (IsDehoisted()) {
    stream->Add(" + %d]", base_offset());
  } else {
    stream->Add("]");
  }

  if (HasDependency()) {
    stream->Add(" ");
    dependency()->PrintNameTo(stream);
  }

  if (RequiresHoleCheck()) {
    stream->Add(" check_hole");
  }
}


// Decides whether the users of this load must be able to observe the hole
// value directly (as opposed to a hole check being done here).
bool HLoadKeyed::UsesMustHandleHole() const {
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if
     (IsExternalArrayElementsKind(elements_kind())) {
    return false;
  }

  if (hole_mode() == ALLOW_RETURN_HOLE) {
    // Holes from double arrays may only escape if all uses accept NaN.
    if (IsFastDoubleElementsKind(elements_kind())) {
      return AllUsesCanTreatHoleAsNaN();
    }
    return true;
  }

  if (IsFastDoubleElementsKind(elements_kind())) {
    return false;
  }

  // Holes are only returned as tagged values.
  if (!representation().IsTagged()) {
    return false;
  }

  // Every use must be a representation change for the hole to escape.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return false;
  }

  return true;
}


// A hole loaded from a fast-double array may be passed through as NaN only
// when every use tolerates undefined-as-NaN.
bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
  return IsFastDoubleElementsKind(elements_kind()) &&
         CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
}


// Packed and external arrays never contain holes; otherwise a check is
// needed unless the uses handle the hole themselves.
bool HLoadKeyed::RequiresHoleCheck() const {
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if (IsExternalArrayElementsKind(elements_kind())) {
    return false;
  }

  return !UsesMustHandleHole();
}


void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add("[");
  key()->PrintNameTo(stream);
  stream->Add("]");
}


HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use property name generated
  // by for-in statement as a key and rewrite them into fast property load
  // by index.
  if (key()->IsLoadKeyed()) {
    HLoadKeyed* key_load = HLoadKeyed::cast(key());
    if (key_load->elements()->IsForInCacheArray()) {
      HForInCacheArray* names_cache =
          HForInCacheArray::cast(key_load->elements());

      if (names_cache->enumerable() == object()) {
        HForInCacheArray* index_cache =
            names_cache->index_cache();
        // Guard with a map check, load the field index from the parallel
        // index cache, and replace this generic load with a by-index load.
        HCheckMapValue* map_check =
            HCheckMapValue::New(block()->graph()->zone(),
                                block()->graph()->GetInvalidContext(),
                                object(),
                                names_cache->map());
        HInstruction* index = HLoadKeyed::New(
            block()->graph()->zone(),
            block()->graph()->GetInvalidContext(),
            index_cache,
            key_load->key(),
            key_load->key(),
            key_load->elements_kind());
        map_check->InsertBefore(this);
        index->InsertBefore(this);
        return Prepend(new(block()->zone()) HLoadFieldByIndex(
            object(), index));
      }
    }
  }

  return this;
}


void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(".");
  ASSERT(name()->IsString());
  stream->Add(String::cast(*name())->ToCString().get());
  stream->Add(" = ");
  value()->PrintNameTo(stream);
}


// Prints object/access/value plus write-barrier and map-transition markers.
void HStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  access_.PrintTo(stream);
  stream->Add(" = ");
  value()->PrintNameTo(stream);
  if (NeedsWriteBarrier()) {
    stream->Add(" (write-barrier)");
  }
  if (has_transition()) {
    stream->Add(" (transition map %p)", *transition_map());
  }
}


void HStoreKeyed::PrintDataTo(StringStream* stream) {
  if (!is_external()) {
    elements()->PrintNameTo(stream);
  } else {
    elements()->PrintNameTo(stream);
    stream->Add(".");
    stream->Add(ElementsKindToString(elements_kind()));
    ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
           elements_kind() <=
               LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
  }

  stream->Add("[");
  key()->PrintNameTo(stream);
  if (IsDehoisted()) {
    stream->Add(" + %d] = ", base_offset());
  } else {
    stream->Add("] = ");
  }

  value()->PrintNameTo(stream);
}


void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add("[");
  key()->PrintNameTo(stream);
  stream->Add("] = ");
  value()->PrintNameTo(stream);
}


// Prints the source and target maps with their elements kinds; transitions
// that need no object copy are flagged "(simple)".
void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  ElementsKind from_kind = original_map().handle()->elements_kind();
  ElementsKind to_kind = transitioned_map().handle()->elements_kind();
  stream->Add(" %p [%s] -> %p [%s]",
              *original_map().handle(),
              ElementsAccessor::ForKind(from_kind)->name(),
              *transitioned_map().handle(),
              ElementsAccessor::ForKind(to_kind)->name());
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) stream->Add(" (simple)");
}


void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
  stream->Add("[%p]", *cell().handle());
  if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
  if (details_.IsReadOnly()) stream->Add(" (read-only)");
}


// No hole check is required when the cell is non-deletable and writable,
// or when every use is a representation change (HChange).
bool HLoadGlobalCell::RequiresHoleCheck() const {
  if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return true;
  }
  return false;
}


void HLoadGlobalGeneric::PrintDataTo(StringStream* stream) {
  stream->Add("%o ", *name());
}


void HInnerAllocatedObject::PrintDataTo(StringStream* stream) {
  base_object()->PrintNameTo(stream);
  stream->Add(" offset ");
  offset()->PrintTo(stream);
}


void HStoreGlobalCell::PrintDataTo(StringStream*
                                    stream) {
  stream->Add("[%p] = ", *cell().handle());
  value()->PrintNameTo(stream);
  if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
  if (details_.IsReadOnly()) stream->Add(" (read-only)");
}


void HLoadContextSlot::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add("[%d]", slot_index());
}


void HStoreContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintNameTo(stream);
  stream->Add("[%d] = ", slot_index());
  value()->PrintNameTo(stream);
}


// Implementation of type inference and type conversions. Calculates
// the inferred type of this instruction based on the input operands.

// Default: an instruction's inferred type is its statically assigned type.
HType HValue::CalculateInferredType() {
  return type_;
}


// A phi's type is the combination of all operand types; a phi without
// operands is Tagged.
HType HPhi::CalculateInferredType() {
  if (OperandCount() == 0) return HType::Tagged();
  HType result = OperandAt(0)->type();
  for (int i = 1; i < OperandCount(); ++i) {
    HType current = OperandAt(i)->type();
    result = result.Combine(current);
  }
  return result;
}


// Boxing a double produces a HeapNumber; any other change keeps its type.
HType HChange::CalculateInferredType() {
  if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
  return type();
}


Representation HUnaryMathOperation::RepresentationFromInputs() {
  if (SupportsFlexibleFloorAndRound() &&
      (op_ == kMathFloor || op_ == kMathRound)) {
    // Floor and Round always take a double input. The integral result can be
    // used as an integer or a double. Infer the representation from the uses.
    return Representation::None();
  }
  Representation rep = representation();
  // If any of the actual input representation is more general than what we
  // have so far but not Tagged, use that representation instead.
  Representation input_rep = value()->representation();
  if (!input_rep.IsTagged()) {
    rep = rep.generalize(input_rep);
  }
  return rep;
}


// Tries to fold this allocation into a dominating HAllocate by growing the
// dominator's size and rewriting this instruction into an inner pointer.
// Returns true iff the fold succeeded.
bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
                                          HValue* dominator) {
  ASSERT(side_effect == kNewSpacePromotion);
  Zone* zone = block()->zone();
  if (!FLAG_use_allocation_folding) return false;

  // Try to fold allocations together with their dominating allocations.
  if (!dominator->IsAllocate()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s)\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  // Check whether we are folding within the same block for local folding.
  if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  HAllocate* dominator_allocate = HAllocate::cast(dominator);
  HValue* dominator_size = dominator_allocate->size();
  HValue* current_size = size();

  // TODO(hpayer): Add support for non-constant allocation in dominator.
  // Folding requires a constant-size dominator.
  if (!dominator_size->IsInteger32Constant()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), "
             "dynamic allocation size in dominator\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  dominator_allocate = GetFoldableDominator(dominator_allocate);
  if (dominator_allocate == NULL) {
    return false;
  }

  // Without an upper bound on our own size the total cannot be estimated.
  if (!has_size_upper_bound()) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s), "
             "can't estimate total allocation size\n",
             id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
    }
    return false;
  }

  if (!current_size->IsInteger32Constant()) {
    // If it's not constant then it is a size_in_bytes calculation graph
    // like this: (const_header_size + const_element_size * size).
    ASSERT(current_size->IsInstruction());

    HInstruction* current_instr = HInstruction::cast(current_size);
    if (!current_instr->Dominates(dominator_allocate)) {
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), dynamic size "
               "value does not dominate target allocation\n",
               id(), Mnemonic(), dominator_allocate->id(),
               dominator_allocate->Mnemonic());
      }
      return false;
    }
  }

  // Both allocations must target the same space.
  ASSERT((IsNewSpaceAllocation() &&
          dominator_allocate->IsNewSpaceAllocation()) ||
         (IsOldDataSpaceAllocation() &&
          dominator_allocate->IsOldDataSpaceAllocation()) ||
         (IsOldPointerSpaceAllocation() &&
          dominator_allocate->IsOldPointerSpaceAllocation()));

  // First update the size of the dominator allocate instruction.
  dominator_size = dominator_allocate->size();
  int32_t original_object_size =
      HConstant::cast(dominator_size)->GetInteger32Constant();
  int32_t dominator_size_constant = original_object_size;

  // Pad the dominator's size so that this allocation starts double-aligned.
  if (MustAllocateDoubleAligned()) {
    if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
      dominator_size_constant += kDoubleSize / 2;
    }
  }

  int32_t current_size_max_value = size_upper_bound()->GetInteger32Constant();
  int32_t new_dominator_size = dominator_size_constant + current_size_max_value;

  // Since we clear the first word after folded memory, we cannot use the
  // whole Page::kMaxRegularHeapObjectSize memory.
  if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
    if (FLAG_trace_allocation_folding) {
      PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
             id(), Mnemonic(), dominator_allocate->id(),
             dominator_allocate->Mnemonic(), new_dominator_size);
    }
    return false;
  }

  HInstruction* new_dominator_size_value;

  if (current_size->IsInteger32Constant()) {
    // Both sizes are constant: emit the summed size directly.
    new_dominator_size_value =
        HConstant::CreateAndInsertBefore(zone,
                                         context(),
                                         new_dominator_size,
                                         Representation::None(),
                                         dominator_allocate);
  } else {
    HValue* new_dominator_size_constant =
        HConstant::CreateAndInsertBefore(zone,
                                         context(),
                                         dominator_size_constant,
                                         Representation::Integer32(),
                                         dominator_allocate);

    // Add old and new size together and insert.
    current_size->ChangeRepresentation(Representation::Integer32());

    new_dominator_size_value = HAdd::New(zone, context(),
        new_dominator_size_constant, current_size);
    // The sum was range-checked above, so overflow cannot occur.
    new_dominator_size_value->ClearFlag(HValue::kCanOverflow);
    new_dominator_size_value->ChangeRepresentation(Representation::Integer32());

    new_dominator_size_value->InsertBefore(dominator_allocate);
  }

  dominator_allocate->UpdateSize(new_dominator_size_value);

  // Propagate our alignment requirement to the dominator.
  if (MustAllocateDoubleAligned()) {
    if (!dominator_allocate->MustAllocateDoubleAligned()) {
      dominator_allocate->MakeDoubleAligned();
    }
  }

  bool keep_new_space_iterable = FLAG_log_gc || FLAG_heap_stats;
#ifdef VERIFY_HEAP
  keep_new_space_iterable = keep_new_space_iterable || FLAG_verify_heap;
#endif

  if (keep_new_space_iterable && dominator_allocate->IsNewSpaceAllocation()) {
    dominator_allocate->MakePrefillWithFiller();
  } else {
    // TODO(hpayer): This is a short-term hack to make allocation mementos
    // work again in new space.
    dominator_allocate->ClearNextMapWord(original_object_size);
  }

  dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());

  // After that replace the dominated allocate instruction.
  // Replace this allocation with an inner pointer into the (grown)
  // dominating allocation.
  HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
      zone,
      context(),
      dominator_size_constant,
      Representation::None(),
      this);

  HInstruction* dominated_allocate_instr =
      HInnerAllocatedObject::New(zone,
                                 context(),
                                 dominator_allocate,
                                 inner_offset,
                                 type());
  dominated_allocate_instr->InsertBefore(this);
  DeleteAndReplaceWith(dominated_allocate_instr);
  if (FLAG_trace_allocation_folding) {
    PrintF("#%d (%s) folded into #%d (%s)\n",
           id(), Mnemonic(), dominator_allocate->id(),
           dominator_allocate->Mnemonic());
  }
  return true;
}


// When the immediate dominator lives in an incompatible space, tries to
// hoist over it to the dominator's own dominating allocation. Returns the
// allocation to fold into, or NULL when folding is impossible.
HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
  if (!IsFoldable(dominator)) {
    // We cannot hoist old space allocations over new space allocations.
    if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
               id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
      }
      return NULL;
    }

    HAllocate* dominator_dominator = dominator->dominating_allocate_;

    // We can hoist old data space allocations over an old pointer space
    // allocation and vice versa. For that we have to check the dominator
    // of the dominator allocate instruction.
    if (dominator_dominator == NULL) {
      dominating_allocate_ = dominator;
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
               id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
      }
      return NULL;
    }

    // We can just fold old space allocations that are in the same basic block,
    // since it is not guaranteed that we fill up the whole allocated old
    // space memory.
    // TODO(hpayer): Remove this limitation and add filler maps for each each
    // allocation as soon as we have store elimination.
    if (block()->block_id() != dominator_dominator->block()->block_id()) {
      if (FLAG_trace_allocation_folding) {
        PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
               id(), Mnemonic(), dominator_dominator->id(),
               dominator_dominator->Mnemonic());
      }
      return NULL;
    }

    ASSERT((IsOldDataSpaceAllocation() &&
            dominator_dominator->IsOldDataSpaceAllocation()) ||
           (IsOldPointerSpaceAllocation() &&
            dominator_dominator->IsOldPointerSpaceAllocation()));

    int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
    HStoreNamedField* dominator_free_space_size =
        dominator->filler_free_space_size_;
    if (dominator_free_space_size != NULL) {
      // We already hoisted one old space allocation, i.e., we already installed
      // a filler map. Hence, we just have to update the free space size.
      dominator->UpdateFreeSpaceFiller(current_size);
    } else {
      // This is the first old space allocation that gets hoisted. We have to
      // install a filler map since the following allocation may cause a GC.
      dominator->CreateFreeSpaceFiller(current_size);
    }

    // We can hoist the old space allocation over the actual dominator.
    return dominator_dominator;
  }
  return dominator;
}


// Grows the size recorded in the already-installed free-space filler by
// |free_space_size| bytes.
void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
  ASSERT(filler_free_space_size_ != NULL);
  Zone* zone = block()->zone();
  // We must explicitly force Smi representation here because on x64 we
  // would otherwise automatically choose int32, but the actual store
  // requires a Smi-tagged value.
  HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
      zone,
      context(),
      filler_free_space_size_->value()->GetInteger32Constant() +
      free_space_size,
      Representation::Smi(),
      filler_free_space_size_);
  filler_free_space_size_->UpdateValue(new_free_space_size);
}


// Installs a FreeSpace filler (map store followed by a size store) at the
// dominating allocation's end, remembering the size store so it can be
// grown later by UpdateFreeSpaceFiller().
void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
  ASSERT(filler_free_space_size_ == NULL);
  Zone* zone = block()->zone();
  HInstruction* free_space_instr =
      HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
      dominating_allocate_->size(), type());
  free_space_instr->InsertBefore(this);
  HConstant* filler_map = HConstant::CreateAndInsertAfter(
      zone, Unique<Map>::CreateImmovable(
          isolate()->factory()->free_space_map()), true, free_space_instr);
  HInstruction* store_map = HStoreNamedField::New(zone, context(),
      free_space_instr, HObjectAccess::ForMap(), filler_map);
  store_map->SetFlag(HValue::kHasNoObservableSideEffects);
  store_map->InsertAfter(filler_map);

  // We must explicitly force Smi representation here because on x64 we
  // would otherwise automatically choose int32, but the actual store
  // requires a Smi-tagged value.
  HConstant* filler_size = HConstant::CreateAndInsertAfter(
      zone, context(), free_space_size, Representation::Smi(), store_map);
  // Must force Smi representation for x64 (see comment above).
  HObjectAccess access =
      HObjectAccess::ForMapAndOffset(isolate()->factory()->free_space_map(),
                                     FreeSpace::kSizeOffset,
                                     Representation::Smi());
  HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
      free_space_instr, access, filler_size);
  store_size->SetFlag(HValue::kHasNoObservableSideEffects);
  store_size->InsertAfter(filler_size);
  filler_free_space_size_ = store_size;
}


// Stores the constant 0 at |offset| past this allocation when required
// (clearing the would-be map word of the next object).
void HAllocate::ClearNextMapWord(int offset) {
  if (MustClearNextMapWord()) {
    Zone* zone = block()->zone();
    HObjectAccess access =
        HObjectAccess::ForObservableJSObjectOffset(offset);
    HStoreNamedField* clear_next_map =
        HStoreNamedField::New(zone, context(), this, access,
            block()->graph()->GetConstant0());
    clear_next_map->ClearAllSideEffects();
    clear_next_map->InsertAfter(this);
  }
}


// Prints the size plus single-letter flags: N/P/D for the target space,
// A for double alignment, F for prefill-with-filler.
void HAllocate::PrintDataTo(StringStream* stream) {
  size()->PrintNameTo(stream);
  stream->Add(" (");
  if (IsNewSpaceAllocation()) stream->Add("N");
  if (IsOldPointerSpaceAllocation()) stream->Add("P");
  if (IsOldDataSpaceAllocation()) stream->Add("D");
  if (MustAllocateDoubleAligned()) stream->Add("A");
  if (MustPrefillWithFiller()) stream->Add("F");
  stream->Add(")");
}


bool HStoreKeyed::NeedsCanonicalization() {
  // If value is an integer or smi or comes from the result of a keyed load or
  // constant then it is either be a non-hole value or in the case of a constant
  // the hole is only being stored explicitly: no need for canonicalization.
  //
  // The exception to that is keyed loads from external float or double arrays:
  // these can load arbitrary representation of NaN.
  if (value()->IsConstant()) {
    return false;
  }

  if (value()->IsLoadKeyed()) {
    return IsExternalFloatOrDoubleElementsKind(
        HLoadKeyed::cast(value())->elements_kind());
  }

  if (value()->IsChange()) {
    if (HChange::cast(value())->from().IsSmiOrInteger32()) {
      return false;
    }
    if (HChange::cast(value())->value()->type().IsSmi()) {
      return false;
    }
  }
  return true;
}


// Helpers for constant folding in the New() factories below.
#define H_CONSTANT_INT(val)                                                    \
HConstant::New(zone, context, static_cast<int32_t>(val))
#define H_CONSTANT_DOUBLE(val)                                                 \
HConstant::New(zone, context, static_cast<double>(val))

// Defines HAdd/HMul/HSub factories that fold two numeric constants; results
// that fit an int32 become integer constants, otherwise double constants.
#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op)                       \
HInstruction* HInstr::New(                                                     \
    Zone* zone, HValue* context, HValue* left, HValue* right) {                \
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {      \
    HConstant* c_left = HConstant::cast(left);                                 \
    HConstant* c_right = HConstant::cast(right);                               \
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {             \
      double double_res = c_left->DoubleValue() op c_right->DoubleValue();     \
      if (IsInt32Double(double_res)) {                                         \
        return H_CONSTANT_INT(double_res);                                     \
      }                                                                        \
      return H_CONSTANT_DOUBLE(double_res);                                    \
    }                                                                          \
  }                                                                            \
  return new(zone) HInstr(context, left, right);                               \
}


DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR


// Factory that folds concatenation of two string constants when the result
// length stays below String::kMaxLength.
HInstruction* HStringAdd::New(Zone* zone,
                              HValue* context,
                              HValue* left,
                              HValue* right,
                              PretenureFlag pretenure_flag,
                              StringAddFlags flags,
                              Handle<AllocationSite> allocation_site) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_right = HConstant::cast(right);
    HConstant* c_left = HConstant::cast(left);
    if
       (c_left->HasStringValue() && c_right->HasStringValue()) {
      Handle<String> left_string = c_left->StringValue();
      Handle<String> right_string = c_right->StringValue();
      // Prevent possible exception by invalid string length.
      if (left_string->length() + right_string->length() < String::kMaxLength) {
        Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
            c_left->StringValue(), c_right->StringValue());
        ASSERT(!concat.is_null());
        return HConstant::New(zone, context, concat);
      }
    }
  }
  return new(zone) HStringAdd(
      context, left, right, pretenure_flag, flags, allocation_site);
}


// Prints which operands get a string check, then the operands and the
// pretenuring decision (N = not tenured, D = tenured).
void HStringAdd::PrintDataTo(StringStream* stream) {
  if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
    stream->Add("_CheckBoth");
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
    stream->Add("_CheckLeft");
  } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
    stream->Add("_CheckRight");
  }
  HBinaryOperation::PrintDataTo(stream);
  stream->Add(" (");
  if (pretenure_flag() == NOT_TENURED) stream->Add("N");
  else if (pretenure_flag() == TENURED) stream->Add("D");
  stream->Add(")");
}


// Folds String.fromCharCode of a constant code into a string constant;
// non-finite codes fold to the empty string.
HInstruction* HStringCharFromCode::New(
    Zone* zone, HValue* context, HValue* char_code) {
  if (FLAG_fold_constants && char_code->IsConstant()) {
    HConstant* c_code = HConstant::cast(char_code);
    Isolate* isolate = zone->isolate();
    if (c_code->HasNumberValue()) {
      if (std::isfinite(c_code->DoubleValue())) {
        // Only the low 16 bits of the code are significant.
        uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
        return HConstant::New(zone, context,
            isolate->factory()->LookupSingleCharacterStringFromCode(code));
      }
      return HConstant::New(zone, context, isolate->factory()->empty_string());
    }
  }
  return new(zone) HStringCharFromCode(context, char_code);
}
// Factory: constant-folds unary math builtins on a numeric constant operand.
// NaN, +/-Infinity and finite inputs are handled in separate tiers; when no
// folding applies, control falls out of the do/while and a real instruction
// is emitted.
HInstruction* HUnaryMathOperation::New(
    Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(OS::nan_value());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathExp:
          // exp(+inf) = +inf, exp(-inf) = 0.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          // log/sqrt of -inf is NaN.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : OS::nan_value());
        case kMathPowHalf:
        case kMathAbs:
          // -inf is mapped to its negation, +inf.
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFloor:
          // Infinities round/floor to themselves.
          return H_CONSTANT_DOUBLE(d);
        case kMathClz32:
          // clz32 of an infinite input folds to 32.
          return H_CONSTANT_INT(32);
        default:
          UNREACHABLE();
          break;
      }
    }
    // Finite, non-NaN input: fold with the matching runtime routine.
    switch (op) {
      case kMathExp:
        return H_CONSTANT_DOUBLE(fast_exp(d));
      case kMathLog:
        return H_CONSTANT_DOUBLE(std::log(d));
      case kMathSqrt:
        return H_CONSTANT_DOUBLE(fast_sqrt(d));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        // d + 0.0 normalizes -0.0 to +0.0.
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significant * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(std::floor(d + 0.5));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(std::floor(d));
      case kMathClz32: {
        uint32_t i = DoubleToUint32(d);
        return H_CONSTANT_INT(
            (i == 0) ? 32 : CompilerIntrinsics::CountLeadingZeros(i));
      }
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}


// For floor/round the result may be int32 or double; scan the uses and pick
// double as soon as any use observes or requires a double.  Every other op
// defers to the generic HValue use scan.
Representation HUnaryMathOperation::RepresentationFromUses() {
  if (op_ != kMathFloor && op_ != kMathRound) {
    return HValue::RepresentationFromUses();
  }

  // The instruction can have an int32 or double output. Prefer a double
  // representation if there are double uses.
  bool use_double = false;

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    int use_index = it.index();
    Representation rep_observed = use->observed_input_representation(use_index);
    Representation rep_required = use->RequiredInputRepresentation(use_index);
    use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
    if (use_double && !FLAG_trace_representation) {
      // Having seen one double is enough.
      break;
    }
    if (FLAG_trace_representation) {
      if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
        PrintF("#%d %s is used by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_observed.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      } else {
        PrintF("#%d %s is required by #%d %s as %s%s\n",
               id(), Mnemonic(), use->id(),
               use->Mnemonic(), rep_required.Mnemonic(),
               (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
      }
    }
  }
  return use_double ? Representation::Double() : Representation::Integer32();
}


// Factory: folds pow() of two numeric constants, normalizing a NaN result
// to the canonical NaN value.
HInstruction* HPower::New(Zone* zone,
                          HValue* context,
                          HValue* left,
                          HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double result = power_helper(c_left->DoubleValue(),
                                   c_right->DoubleValue());
      return H_CONSTANT_DOUBLE(std::isnan(result) ? OS::nan_value() : result);
    }
  }
  return new(zone) HPower(left, right);
}


// Factory: folds min/max of two numeric constants.  When both compare equal
// the sign bit decides the winner so that +0/-0 are distinguished; if all
// three comparisons fail, at least one operand is NaN and the result is NaN.
HInstruction* HMathMinMax::New(
    Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(OS::nan_value());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}


// Factory: folds % of two int32 constants.  kMinInt % -1 and a zero result
// with a negative dividend both fold to the double -0.0; a zero divisor is
// not folded (left to the instruction).
HInstruction* HMod::New(Zone* zone,
                        HValue* context,
                        HValue* left,
                        HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
      int32_t dividend = c_left->Integer32Value();
      int32_t divisor = c_right->Integer32Value();
      if (dividend == kMinInt && divisor == -1) {
        return H_CONSTANT_DOUBLE(-0.0);
      }
      if (divisor != 0) {
        int32_t res = dividend % divisor;
        if ((res == 0) && (dividend < 0)) {
          return H_CONSTANT_DOUBLE(-0.0);
        }
        return H_CONSTANT_INT(res);
      }
    }
  }
  return new(zone) HMod(context, left, right);
}


// Factory: folds division of two numeric constants.  Division by (possibly
// signed) zero folds to an infinity whose sign is the product of the operand
// signs.
HInstruction* HDiv::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  // If left and right are constant values, try to return a constant value.
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      if (c_right->DoubleValue() != 0) {
        double double_res = c_left->DoubleValue() / c_right->DoubleValue();
        if (IsInt32Double(double_res)) {
          return H_CONSTANT_INT(double_res);
        }
        return H_CONSTANT_DOUBLE(double_res);
      } else {
        int sign = Double(c_left->DoubleValue()).Sign() *
            Double(c_right->DoubleValue()).Sign();  // Right could be -0.
        return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
      }
    }
  }
  return new(zone) HDiv(context, left, right);
}


// Factory: folds bitwise AND/OR/XOR of two numeric constants after
// truncating both operands to int32.
HInstruction* HBitwise::New(
    Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t result;
      int32_t v_left = c_left->NumberValueAsInteger32();
      int32_t v_right = c_right->NumberValueAsInteger32();
      switch (op) {
        case Token::BIT_XOR:
          result = v_left ^ v_right;
          break;
        case Token::BIT_AND:
          result = v_left & v_right;
          break;
        case Token::BIT_OR:
          result = v_left | v_right;
          break;
        default:
          result = 0;  // Please the compiler.
          UNREACHABLE();
      }
      return H_CONSTANT_INT(result);
    }
  }
  return new(zone) HBitwise(context, op, left, right);
}


// Factory generator for HSar/HShl: folds a shift of two numeric constants;
// the shift count is masked to its low five bits, as in JS.
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
HInstruction* HInstr::New( \
    Zone* zone, HValue* context, HValue* left, HValue* right) { \
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
    HConstant* c_left = HConstant::cast(left); \
    HConstant* c_right = HConstant::cast(right); \
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
      return H_CONSTANT_INT(result); \
    } \
  } \
  return new(zone) HInstr(context, left, right); \
}


DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR


// Factory: folds an unsigned right shift of two numeric constants.  A zero
// shift of a negative value keeps its unsigned reinterpretation, which does
// not fit in int32, so that case folds to a double constant.
HInstruction* HShr::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t left_val = c_left->NumberValueAsInteger32();
      int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
      if ((right_val == 0) && (left_val < 0)) {
        return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
      }
      return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
    }
  }
  return new(zone) HShr(context, left, right);
}


// Factory: folds a character load from a constant string at a constant
// index; the index is asserted (debug-only) to be in bounds.
HInstruction* HSeqStringGetChar::New(Zone* zone,
                                     HValue* context,
                                     String::Encoding encoding,
                                     HValue* string,
                                     HValue* index) {
  if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
      Handle<String> s = c_string->StringValue();
      int32_t i = c_index->Integer32Value();
      ASSERT_LE(0, i);
      ASSERT_LT(i, s->length());
      return H_CONSTANT_INT(s->Get(i));
    }
  }
  return new(zone) HSeqStringGetChar(encoding, string, index);
}


#undef H_CONSTANT_INT
#undef H_CONSTANT_DOUBLE


// Prints the token name of the bitwise op followed by the generic
// bitwise-binary-operation data.
void HBitwise::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(op_));
  stream->Add(" ");
  HBitwiseBinaryOperation::PrintDataTo(stream);
}


void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  // Bail out unless every operand is a constant.
  for (int i = 0; i < OperandCount(); ++i) {
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      continue;
    } else if (operand->HasDoubleValue()) {
      // Replace a double operand with its truncated int32 value.
      HConstant* integer_input =
          HConstant::New(graph->zone(), graph->GetInvalidContext(),
                         DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      // true/false become the canonical 1/0 constants.
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}


// Infers the phi's representation from its inputs first, then from its uses,
// then from hard use requirements (mirrors HValue::InferRepresentation plus
// the use-requirements step).
void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}


// The phi's input representation is the generalization of all operands'
// known-optimal representations.
Representation HPhi::RepresentationFromInputs() {
  Representation r = Representation::None();
  for (int i = 0; i < OperandCount(); ++i) {
    r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
  }
  return r;
}


// Returns a representation if all uses agree on the same representation.
// Integer32 is also returned when some uses are Smi but others are Integer32.
Representation HValue::RepresentationFromUseRequirements() {
  Representation rep = Representation::None();
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // Ignore the use requirement from never run code
    if (it.value()->block()->IsUnreachable()) continue;

    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (rep.IsNone()) {
      rep = use_rep;
      continue;
    }
    if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
    if (rep.generalize(use_rep).IsInteger32()) {
      rep = Representation::Integer32();
      continue;
    }
    // Conflicting requirements: no agreement possible.
    return Representation::None();
  }
  return rep;
}


// True iff some use requires a concrete representation other than Smi or
// Tagged.
bool HValue::HasNonSmiUse() {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (!use_rep.IsNone() &&
        !use_rep.IsSmi() &&
        !use_rep.IsTagged()) {
      return true;
    }
  }
  return false;
}


// Node-specific verification code is only included in debug mode.
4586 #ifdef DEBUG 4587 4588 void HPhi::Verify() { 4589 ASSERT(OperandCount() == block()->predecessors()->length()); 4590 for (int i = 0; i < OperandCount(); ++i) { 4591 HValue* value = OperandAt(i); 4592 HBasicBlock* defining_block = value->block(); 4593 HBasicBlock* predecessor_block = block()->predecessors()->at(i); 4594 ASSERT(defining_block == predecessor_block || 4595 defining_block->Dominates(predecessor_block)); 4596 } 4597 } 4598 4599 4600 void HSimulate::Verify() { 4601 HInstruction::Verify(); 4602 ASSERT(HasAstId() || next()->IsEnterInlined()); 4603 } 4604 4605 4606 void HCheckHeapObject::Verify() { 4607 HInstruction::Verify(); 4608 ASSERT(HasNoUses()); 4609 } 4610 4611 4612 void HCheckValue::Verify() { 4613 HInstruction::Verify(); 4614 ASSERT(HasNoUses()); 4615 } 4616 4617 #endif 4618 4619 4620 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) { 4621 ASSERT(offset >= 0); 4622 ASSERT(offset < FixedArray::kHeaderSize); 4623 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength(); 4624 return HObjectAccess(kInobject, offset); 4625 } 4626 4627 4628 HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset, 4629 Representation representation) { 4630 ASSERT(offset >= 0); 4631 Portion portion = kInobject; 4632 4633 if (offset == JSObject::kElementsOffset) { 4634 portion = kElementsPointer; 4635 } else if (offset == JSObject::kMapOffset) { 4636 portion = kMaps; 4637 } 4638 bool existing_inobject_property = true; 4639 if (!map.is_null()) { 4640 existing_inobject_property = (offset < 4641 map->instance_size() - map->unused_property_fields() * kPointerSize); 4642 } 4643 return HObjectAccess(portion, offset, representation, Handle<String>::null(), 4644 false, existing_inobject_property); 4645 } 4646 4647 4648 HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) { 4649 switch (offset) { 4650 case AllocationSite::kTransitionInfoOffset: 4651 return HObjectAccess(kInobject, offset, Representation::Tagged()); 4652 case 
AllocationSite::kNestedSiteOffset: 4653 return HObjectAccess(kInobject, offset, Representation::Tagged()); 4654 case AllocationSite::kPretenureDataOffset: 4655 return HObjectAccess(kInobject, offset, Representation::Smi()); 4656 case AllocationSite::kPretenureCreateCountOffset: 4657 return HObjectAccess(kInobject, offset, Representation::Smi()); 4658 case AllocationSite::kDependentCodeOffset: 4659 return HObjectAccess(kInobject, offset, Representation::Tagged()); 4660 case AllocationSite::kWeakNextOffset: 4661 return HObjectAccess(kInobject, offset, Representation::Tagged()); 4662 default: 4663 UNREACHABLE(); 4664 } 4665 return HObjectAccess(kInobject, offset); 4666 } 4667 4668 4669 HObjectAccess HObjectAccess::ForContextSlot(int index) { 4670 ASSERT(index >= 0); 4671 Portion portion = kInobject; 4672 int offset = Context::kHeaderSize + index * kPointerSize; 4673 ASSERT_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag); 4674 return HObjectAccess(portion, offset, Representation::Tagged()); 4675 } 4676 4677 4678 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) { 4679 ASSERT(offset >= 0); 4680 Portion portion = kInobject; 4681 4682 if (offset == JSObject::kElementsOffset) { 4683 portion = kElementsPointer; 4684 } else if (offset == JSArray::kLengthOffset) { 4685 portion = kArrayLengths; 4686 } else if (offset == JSObject::kMapOffset) { 4687 portion = kMaps; 4688 } 4689 return HObjectAccess(portion, offset); 4690 } 4691 4692 4693 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset, 4694 Representation representation) { 4695 ASSERT(offset >= 0); 4696 return HObjectAccess(kBackingStore, offset, representation, 4697 Handle<String>::null(), false, false); 4698 } 4699 4700 4701 HObjectAccess HObjectAccess::ForField(Handle<Map> map, 4702 LookupResult* lookup, 4703 Handle<String> name) { 4704 ASSERT(lookup->IsField() || lookup->IsTransitionToField()); 4705 int index; 4706 Representation representation; 4707 if (lookup->IsField()) { 4708 index = 
lookup->GetLocalFieldIndexFromMap(*map); 4709 representation = lookup->representation(); 4710 } else { 4711 Map* transition = lookup->GetTransitionTarget(); 4712 int descriptor = transition->LastAdded(); 4713 index = transition->instance_descriptors()->GetFieldIndex(descriptor) - 4714 map->inobject_properties(); 4715 PropertyDetails details = 4716 transition->instance_descriptors()->GetDetails(descriptor); 4717 representation = details.representation(); 4718 } 4719 if (index < 0) { 4720 // Negative property indices are in-object properties, indexed 4721 // from the end of the fixed part of the object. 4722 int offset = (index * kPointerSize) + map->instance_size(); 4723 return HObjectAccess(kInobject, offset, representation, name, false, true); 4724 } else { 4725 // Non-negative property indices are in the properties array. 4726 int offset = (index * kPointerSize) + FixedArray::kHeaderSize; 4727 return HObjectAccess(kBackingStore, offset, representation, name, 4728 false, false); 4729 } 4730 } 4731 4732 4733 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) { 4734 return HObjectAccess( 4735 kInobject, Cell::kValueOffset, Representation::Tagged(), 4736 Handle<String>(isolate->heap()->cell_value_string())); 4737 } 4738 4739 4740 void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) { 4741 // set the appropriate GVN flags for a given load or store instruction 4742 if (access_type == STORE) { 4743 // track dominating allocations in order to eliminate write barriers 4744 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion); 4745 instr->SetFlag(HValue::kTrackSideEffectDominators); 4746 } else { 4747 // try to GVN loads, but don't hoist above map changes 4748 instr->SetFlag(HValue::kUseGVN); 4749 instr->SetDependsOnFlag(::v8::internal::kMaps); 4750 } 4751 4752 switch (portion()) { 4753 case kArrayLengths: 4754 if (access_type == STORE) { 4755 instr->SetChangesFlag(::v8::internal::kArrayLengths); 4756 } else { 4757 
instr->SetDependsOnFlag(::v8::internal::kArrayLengths); 4758 } 4759 break; 4760 case kStringLengths: 4761 if (access_type == STORE) { 4762 instr->SetChangesFlag(::v8::internal::kStringLengths); 4763 } else { 4764 instr->SetDependsOnFlag(::v8::internal::kStringLengths); 4765 } 4766 break; 4767 case kInobject: 4768 if (access_type == STORE) { 4769 instr->SetChangesFlag(::v8::internal::kInobjectFields); 4770 } else { 4771 instr->SetDependsOnFlag(::v8::internal::kInobjectFields); 4772 } 4773 break; 4774 case kDouble: 4775 if (access_type == STORE) { 4776 instr->SetChangesFlag(::v8::internal::kDoubleFields); 4777 } else { 4778 instr->SetDependsOnFlag(::v8::internal::kDoubleFields); 4779 } 4780 break; 4781 case kBackingStore: 4782 if (access_type == STORE) { 4783 instr->SetChangesFlag(::v8::internal::kBackingStoreFields); 4784 } else { 4785 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields); 4786 } 4787 break; 4788 case kElementsPointer: 4789 if (access_type == STORE) { 4790 instr->SetChangesFlag(::v8::internal::kElementsPointer); 4791 } else { 4792 instr->SetDependsOnFlag(::v8::internal::kElementsPointer); 4793 } 4794 break; 4795 case kMaps: 4796 if (access_type == STORE) { 4797 instr->SetChangesFlag(::v8::internal::kMaps); 4798 } else { 4799 instr->SetDependsOnFlag(::v8::internal::kMaps); 4800 } 4801 break; 4802 case kExternalMemory: 4803 if (access_type == STORE) { 4804 instr->SetChangesFlag(::v8::internal::kExternalMemory); 4805 } else { 4806 instr->SetDependsOnFlag(::v8::internal::kExternalMemory); 4807 } 4808 break; 4809 } 4810 } 4811 4812 4813 void HObjectAccess::PrintTo(StringStream* stream) const { 4814 stream->Add("."); 4815 4816 switch (portion()) { 4817 case kArrayLengths: 4818 case kStringLengths: 4819 stream->Add("%length"); 4820 break; 4821 case kElementsPointer: 4822 stream->Add("%elements"); 4823 break; 4824 case kMaps: 4825 stream->Add("%map"); 4826 break; 4827 case kDouble: // fall through 4828 case kInobject: 4829 if (!name_.is_null()) { 
4830 stream->Add(String::cast(*name_)->ToCString().get()); 4831 } 4832 stream->Add("[in-object]"); 4833 break; 4834 case kBackingStore: 4835 if (!name_.is_null()) { 4836 stream->Add(String::cast(*name_)->ToCString().get()); 4837 } 4838 stream->Add("[backing-store]"); 4839 break; 4840 case kExternalMemory: 4841 stream->Add("[external-memory]"); 4842 break; 4843 } 4844 4845 stream->Add("@%d", offset()); 4846 } 4847 4848 } } // namespace v8::internal 4849