1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include "v8.h"

#include "double.h"
#include "factory.h"
#include "hydrogen-infer-representation.h"

// Pull in the architecture-specific Lithium (low-level IR) declarations.
#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

// Emit one CompileToLithium method per concrete hydrogen instruction;
// each one just dispatches to the matching Do<Type> method on the
// Lithium chunk builder.
#define DEFINE_COMPILE(type)                                         \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) {  \
    return builder->Do##type(this);                                  \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE


// Weight used to bias a use by the loop nesting depth of its block:
// FLAG_loop_weight raised to the nesting depth, capped at depth 4.
int HValue::LoopWeight() const {
  const int w = FLAG_loop_weight;
  // NOTE(review): function-local static — the table is built once, from the
  // flag value observed on the first call.
  static const int weights[] = { 1, w, w*w, w*w*w, w*w*w*w };
  return weights[Min(block()->LoopNestingDepth(),
                     static_cast<int>(ARRAY_SIZE(weights)-1))];
}


// The isolate is reached through the owning basic block; the value must
// already have been added to a block.
Isolate* HValue::isolate() const {
  ASSERT(block() != NULL);
  return block()->isolate();
}


// Forces the representation to |r| and freezes it, if it was still flexible.
void HValue::AssumeRepresentation(Representation r) {
  if (CheckFlag(kFlexibleRepresentation)) {
    ChangeRepresentation(r);
    // The representation of the value is dictated by type feedback and
    // will not be changed later.
    ClearFlag(kFlexibleRepresentation);
  }
}


// One step of the representation-inference fixpoint: first generalize from
// the operands, then from the uses, then demote Smi to Integer32 when some
// use cannot accept a Smi.
void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }
}


// Picks a representation by voting over all uses, each use weighted by its
// loop depth.  Preference order on a tie-free vote: Tagged wins over Double
// wins over Integer32 wins over Smi (any tagged use forces Tagged, etc.).
Representation HValue::RepresentationFromUses() {
  if (HasNoUses()) return Representation::None();

  // Array of use counts for each representation.
  int use_count[Representation::kNumRepresentations] = { 0 };

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    Representation rep = use->observed_input_representation(it.index());
    if (rep.IsNone()) continue;
    if (FLAG_trace_representation) {
      PrintF("#%d %s is used by #%d %s as %s%s\n",
             id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
             (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
    }
    use_count[rep.kind()] += use->LoopWeight();
  }
  // Phis also account for uses reached indirectly through other phis.
  if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
  int tagged_count = use_count[Representation::kTagged];
  int double_count = use_count[Representation::kDouble];
  int int32_count = use_count[Representation::kInteger32];
  int smi_count = use_count[Representation::kSmi];

  if (tagged_count > 0) return Representation::Tagged();
  if (double_count > 0) return Representation::Double();
  if (int32_count > 0) return Representation::Integer32();
  if (smi_count > 0) return Representation::Smi();

  return Representation::None();
}


// Widens this value's representation to |new_rep| if it is strictly more
// general, re-queueing dependants so the inference fixpoint can react.
// |reason| is only used for --trace-representation output.
void HValue::UpdateRepresentation(Representation new_rep,
                                  HInferRepresentationPhase* h_infer,
                                  const char* reason) {
  Representation r = representation();
  if (new_rep.is_more_general_than(r)) {
    // Values flagged kCannotBeTagged refuse the Tagged representation.
    if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
    if (FLAG_trace_representation) {
      PrintF("Changing #%d %s representation %s -> %s based on %s\n",
             id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
    }
    ChangeRepresentation(new_rep);
    AddDependantsToWorklist(h_infer);
  }
}


// Re-queues everything whose inferred representation may depend on this
// value: all uses and all operands.
void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    h_infer->AddToWorklist(it.value());
  }
  for (int i = 0; i < OperandCount(); ++i) {
    h_infer->AddToWorklist(OperandAt(i));
  }
}


// Saturates a 64-bit intermediate result to the bounds of representation
// |r| (Smi range if r.IsSmi(), otherwise int32 range) and records whether
// saturation happened in *overflow.
static int32_t ConvertAndSetOverflow(Representation r,
                                     int64_t result,
                                     bool* overflow) {
  if (r.IsSmi()) {
    if (result > Smi::kMaxValue) {
      *overflow = true;
      return Smi::kMaxValue;
    }
    if (result < Smi::kMinValue) {
      *overflow = true;
      return Smi::kMinValue;
    }
  } else {
    if (result > kMaxInt) {
      *overflow = true;
      return kMaxInt;
    }
    if (result < kMinInt) {
      *overflow = true;
      return kMinInt;
    }
  }
  return static_cast<int32_t>(result);
}


// a + b computed in 64 bits, saturated to |r|'s range; sets *overflow on
// saturation.
static int32_t AddWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// a - b computed in 64 bits, saturated to |r|'s range; sets *overflow on
// saturation.
static int32_t SubWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// a * b computed in 64 bits, saturated to |r|'s range; sets *overflow on
// saturation.
static int32_t MulWithoutOverflow(const Representation& r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


// Returns a conservative bitmask covering every value in the range:
// the exact value for a singleton range, the smallest all-ones mask
// >= upper_ for a non-negative range, and all 32 bits otherwise.
int32_t Range::Mask() const {
  if (lower_ == upper_) return lower_;
  if (lower_ >= 0) {
    int32_t res = 1;
    while (res < upper_) {
      res = (res << 1) | 1;
    }
    return res;
  }
  return 0xffffffff;
}


// Shifts both bounds by a constant, saturating at the int32 limits.
void Range::AddConstant(int32_t value) {
  if (value == 0) return;
  bool may_overflow = false;  // Overflow is ignored here.
  Representation r = Representation::Integer32();
  lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
#ifdef DEBUG
  Verify();
#endif
}


// Narrows this range to the intersection with |other|; -0 remains possible
// only if both ranges allow it.
void Range::Intersect(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  bool b = CanBeMinusZero() && other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


// Widens this range to the union with |other|; -0 is possible if either
// range allows it.
void Range::Union(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  bool b = CanBeMinusZero() || other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


// Range of Max(x, y) for x in this range and y in |other|: both bounds are
// the pairwise maxima.
void Range::CombinedMax(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


// Range of Min(x, y): both bounds are the pairwise minima.
void Range::CombinedMin(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


// Arithmetic shift right by a constant; only the low 5 bits of |value| are
// used, matching the machine/JS shift semantics.
void Range::Sar(int32_t value) {
  int32_t bits = value & 0x1F;
  lower_ = lower_ >> bits;
  upper_ = upper_ >> bits;
  set_can_be_minus_zero(false);
}


// Shift left by a constant (low 5 bits).  If shifting back does not
// reproduce the original bounds the shift lost bits, so give up and widen
// to the full int32 range.
void Range::Shl(int32_t value) {
  int32_t bits = value & 0x1F;
  int old_lower = lower_;
  int old_upper = upper_;
  lower_ = lower_ << bits;
  upper_ = upper_ << bits;
  if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
    upper_ = kMaxInt;
    lower_ = kMinInt;
  }
  set_can_be_minus_zero(false);
}


// Interval addition with saturation; returns true if either bound may have
// overflowed representation |r|.  Bounds are re-ordered afterwards since
// saturation can invert them.
bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  KeepOrder();
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}

bool Range::SubAndCheckOverflow(const Representation& r, Range* other) { 295 bool may_overflow = false; 296 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow); 297 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow); 298 KeepOrder(); 299 #ifdef DEBUG 300 Verify(); 301 #endif 302 return may_overflow; 303 } 304 305 306 void Range::KeepOrder() { 307 if (lower_ > upper_) { 308 int32_t tmp = lower_; 309 lower_ = upper_; 310 upper_ = tmp; 311 } 312 } 313 314 315 #ifdef DEBUG 316 void Range::Verify() const { 317 ASSERT(lower_ <= upper_); 318 } 319 #endif 320 321 322 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) { 323 bool may_overflow = false; 324 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow); 325 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow); 326 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow); 327 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow); 328 lower_ = Min(Min(v1, v2), Min(v3, v4)); 329 upper_ = Max(Max(v1, v2), Max(v3, v4)); 330 #ifdef DEBUG 331 Verify(); 332 #endif 333 return may_overflow; 334 } 335 336 337 const char* HType::ToString() { 338 // Note: The c1visualizer syntax for locals allows only a sequence of the 339 // following characters: A-Za-z0-9_-|: 340 switch (type_) { 341 case kNone: return "none"; 342 case kTagged: return "tagged"; 343 case kTaggedPrimitive: return "primitive"; 344 case kTaggedNumber: return "number"; 345 case kSmi: return "smi"; 346 case kHeapNumber: return "heap-number"; 347 case kString: return "string"; 348 case kBoolean: return "boolean"; 349 case kNonPrimitive: return "non-primitive"; 350 case kJSArray: return "array"; 351 case kJSObject: return "object"; 352 } 353 UNREACHABLE(); 354 return "unreachable"; 355 } 356 357 358 HType HType::TypeFromValue(Handle<Object> value) { 359 HType result = HType::Tagged(); 360 if (value->IsSmi()) { 361 result = HType::Smi(); 362 } else if 
(value->IsHeapNumber()) { 363 result = HType::HeapNumber(); 364 } else if (value->IsString()) { 365 result = HType::String(); 366 } else if (value->IsBoolean()) { 367 result = HType::Boolean(); 368 } else if (value->IsJSObject()) { 369 result = HType::JSObject(); 370 } else if (value->IsJSArray()) { 371 result = HType::JSArray(); 372 } 373 return result; 374 } 375 376 377 bool HValue::IsDefinedAfter(HBasicBlock* other) const { 378 return block()->block_id() > other->block_id(); 379 } 380 381 382 HUseListNode* HUseListNode::tail() { 383 // Skip and remove dead items in the use list. 384 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) { 385 tail_ = tail_->tail_; 386 } 387 return tail_; 388 } 389 390 391 bool HValue::CheckUsesForFlag(Flag f) const { 392 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 393 if (it.value()->IsSimulate()) continue; 394 if (!it.value()->CheckFlag(f)) return false; 395 } 396 return true; 397 } 398 399 400 bool HValue::CheckUsesForFlag(Flag f, HValue** value) const { 401 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 402 if (it.value()->IsSimulate()) continue; 403 if (!it.value()->CheckFlag(f)) { 404 *value = it.value(); 405 return false; 406 } 407 } 408 return true; 409 } 410 411 412 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const { 413 bool return_value = false; 414 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { 415 if (it.value()->IsSimulate()) continue; 416 if (!it.value()->CheckFlag(f)) return false; 417 return_value = true; 418 } 419 return return_value; 420 } 421 422 423 HUseIterator::HUseIterator(HUseListNode* head) : next_(head) { 424 Advance(); 425 } 426 427 428 void HUseIterator::Advance() { 429 current_ = next_; 430 if (current_ != NULL) { 431 next_ = current_->tail(); 432 value_ = current_->value(); 433 index_ = current_->index(); 434 } 435 } 436 437 438 int HValue::UseCount() const { 439 int count = 0; 440 for (HUseIterator it(uses()); !it.Done(); 
it.Advance()) ++count; 441 return count; 442 } 443 444 445 HUseListNode* HValue::RemoveUse(HValue* value, int index) { 446 HUseListNode* previous = NULL; 447 HUseListNode* current = use_list_; 448 while (current != NULL) { 449 if (current->value() == value && current->index() == index) { 450 if (previous == NULL) { 451 use_list_ = current->tail(); 452 } else { 453 previous->set_tail(current->tail()); 454 } 455 break; 456 } 457 458 previous = current; 459 current = current->tail(); 460 } 461 462 #ifdef DEBUG 463 // Do not reuse use list nodes in debug mode, zap them. 464 if (current != NULL) { 465 HUseListNode* temp = 466 new(block()->zone()) 467 HUseListNode(current->value(), current->index(), NULL); 468 current->Zap(); 469 current = temp; 470 } 471 #endif 472 return current; 473 } 474 475 476 bool HValue::Equals(HValue* other) { 477 if (other->opcode() != opcode()) return false; 478 if (!other->representation().Equals(representation())) return false; 479 if (!other->type_.Equals(type_)) return false; 480 if (other->flags() != flags()) return false; 481 if (OperandCount() != other->OperandCount()) return false; 482 for (int i = 0; i < OperandCount(); ++i) { 483 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false; 484 } 485 bool result = DataEquals(other); 486 ASSERT(!result || Hashcode() == other->Hashcode()); 487 return result; 488 } 489 490 491 intptr_t HValue::Hashcode() { 492 intptr_t result = opcode(); 493 int count = OperandCount(); 494 for (int i = 0; i < count; ++i) { 495 result = result * 19 + OperandAt(i)->id() + (result >> 7); 496 } 497 return result; 498 } 499 500 501 const char* HValue::Mnemonic() const { 502 switch (opcode()) { 503 #define MAKE_CASE(type) case k##type: return #type; 504 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE) 505 #undef MAKE_CASE 506 case kPhi: return "Phi"; 507 default: return ""; 508 } 509 } 510 511 512 bool HValue::CanReplaceWithDummyUses() { 513 return FLAG_unreachable_code_elimination && 514 
!(block()->IsReachable() || 515 IsBlockEntry() || 516 IsControlInstruction() || 517 IsSimulate() || 518 IsEnterInlined() || 519 IsLeaveInlined()); 520 } 521 522 523 bool HValue::IsInteger32Constant() { 524 return IsConstant() && HConstant::cast(this)->HasInteger32Value(); 525 } 526 527 528 int32_t HValue::GetInteger32Constant() { 529 return HConstant::cast(this)->Integer32Value(); 530 } 531 532 533 bool HValue::EqualsInteger32Constant(int32_t value) { 534 return IsInteger32Constant() && GetInteger32Constant() == value; 535 } 536 537 538 void HValue::SetOperandAt(int index, HValue* value) { 539 RegisterUse(index, value); 540 InternalSetOperandAt(index, value); 541 } 542 543 544 void HValue::DeleteAndReplaceWith(HValue* other) { 545 // We replace all uses first, so Delete can assert that there are none. 546 if (other != NULL) ReplaceAllUsesWith(other); 547 Kill(); 548 DeleteFromGraph(); 549 } 550 551 552 void HValue::ReplaceAllUsesWith(HValue* other) { 553 while (use_list_ != NULL) { 554 HUseListNode* list_node = use_list_; 555 HValue* value = list_node->value(); 556 ASSERT(!value->block()->IsStartBlock()); 557 value->InternalSetOperandAt(list_node->index(), other); 558 use_list_ = list_node->tail(); 559 list_node->set_tail(other->use_list_); 560 other->use_list_ = list_node; 561 } 562 } 563 564 565 void HValue::Kill() { 566 // Instead of going through the entire use list of each operand, we only 567 // check the first item in each use list and rely on the tail() method to 568 // skip dead items, removing them lazily next time we traverse the list. 
569 SetFlag(kIsDead); 570 for (int i = 0; i < OperandCount(); ++i) { 571 HValue* operand = OperandAt(i); 572 if (operand == NULL) continue; 573 HUseListNode* first = operand->use_list_; 574 if (first != NULL && first->value()->CheckFlag(kIsDead)) { 575 operand->use_list_ = first->tail(); 576 } 577 } 578 } 579 580 581 void HValue::SetBlock(HBasicBlock* block) { 582 ASSERT(block_ == NULL || block == NULL); 583 block_ = block; 584 if (id_ == kNoNumber && block != NULL) { 585 id_ = block->graph()->GetNextValueID(this); 586 } 587 } 588 589 590 void HValue::PrintTypeTo(StringStream* stream) { 591 if (!representation().IsTagged() || type().Equals(HType::Tagged())) return; 592 stream->Add(" type:%s", type().ToString()); 593 } 594 595 596 void HValue::PrintRangeTo(StringStream* stream) { 597 if (range() == NULL || range()->IsMostGeneric()) return; 598 // Note: The c1visualizer syntax for locals allows only a sequence of the 599 // following characters: A-Za-z0-9_-|: 600 stream->Add(" range:%d_%d%s", 601 range()->lower(), 602 range()->upper(), 603 range()->CanBeMinusZero() ? 
"_m0" : ""); 604 } 605 606 607 void HValue::PrintChangesTo(StringStream* stream) { 608 GVNFlagSet changes_flags = ChangesFlags(); 609 if (changes_flags.IsEmpty()) return; 610 stream->Add(" changes["); 611 if (changes_flags == AllSideEffectsFlagSet()) { 612 stream->Add("*"); 613 } else { 614 bool add_comma = false; 615 #define PRINT_DO(type) \ 616 if (changes_flags.Contains(kChanges##type)) { \ 617 if (add_comma) stream->Add(","); \ 618 add_comma = true; \ 619 stream->Add(#type); \ 620 } 621 GVN_TRACKED_FLAG_LIST(PRINT_DO); 622 GVN_UNTRACKED_FLAG_LIST(PRINT_DO); 623 #undef PRINT_DO 624 } 625 stream->Add("]"); 626 } 627 628 629 void HValue::PrintNameTo(StringStream* stream) { 630 stream->Add("%s%d", representation_.Mnemonic(), id()); 631 } 632 633 634 bool HValue::HasMonomorphicJSObjectType() { 635 return !GetMonomorphicJSObjectMap().is_null(); 636 } 637 638 639 bool HValue::UpdateInferredType() { 640 HType type = CalculateInferredType(); 641 bool result = (!type.Equals(type_)); 642 type_ = type; 643 return result; 644 } 645 646 647 void HValue::RegisterUse(int index, HValue* new_value) { 648 HValue* old_value = OperandAt(index); 649 if (old_value == new_value) return; 650 651 HUseListNode* removed = NULL; 652 if (old_value != NULL) { 653 removed = old_value->RemoveUse(this, index); 654 } 655 656 if (new_value != NULL) { 657 if (removed == NULL) { 658 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode( 659 this, index, new_value->use_list_); 660 } else { 661 removed->set_tail(new_value->use_list_); 662 new_value->use_list_ = removed; 663 } 664 } 665 } 666 667 668 void HValue::AddNewRange(Range* r, Zone* zone) { 669 if (!HasRange()) ComputeInitialRange(zone); 670 if (!HasRange()) range_ = new(zone) Range(); 671 ASSERT(HasRange()); 672 r->StackUpon(range_); 673 range_ = r; 674 } 675 676 677 void HValue::RemoveLastAddedRange() { 678 ASSERT(HasRange()); 679 ASSERT(range_->next() != NULL); 680 range_ = range_->next(); 681 } 682 683 684 void 
HValue::ComputeInitialRange(Zone* zone) { 685 ASSERT(!HasRange()); 686 range_ = InferRange(zone); 687 ASSERT(HasRange()); 688 } 689 690 691 void HInstruction::PrintTo(StringStream* stream) { 692 PrintMnemonicTo(stream); 693 PrintDataTo(stream); 694 PrintRangeTo(stream); 695 PrintChangesTo(stream); 696 PrintTypeTo(stream); 697 if (CheckFlag(HValue::kHasNoObservableSideEffects)) { 698 stream->Add(" [noOSE]"); 699 } 700 } 701 702 703 void HInstruction::PrintDataTo(StringStream *stream) { 704 for (int i = 0; i < OperandCount(); ++i) { 705 if (i > 0) stream->Add(" "); 706 OperandAt(i)->PrintNameTo(stream); 707 } 708 } 709 710 711 void HInstruction::PrintMnemonicTo(StringStream* stream) { 712 stream->Add("%s ", Mnemonic()); 713 } 714 715 716 void HInstruction::Unlink() { 717 ASSERT(IsLinked()); 718 ASSERT(!IsControlInstruction()); // Must never move control instructions. 719 ASSERT(!IsBlockEntry()); // Doesn't make sense to delete these. 720 ASSERT(previous_ != NULL); 721 previous_->next_ = next_; 722 if (next_ == NULL) { 723 ASSERT(block()->last() == this); 724 block()->set_last(previous_); 725 } else { 726 next_->previous_ = previous_; 727 } 728 clear_block(); 729 } 730 731 732 void HInstruction::InsertBefore(HInstruction* next) { 733 ASSERT(!IsLinked()); 734 ASSERT(!next->IsBlockEntry()); 735 ASSERT(!IsControlInstruction()); 736 ASSERT(!next->block()->IsStartBlock()); 737 ASSERT(next->previous_ != NULL); 738 HInstruction* prev = next->previous(); 739 prev->next_ = this; 740 next->previous_ = this; 741 next_ = next; 742 previous_ = prev; 743 SetBlock(next->block()); 744 if (position() == RelocInfo::kNoPosition && 745 next->position() != RelocInfo::kNoPosition) { 746 set_position(next->position()); 747 } 748 } 749 750 751 void HInstruction::InsertAfter(HInstruction* previous) { 752 ASSERT(!IsLinked()); 753 ASSERT(!previous->IsControlInstruction()); 754 ASSERT(!IsControlInstruction() || previous->next_ == NULL); 755 HBasicBlock* block = previous->block(); 756 // Never 
insert anything except constants into the start block after finishing 757 // it. 758 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) { 759 ASSERT(block->end()->SecondSuccessor() == NULL); 760 InsertAfter(block->end()->FirstSuccessor()->first()); 761 return; 762 } 763 764 // If we're inserting after an instruction with side-effects that is 765 // followed by a simulate instruction, we need to insert after the 766 // simulate instruction instead. 767 HInstruction* next = previous->next_; 768 if (previous->HasObservableSideEffects() && next != NULL) { 769 ASSERT(next->IsSimulate()); 770 previous = next; 771 next = previous->next_; 772 } 773 774 previous_ = previous; 775 next_ = next; 776 SetBlock(block); 777 previous->next_ = this; 778 if (next != NULL) next->previous_ = this; 779 if (block->last() == previous) { 780 block->set_last(this); 781 } 782 if (position() == RelocInfo::kNoPosition && 783 previous->position() != RelocInfo::kNoPosition) { 784 set_position(previous->position()); 785 } 786 } 787 788 789 #ifdef DEBUG 790 void HInstruction::Verify() { 791 // Verify that input operands are defined before use. 792 HBasicBlock* cur_block = block(); 793 for (int i = 0; i < OperandCount(); ++i) { 794 HValue* other_operand = OperandAt(i); 795 if (other_operand == NULL) continue; 796 HBasicBlock* other_block = other_operand->block(); 797 if (cur_block == other_block) { 798 if (!other_operand->IsPhi()) { 799 HInstruction* cur = this->previous(); 800 while (cur != NULL) { 801 if (cur == other_operand) break; 802 cur = cur->previous(); 803 } 804 // Must reach other operand in the same block! 805 ASSERT(cur == other_operand); 806 } 807 } else { 808 // If the following assert fires, you may have forgotten an 809 // AddInstruction. 810 ASSERT(other_block->Dominates(cur_block)); 811 } 812 } 813 814 // Verify that instructions that may have side-effects are followed 815 // by a simulate instruction. 
816 if (HasObservableSideEffects() && !IsOsrEntry()) { 817 ASSERT(next()->IsSimulate()); 818 } 819 820 // Verify that instructions that can be eliminated by GVN have overridden 821 // HValue::DataEquals. The default implementation is UNREACHABLE. We 822 // don't actually care whether DataEquals returns true or false here. 823 if (CheckFlag(kUseGVN)) DataEquals(this); 824 825 // Verify that all uses are in the graph. 826 for (HUseIterator use = uses(); !use.Done(); use.Advance()) { 827 if (use.value()->IsInstruction()) { 828 ASSERT(HInstruction::cast(use.value())->IsLinked()); 829 } 830 } 831 } 832 #endif 833 834 835 void HDummyUse::PrintDataTo(StringStream* stream) { 836 value()->PrintNameTo(stream); 837 } 838 839 840 void HEnvironmentMarker::PrintDataTo(StringStream* stream) { 841 stream->Add("%s var[%d]", kind() == BIND ? "bind" : "lookup", index()); 842 } 843 844 845 void HUnaryCall::PrintDataTo(StringStream* stream) { 846 value()->PrintNameTo(stream); 847 stream->Add(" "); 848 stream->Add("#%d", argument_count()); 849 } 850 851 852 void HBinaryCall::PrintDataTo(StringStream* stream) { 853 first()->PrintNameTo(stream); 854 stream->Add(" "); 855 second()->PrintNameTo(stream); 856 stream->Add(" "); 857 stream->Add("#%d", argument_count()); 858 } 859 860 861 void HBoundsCheck::ApplyIndexChange() { 862 if (skip_check()) return; 863 864 DecompositionResult decomposition; 865 bool index_is_decomposable = index()->TryDecompose(&decomposition); 866 if (index_is_decomposable) { 867 ASSERT(decomposition.base() == base()); 868 if (decomposition.offset() == offset() && 869 decomposition.scale() == scale()) return; 870 } else { 871 return; 872 } 873 874 ReplaceAllUsesWith(index()); 875 876 HValue* current_index = decomposition.base(); 877 int actual_offset = decomposition.offset() + offset(); 878 int actual_scale = decomposition.scale() + scale(); 879 880 Zone* zone = block()->graph()->zone(); 881 HValue* context = block()->graph()->GetInvalidContext(); 882 if (actual_offset 
!= 0) { 883 HConstant* add_offset = HConstant::New(zone, context, actual_offset); 884 add_offset->InsertBefore(this); 885 HInstruction* add = HAdd::New(zone, context, 886 current_index, add_offset); 887 add->InsertBefore(this); 888 add->AssumeRepresentation(index()->representation()); 889 add->ClearFlag(kCanOverflow); 890 current_index = add; 891 } 892 893 if (actual_scale != 0) { 894 HConstant* sar_scale = HConstant::New(zone, context, actual_scale); 895 sar_scale->InsertBefore(this); 896 HInstruction* sar = HSar::New(zone, context, 897 current_index, sar_scale); 898 sar->InsertBefore(this); 899 sar->AssumeRepresentation(index()->representation()); 900 current_index = sar; 901 } 902 903 SetOperandAt(0, current_index); 904 905 base_ = NULL; 906 offset_ = 0; 907 scale_ = 0; 908 } 909 910 911 void HBoundsCheck::PrintDataTo(StringStream* stream) { 912 index()->PrintNameTo(stream); 913 stream->Add(" "); 914 length()->PrintNameTo(stream); 915 if (base() != NULL && (offset() != 0 || scale() != 0)) { 916 stream->Add(" base: (("); 917 if (base() != index()) { 918 index()->PrintNameTo(stream); 919 } else { 920 stream->Add("index"); 921 } 922 stream->Add(" + %d) >> %d)", offset(), scale()); 923 } 924 if (skip_check()) { 925 stream->Add(" [DISABLED]"); 926 } 927 } 928 929 930 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) { 931 ASSERT(CheckFlag(kFlexibleRepresentation)); 932 HValue* actual_index = index()->ActualValue(); 933 HValue* actual_length = length()->ActualValue(); 934 Representation index_rep = actual_index->representation(); 935 Representation length_rep = actual_length->representation(); 936 if (index_rep.IsTagged() && actual_index->type().IsSmi()) { 937 index_rep = Representation::Smi(); 938 } 939 if (length_rep.IsTagged() && actual_length->type().IsSmi()) { 940 length_rep = Representation::Smi(); 941 } 942 Representation r = index_rep.generalize(length_rep); 943 if (r.is_more_general_than(Representation::Integer32())) { 944 r = 
Representation::Integer32(); 945 } 946 UpdateRepresentation(r, h_infer, "boundscheck"); 947 } 948 949 950 Range* HBoundsCheck::InferRange(Zone* zone) { 951 Representation r = representation(); 952 if (r.IsSmiOrInteger32() && length()->HasRange()) { 953 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1); 954 int lower = 0; 955 956 Range* result = new(zone) Range(lower, upper); 957 if (index()->HasRange()) { 958 result->Intersect(index()->range()); 959 } 960 961 // In case of Smi representation, clamp result to Smi::kMaxValue. 962 if (r.IsSmi()) result->ClampToSmi(); 963 return result; 964 } 965 return HValue::InferRange(zone); 966 } 967 968 969 void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) { 970 stream->Add("base: "); 971 base_index()->PrintNameTo(stream); 972 stream->Add(", check: "); 973 base_index()->PrintNameTo(stream); 974 } 975 976 977 void HCallConstantFunction::PrintDataTo(StringStream* stream) { 978 if (IsApplyFunction()) { 979 stream->Add("optimized apply "); 980 } else { 981 stream->Add("%o ", function()->shared()->DebugName()); 982 } 983 stream->Add("#%d", argument_count()); 984 } 985 986 987 void HCallNamed::PrintDataTo(StringStream* stream) { 988 stream->Add("%o ", *name()); 989 HUnaryCall::PrintDataTo(stream); 990 } 991 992 993 void HCallGlobal::PrintDataTo(StringStream* stream) { 994 stream->Add("%o ", *name()); 995 HUnaryCall::PrintDataTo(stream); 996 } 997 998 999 void HCallKnownGlobal::PrintDataTo(StringStream* stream) { 1000 stream->Add("%o ", target()->shared()->DebugName()); 1001 stream->Add("#%d", argument_count()); 1002 } 1003 1004 1005 void HCallNewArray::PrintDataTo(StringStream* stream) { 1006 stream->Add(ElementsKindToString(elements_kind())); 1007 stream->Add(" "); 1008 HBinaryCall::PrintDataTo(stream); 1009 } 1010 1011 1012 void HCallRuntime::PrintDataTo(StringStream* stream) { 1013 stream->Add("%o ", *name()); 1014 if (save_doubles() == kSaveFPRegs) { 1015 stream->Add("[save doubles] "); 1016 
} 1017 stream->Add("#%d", argument_count()); 1018 } 1019 1020 1021 void HClassOfTestAndBranch::PrintDataTo(StringStream* stream) { 1022 stream->Add("class_of_test("); 1023 value()->PrintNameTo(stream); 1024 stream->Add(", \"%o\")", *class_name()); 1025 } 1026 1027 1028 void HWrapReceiver::PrintDataTo(StringStream* stream) { 1029 receiver()->PrintNameTo(stream); 1030 stream->Add(" "); 1031 function()->PrintNameTo(stream); 1032 } 1033 1034 1035 void HAccessArgumentsAt::PrintDataTo(StringStream* stream) { 1036 arguments()->PrintNameTo(stream); 1037 stream->Add("["); 1038 index()->PrintNameTo(stream); 1039 stream->Add("], length "); 1040 length()->PrintNameTo(stream); 1041 } 1042 1043 1044 void HControlInstruction::PrintDataTo(StringStream* stream) { 1045 stream->Add(" goto ("); 1046 bool first_block = true; 1047 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) { 1048 stream->Add(first_block ? "B%d" : ", B%d", it.Current()->block_id()); 1049 first_block = false; 1050 } 1051 stream->Add(")"); 1052 } 1053 1054 1055 void HUnaryControlInstruction::PrintDataTo(StringStream* stream) { 1056 value()->PrintNameTo(stream); 1057 HControlInstruction::PrintDataTo(stream); 1058 } 1059 1060 1061 void HReturn::PrintDataTo(StringStream* stream) { 1062 value()->PrintNameTo(stream); 1063 stream->Add(" (pop "); 1064 parameter_count()->PrintNameTo(stream); 1065 stream->Add(" values)"); 1066 } 1067 1068 1069 Representation HBranch::observed_input_representation(int index) { 1070 static const ToBooleanStub::Types tagged_types( 1071 ToBooleanStub::NULL_TYPE | 1072 ToBooleanStub::SPEC_OBJECT | 1073 ToBooleanStub::STRING | 1074 ToBooleanStub::SYMBOL); 1075 if (expected_input_types_.ContainsAnyOf(tagged_types)) { 1076 return Representation::Tagged(); 1077 } 1078 if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) { 1079 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) { 1080 return Representation::Double(); 1081 } 1082 return Representation::Tagged(); 1083 } 
1084 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) { 1085 return Representation::Double(); 1086 } 1087 if (expected_input_types_.Contains(ToBooleanStub::SMI)) { 1088 return Representation::Smi(); 1089 } 1090 return Representation::None(); 1091 } 1092 1093 1094 bool HBranch::KnownSuccessorBlock(HBasicBlock** block) { 1095 HValue* value = this->value(); 1096 if (value->EmitAtUses()) { 1097 ASSERT(value->IsConstant()); 1098 ASSERT(!value->representation().IsDouble()); 1099 *block = HConstant::cast(value)->BooleanValue() 1100 ? FirstSuccessor() 1101 : SecondSuccessor(); 1102 return true; 1103 } 1104 *block = NULL; 1105 return false; 1106 } 1107 1108 1109 void HCompareMap::PrintDataTo(StringStream* stream) { 1110 value()->PrintNameTo(stream); 1111 stream->Add(" (%p)", *map().handle()); 1112 HControlInstruction::PrintDataTo(stream); 1113 } 1114 1115 1116 const char* HUnaryMathOperation::OpName() const { 1117 switch (op()) { 1118 case kMathFloor: return "floor"; 1119 case kMathRound: return "round"; 1120 case kMathAbs: return "abs"; 1121 case kMathLog: return "log"; 1122 case kMathSin: return "sin"; 1123 case kMathCos: return "cos"; 1124 case kMathTan: return "tan"; 1125 case kMathExp: return "exp"; 1126 case kMathSqrt: return "sqrt"; 1127 case kMathPowHalf: return "pow-half"; 1128 default: 1129 UNREACHABLE(); 1130 return NULL; 1131 } 1132 } 1133 1134 1135 Range* HUnaryMathOperation::InferRange(Zone* zone) { 1136 Representation r = representation(); 1137 if (r.IsSmiOrInteger32() && value()->HasRange()) { 1138 if (op() == kMathAbs) { 1139 int upper = value()->range()->upper(); 1140 int lower = value()->range()->lower(); 1141 bool spans_zero = value()->range()->CanBeZero(); 1142 // Math.abs(kMinInt) overflows its representation, on which the 1143 // instruction deopts. Hence clamp it to kMaxInt. 1144 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper); 1145 int abs_lower = lower == kMinInt ? 
          kMaxInt : abs(lower);
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  return HValue::InferRange(zone);
}


// Prints "<op-name> <value>", e.g. "sqrt v12".
void HUnaryMathOperation::PrintDataTo(StringStream* stream) {
  const char* name = OpName();
  stream->Add("%s ", name);
  value()->PrintNameTo(stream);
}


// Default unary-operation printing: just the operand name.
void HUnaryOperation::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
}


// Prints the value plus a short tag when the [from_, to_] instance-type
// interval matches one of the common checks (spec_object, reg_exp, ...).
void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  switch (from_) {
    case FIRST_JS_RECEIVER_TYPE:
      if (to_ == LAST_TYPE) stream->Add(" spec_object");
      break;
    case JS_REGEXP_TYPE:
      if (to_ == JS_REGEXP_TYPE) stream->Add(" reg_exp");
      break;
    case JS_ARRAY_TYPE:
      if (to_ == JS_ARRAY_TYPE) stream->Add(" array");
      break;
    case JS_FUNCTION_TYPE:
      if (to_ == JS_FUNCTION_TYPE) stream->Add(" function");
      break;
    default:
      break;
  }
}


// Prints '<value> == <type literal>' followed by the successor list.
void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" == %o", *type_literal_);
  HControlInstruction::PrintDataTo(stream);
}


// If the input already has a specialized (non-tagged) representation, the
// typeof result is known statically: it is "number" iff the literal compares
// number types. Stores the decided successor in *block and returns true.
bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (value()->representation().IsSpecialization()) {
    if (compares_number_type()) {
      *block = FirstSuccessor();
    } else {
      *block = SecondSuccessor();
    }
    return true;
  }
  *block = NULL;
  return false;
}


// Prints "<value> <map>".
void HCheckMapValue::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" ");
  map()->PrintNameTo(stream);
}


void
HForInPrepareMap::PrintDataTo(StringStream* stream) {
  enumerable()->PrintNameTo(stream);
}


// Prints "<enumerable> <map>[<idx>]".
void HForInCacheArray::PrintDataTo(StringStream* stream) {
  enumerable()->PrintNameTo(stream);
  stream->Add(" ");
  map()->PrintNameTo(stream);
  stream->Add("[%d]", idx_);
}


// Prints "<object> <index>".
void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(" ");
  index()->PrintNameTo(stream);
}


// Matches l == ~0 (the all-ones constant); on success stores r in *negated.
static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
  if (!l->EqualsInteger32Constant(~0)) return false;
  *negated = r;
  return true;
}


// Matches "x ^ ~0" (bitwise negation of x) in either operand order; on
// success stores x in *negated.
static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
  if (!instr->IsBitwise()) return false;
  HBitwise* b = HBitwise::cast(instr);
  return (b->op() == Token::BIT_XOR) &&
         (MatchLeftIsOnes(b->left(), b->right(), negated) ||
          MatchLeftIsOnes(b->right(), b->left(), negated));
}


// Matches "~~x" (double bitwise negation); on success stores x in *arg.
static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
  HValue* negated;
  return MatchNegationViaXor(instr, &negated) &&
         MatchNegationViaXor(negated, arg);
}


// Strength-reduces bitwise operations with identity constants and the
// double-negation idiom; kUint32 operands are excluded because replacing
// them would drop the uint32 interpretation.
HValue* HBitwise::Canonicalize() {
  if (!representation().IsSmiOrInteger32()) return this;
  // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
  int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
  if (left()->EqualsInteger32Constant(nop_constant) &&
      !right()->CheckFlag(kUint32)) {
    return right();
  }
  if (right()->EqualsInteger32Constant(nop_constant) &&
      !left()->CheckFlag(kUint32)) {
    return left();
  }
  // Optimize double negation, a common pattern used for ToInt32(x).
  HValue* arg;
  if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
    return arg;
  }
  return this;
}


// An External left operand (pointer arithmetic) fixes the result
// representation to External; otherwise defer to the arithmetic base class.
Representation HAdd::RepresentationFromInputs() {
  Representation left_rep = left()->representation();
  if (left_rep.IsExternal()) {
    return Representation::External();
  }
  return HArithmeticBinaryOperation::RepresentationFromInputs();
}


// For External + offset, the right operand (index 2) must be an Integer32.
Representation HAdd::RequiredInputRepresentation(int index) {
  if (index == 2) {
    Representation left_rep = left()->representation();
    if (left_rep.IsExternal()) {
      return Representation::Integer32();
    }
  }
  return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
}


// True when arg2 is the given integer identity constant and arg1 already has
// a specialized representation (so dropping the op cannot lose a type check).
static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
  return arg1->representation().IsSpecialization() &&
      arg2->EqualsInteger32Constant(identity);
}


HValue* HAdd::Canonicalize() {
  // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
  if (IsIdentityOperation(left(), right(), 0) &&
      !left()->representation().IsDouble()) {  // Left could be -0.
    return left();
  }
  if (IsIdentityOperation(right(), left(), 0) &&
      !left()->representation().IsDouble()) {  // Right could be -0.
    return right();
  }
  return this;
}


// x - 0 == x (identity only valid for specialized representations).
HValue* HSub::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 0)) return left();
  return this;
}


// x * 1 == x, in either operand position.
HValue* HMul::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  if (IsIdentityOperation(right(), left(), 1)) return right();
  return this;
}


// True when either factor is the constant -1.
bool HMul::MulMinusOne() {
  if (left()->EqualsInteger32Constant(-1) ||
      right()->EqualsInteger32Constant(-1)) {
    return true;
  }

  return false;
}


// No canonicalization for modulus.
HValue* HMod::Canonicalize() {
  return this;
}


// x / 1 == x.
HValue* HDiv::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  return this;
}


// A representation change to the same representation is a no-op.
HValue* HChange::Canonicalize() {
  return (from().Equals(to())) ? value() : this;
}


// Unused wrappers are dead; a receiver already known to be a JSObject needs
// no wrapping.
HValue* HWrapReceiver::Canonicalize() {
  if (HasNoUses()) return NULL;
  if (receiver()->type().IsJSObject()) {
    return receiver();
  }
  return this;
}


void HTypeof::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
}


// Factory: folds the representation change away when the input is a numeric
// constant whose value fits an int32, producing a constant in the required
// representation directly.
HInstruction* HForceRepresentation::New(Zone* zone, HValue* context,
    HValue* value, Representation required_representation) {
  if (FLAG_fold_constants && value->IsConstant()) {
    HConstant* c = HConstant::cast(value);
    if (c->HasNumberValue()) {
      double double_res = c->DoubleValue();
      if (TypeInfo::IsInt32Double(double_res)) {
        return HConstant::New(zone, context,
                              static_cast<int32_t>(double_res),
                              required_representation);
      }
    }
  }
  return new(zone) HForceRepresentation(value, required_representation);
}


// Prints "<representation mnemonic> <value>".
void HForceRepresentation::PrintDataTo(StringStream* stream) {
  stream->Add("%s ", representation().Mnemonic());
  value()->PrintNameTo(stream);
}


void
HChange::PrintDataTo(StringStream* stream) {
  HUnaryOperation::PrintDataTo(stream);
  stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic());

  // Annotate the conversion with the flags that affect its semantics.
  if (CanTruncateToInt32()) stream->Add(" truncating-int32");
  if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
  if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan");
}


// Returns an integer32-valued replacement for the dividend of a
// Math.floor(a / b) pattern, or NULL if none is available.
static HValue* SimplifiedDividendForMathFloorOfDiv(HValue* dividend) {
  // A value with an integer representation does not need to be transformed.
  if (dividend->representation().IsInteger32()) {
    return dividend;
  }
  // A change from an integer32 can be replaced by the integer32 value.
  if (dividend->IsChange() &&
      HChange::cast(dividend)->from().IsInteger32()) {
    return HChange::cast(dividend)->value();
  }
  return NULL;
}


// Canonicalizes round/floor of an already-integral value, and rewrites
// Math.floor(a / b) into the dedicated HMathFloorOfDiv instruction when the
// operands can be expressed as integer32 values.
HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    // Look through an intervening representation change.
    if (val->IsChange()) val = HChange::cast(val)->value();

    // If the input is smi or integer32 then we replace the instruction with its
    // input.
    if (val->representation().IsSmiOrInteger32()) {
      if (!val->representation().Equals(representation())) {
        HChange* result = new(block()->zone()) HChange(
            val, representation(), false, false);
        result->InsertBefore(this);
        return result;
      }
      return val;
    }
  }

  if (op() == kMathFloor) {
    HValue* val = value();
    // Only rewrite when this floor is the division's sole use.
    if (val->IsDiv() && (val->UseCount() == 1)) {
      HDiv* hdiv = HDiv::cast(val);
      HValue* left = hdiv->left();
      HValue* right = hdiv->right();
      // Try to simplify left and right values of the division.
      HValue* new_left = SimplifiedDividendForMathFloorOfDiv(left);
      if (new_left == NULL &&
          hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
        new_left = new(block()->zone()) HChange(
            left, Representation::Integer32(), false, false);
        HChange::cast(new_left)->InsertBefore(this);
      }
      HValue* new_right =
          LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(right);
      if (new_right == NULL &&
#if V8_TARGET_ARCH_ARM
          CpuFeatures::IsSupported(SUDIV) &&
#endif
          hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
        new_right = new(block()->zone()) HChange(
            right, Representation::Integer32(), false, false);
        HChange::cast(new_right)->InsertBefore(this);
      }

      // Return if left or right are not optimizable.
      if ((new_left == NULL) || (new_right == NULL)) return this;

      // Insert the new values in the graph.
      if (new_left->IsInstruction() &&
          !HInstruction::cast(new_left)->IsLinked()) {
        HInstruction::cast(new_left)->InsertBefore(this);
      }
      if (new_right->IsInstruction() &&
          !HInstruction::cast(new_right)->IsLinked()) {
        HInstruction::cast(new_right)->InsertBefore(this);
      }
      HMathFloorOfDiv* instr =
          HMathFloorOfDiv::New(block()->zone(), context(), new_left, new_right);
      instr->InsertBefore(this);
      return instr;
    }
  }
  return this;
}


// Drops the check when the input is already known to satisfy it: a string
// type for IS_STRING, or a constant internalized string for
// IS_INTERNALIZED_STRING.
HValue* HCheckInstanceType::Canonicalize() {
  if (check_ == IS_STRING && value()->type().IsString()) {
    return value();
  }

  if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
    if (HConstant::cast(value())->HasInternalizedStringValue()) {
      return value();
    }
  }
  return this;
}


// For interval checks, writes the inclusive [first, last] instance-type
// bounds corresponding to check_.
void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  ASSERT(is_interval_check());
  switch (check_) {
    case IS_SPEC_OBJECT:
      *first = FIRST_SPEC_OBJECT_TYPE;
      *last = LAST_SPEC_OBJECT_TYPE;
      return;
    case IS_JS_ARRAY:
      *first = *last = JS_ARRAY_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}


// For mask-based (non-interval) checks, writes the instance-type bitmask and
// expected tag for check_.
void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  ASSERT(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      *mask = kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
    default:
      UNREACHABLE();
  }
}


// Removes a map check that is dominated by a field store which transitions
// the same object to a map contained in this check's map set.
void HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect,
                                           HValue* dominator) {
  ASSERT(side_effect == kChangesMaps);
  // TODO(mstarzinger): For now we specialize on HStoreNamedField, but once
  // type information is rich enough we should generalize this to any HType
  // for which the map is known.
  if (HasNoUses() && dominator->IsStoreNamedField()) {
    HStoreNamedField* store = HStoreNamedField::cast(dominator);
    if (!store->has_transition() || store->object() != value()) return;
    HConstant* transition = HConstant::cast(store->transition());
    if (map_set_.Contains(transition->GetUnique())) {
      DeleteAndReplaceWith(NULL);
      return;
    }
  }
}


// Prints "<value> [<map>,<map>,...]" plus "(omitted)" when the check can be
// elided.
void HCheckMaps::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" [%p", *map_set_.at(0).handle());
  for (int i = 1; i < map_set_.size(); ++i) {
    stream->Add(",%p", *map_set_.at(i).handle());
  }
  stream->Add("]%s", CanOmitMapChecks() ? "(omitted)" : "");
}


// Prints "<value> <expected object>".
void HCheckValue::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add(" ");
  object().handle()->ShortPrint(stream);
}


// The check is redundant when the value is a constant identical to the
// expected object; NULL deletes this instruction.
HValue* HCheckValue::Canonicalize() {
  return (value()->IsConstant() &&
          HConstant::cast(value())->GetUnique() == object_)
      ?
      NULL
      : this;
}


// Human-readable name of the instance-type check, used in IR dumps.
const char* HCheckInstanceType::GetCheckName() {
  switch (check_) {
    case IS_SPEC_OBJECT: return "object";
    case IS_JS_ARRAY: return "array";
    case IS_STRING: return "string";
    case IS_INTERNALIZED_STRING: return "internalized_string";
  }
  UNREACHABLE();
  return "";
}


void HCheckInstanceType::PrintDataTo(StringStream* stream) {
  stream->Add("%s ", GetCheckName());
  HUnaryOperation::PrintDataTo(stream);
}


// Prints the stub's major name followed by the call operands.
void HCallStub::PrintDataTo(StringStream* stream) {
  stream->Add("%s ",
              CodeStub::MajorName(major_key_, false));
  HUnaryCall::PrintDataTo(stream);
}


// Prints the OSR slot kind and index; later checks override earlier ones, so
// the most specific matching kind wins.
void HUnknownOSRValue::PrintDataTo(StringStream *stream) {
  const char* type = "expression";
  if (environment_->is_local_index(index_)) type = "local";
  if (environment_->is_special_index(index_)) type = "special";
  if (environment_->is_parameter_index(index_)) type = "parameter";
  stream->Add("%s @ %d", type, index_);
}


// Prints "<left> <right> <context>".
void HInstanceOf::PrintDataTo(StringStream* stream) {
  left()->PrintNameTo(stream);
  stream->Add(" ");
  right()->PrintNameTo(stream);
  stream->Add(" ");
  context()->PrintNameTo(stream);
}


// Default range: the full Smi range for Smi-typed values (which cannot be
// -0), otherwise an unbounded range whose -0 possibility depends on whether
// all uses truncate to int32.
Range* HValue::InferRange(Zone* zone) {
  Range* result;
  if (representation().IsSmi() || type().IsSmi()) {
    result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
    result->set_can_be_minus_zero(false);
  } else {
    result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
    // TODO(jkummerow): The range cannot be minus zero when the upper type
    // bound is Integer32.
  }
  return result;
}


// Range inference for representation changes: propagates the input range and
// upgrades the static type to Smi when an int32 input provably fits a Smi
// (which also removes the new-space-promotion GVN dependency).
Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearGVNFlag(kChangesNewSpacePromotion);
  }
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}


// An int32 constant has the exact singleton range [v, v].
Range* HConstant::InferRange(Zone* zone) {
  if (has_int32_value_) {
    Range* result = new(zone) Range(int32_value_, int32_value_);
    result->set_can_be_minus_zero(false);
    return result;
  }
  return HValue::InferRange(zone);
}


// A phi reports the source position of the first instruction in its block.
int HPhi::position() const {
  return block()->first()->position();
}


// Loop-header phis get the widest range of their representation (their value
// changes every iteration); other phis get the union of their operand ranges.
Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ?
          new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}


// Range of a + b; clears kCanOverflow when the computed range provably does
// not overflow or when all uses truncate anyway. -0 is only possible when
// both operands can be -0 and no use truncates.
Range* HAdd::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->AddAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeMinusZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


// Range of a - b; -0 is only possible when a can be -0 and b can be 0.
Range* HSub::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->SubAndCheckOverflow(r, b) ||
        (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
        (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               a->CanBeMinusZero() && b->CanBeZero());
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


// Range of a * b.
Range* HMul::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* res = a->Copy(zone);
    if (!res->MulAndCheckOverflow(r, b) ||
        (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
          (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
         MulMinusOne())) {
      // Truncated int multiplication is too precise and therefore not the
      // same as converting to Double and back.
      // Handle truncated integer multiplication by -1 special.
      ClearFlag(kCanOverflow);
    }
    res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
                               !CheckFlag(kAllUsesTruncatingToInt32) &&
                               ((a->CanBeZero() && b->CanBeNegative()) ||
                                (a->CanBeNegative() && b->CanBeZero())));
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


// Range of a / b; also clears the overflow flag (overflow only happens for
// kMinInt / -1) and the div-by-zero flag when b cannot be zero.
Range* HDiv::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();
    Range* result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  (a->CanBeMinusZero() ||
                                   (a->CanBeZero() && b->CanBeNegative())));
    if (!a->Includes(kMinInt) ||
        !b->Includes(-1) ||
        CheckFlag(kAllUsesTruncatingToInt32)) {
      // It is safe to clear kCanOverflow when kAllUsesTruncatingToInt32.
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}


// Range of a % b.
Range* HMod::InferRange(Zone* zone) {
  if (representation().IsInteger32()) {
    Range* a = left()->range();
    Range* b = right()->range();

    // The magnitude of the modulus is bounded by the right operand. Note that
    // apart from the cases involving kMinInt, the calculation below is the same
    // as Max(Abs(b->lower()), Abs(b->upper())) - 1.
    int32_t positive_bound = -(Min(NegAbs(b->lower()), NegAbs(b->upper())) + 1);

    // The result of the modulo operation has the sign of its left operand.
    bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
    Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
                                    a->CanBePositive() ? positive_bound : 0);

    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
                                  left_can_be_negative);

    // Overflow only happens for kMinInt % -1.
    if (!a->Includes(kMinInt) || !b->Includes(-1)) {
      ClearFlag(HValue::kCanOverflow);
    }

    if (!b->CanBeZero()) {
      ClearFlag(HValue::kCanBeDivByZero);
    }
    return result;
  } else {
    return HValue::InferRange(zone);
  }
}


// Tries to recognize a two-operand loop phi as an induction variable: one
// operand must be "phi +/- constant" (the increment), the other is the base.
// Returns the new InductionVariableData or NULL if the phi does not match.
InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
  if (phi->block()->loop_information() == NULL) return NULL;
  if (phi->OperandCount() != 2) return NULL;
  int32_t candidate_increment;

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
  }

  candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
  if (candidate_increment != 0) {
    return new(phi->block()->graph()->zone())
        InductionVariableData(phi, phi->OperandAt(0), candidate_increment);
  }

  return NULL;
}


/*
 * This function tries to match the following patterns (and all the relevant
 * variants related to |, & and + being commutative):
 * base | constant_or_mask
 * base & constant_and_mask
 * (base + constant_offset) & constant_and_mask
 * (base - constant_offset) & constant_and_mask
 */
void InductionVariableData::DecomposeBitwise(
    HValue* value,
    BitwiseDecompositionResult* result) {
  HValue* base = IgnoreOsrValue(value);
  result->base = value;

  if (!base->representation().IsInteger32()) return;

  if (base->IsBitwise()) {
    bool allow_offset = false;
    int32_t mask = 0;

    HBitwise* bitwise = HBitwise::cast(base);
    // The mask must be a constant on either side of the bitwise op.
    if (bitwise->right()->IsInteger32Constant()) {
      mask = bitwise->right()->GetInteger32Constant();
      base = bitwise->left();
    } else if (bitwise->left()->IsInteger32Constant()) {
      mask = bitwise->left()->GetInteger32Constant();
      base = bitwise->right();
    } else {
      return;
    }
    // Only AND and OR are recognized; an additive offset is only allowed
    // under AND.
    if (bitwise->op() == Token::BIT_AND) {
      result->and_mask = mask;
      allow_offset = true;
    } else if (bitwise->op() == Token::BIT_OR) {
      result->or_mask = mask;
    } else {
      return;
    }

    result->context = bitwise->context();

    if (allow_offset) {
      // Strip a constant offset: (base +/- c) & mask.
      if (base->IsAdd()) {
        HAdd* add = HAdd::cast(base);
        if (add->right()->IsInteger32Constant()) {
          base = add->left();
        } else if (add->left()->IsInteger32Constant()) {
          base = add->right();
        }
      } else if (base->IsSub()) {
        HSub* sub = HSub::cast(base);
        if (sub->right()->IsInteger32Constant()) {
          base = sub->left();
        }
      }
    }

    result->base = base;
  }
}


// Registers a bounds check against this induction variable, but only when
// the stored limit is valid at the check's block and the check lies inside
// the induction loop. Checks are grouped per length value.
void InductionVariableData::AddCheck(HBoundsCheck* check,
                                     int32_t upper_limit) {
  ASSERT(limit_validity() != NULL);
  if (limit_validity() != check->block() &&
      !limit_validity()->Dominates(check->block())) return;
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      check->block()->current_loop())) return;

  // Find (or create) the per-length bucket for this check.
  ChecksRelatedToLength* length_checks = checks();
  while (length_checks != NULL) {
    if (length_checks->length() == check->length()) break;
    length_checks = length_checks->next();
  }
  if (length_checks == NULL) {
    length_checks = new(check->block()->zone())
        ChecksRelatedToLength(check->length(), checks());
    checks_ = length_checks;
  }

  length_checks->AddCheck(check, upper_limit);
}


// Flushes the current block: stamps the accumulated upper limit onto every
// recorded check belonging to that block.
void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
  if (checks() != NULL) {
    InductionVariableCheck* c = checks();
    HBasicBlock* current_block = c->check()->block();
    // The list is built front-to-back per block, so the current block's
    // checks form a prefix of the list.
    while (c != NULL && c->check()->block() == current_block) {
      c->set_upper_limit(current_upper_limit_);
      c = c->next();
    }
  }
}


// Rewrites the first bounds check of the current block to use a masked index
// "index_base <token> mask" (creating or reusing the mask constant and the
// bitwise instruction), so that a single widened check can subsume the
// block's other checks.
void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
    Token::Value token,
    int32_t mask,
    HValue* index_base,
    HValue* context) {
  ASSERT(first_check_in_block() != NULL);
  HValue* previous_index = first_check_in_block()->index();
  ASSERT(context != NULL);

  Zone* zone = index_base->block()->graph()->zone();
  set_added_constant(HConstant::New(zone, context, mask));
  if (added_index() != NULL) {
    added_constant()->InsertBefore(added_index());
  } else {
    added_constant()->InsertBefore(first_check_in_block());
  }

  if (added_index() == NULL) {
    first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
    HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
                                            added_constant());
    ASSERT(new_index->IsBitwise());
    new_index->ClearAllSideEffects();
    new_index->AssumeRepresentation(Representation::Integer32());
    set_added_index(HBitwise::cast(new_index));
    added_index()->InsertBefore(first_check_in_block());
  }
  ASSERT(added_index()->op() == token);

  // Re-point the bitwise operands and the check's index at the new values.
  added_index()->SetOperandAt(1, index_base);
  added_index()->SetOperandAt(2, added_constant());
  first_check_in_block()->SetOperandAt(0, added_index());
  if (previous_index->UseCount() == 0) {
    previous_index->DeleteAndReplaceWith(NULL);
  }
}

// Records a bounds check in this length bucket. Within one basic block,
// later checks with larger masks widen the first check instead of staying
// live; crossing into a new block closes the previous one.
void InductionVariableData::ChecksRelatedToLength::AddCheck(
    HBoundsCheck* check,
    int32_t upper_limit) {
  BitwiseDecompositionResult decomposition;
  InductionVariableData::DecomposeBitwise(check->index(), &decomposition);

  if (first_check_in_block() == NULL ||
      first_check_in_block()->block() !=
          check->block()) {
    // Entering a new basic block: close the previous one and start over.
    CloseCurrentBlock();

    first_check_in_block_ = check;
    set_added_index(NULL);
    set_added_constant(NULL);
    current_and_mask_in_block_ = decomposition.and_mask;
    current_or_mask_in_block_ = decomposition.or_mask;
    current_upper_limit_ = upper_limit;

    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
    return;
  }

  if (upper_limit > current_upper_limit()) {
    current_upper_limit_ = upper_limit;
  }

  // A larger AND mask widens the block's first check; this check can then be
  // skipped entirely. AND and OR masks are mutually exclusive within a block.
  if (decomposition.and_mask != 0 &&
      current_or_mask_in_block() == 0) {
    if (current_and_mask_in_block() == 0 ||
        decomposition.and_mask > current_and_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_AND,
                                decomposition.and_mask,
                                decomposition.base,
                                decomposition.context);
      current_and_mask_in_block_ = decomposition.and_mask;
    }
    check->set_skip_check();
  }
  if (current_and_mask_in_block() == 0) {
    if (decomposition.or_mask > current_or_mask_in_block()) {
      UseNewIndexInCurrentBlock(Token::BIT_OR,
                                decomposition.or_mask,
                                decomposition.base,
                                decomposition.context);
      current_or_mask_in_block_ = decomposition.or_mask;
    }
    check->set_skip_check();
  }

  // Checks that could not be subsumed stay in the list.
  if (!check->skip_check()) {
    InductionVariableCheck* new_check = new(check->block()->graph()->zone())
        InductionVariableCheck(check, checks_, upper_limit);
    checks_ = new_check;
  }
}


/*
 * This method detects if phi is an induction variable, with phi_operand as
 * its "incremented" value (the other operand would be the "base" value).
 *
 * It checks if phi_operand has the form "phi + constant".
 * If yes, the constant is the increment that the induction variable gets at
 * every loop iteration.
 * Otherwise it returns 0.
 */
int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
                                                HValue* phi_operand) {
  if (!phi_operand->representation().IsInteger32()) return 0;

  if (phi_operand->IsAdd()) {
    // phi + c or c + phi.
    HAdd* operation = HAdd::cast(phi_operand);
    if (operation->left() == phi &&
        operation->right()->IsInteger32Constant()) {
      return operation->right()->GetInteger32Constant();
    } else if (operation->right() == phi &&
               operation->left()->IsInteger32Constant()) {
      return operation->left()->GetInteger32Constant();
    }
  } else if (phi_operand->IsSub()) {
    // phi - c (c - phi is not an induction pattern).
    HSub* operation = HSub::cast(phi_operand);
    if (operation->left() == phi &&
        operation->right()->IsInteger32Constant()) {
      return -operation->right()->GetInteger32Constant();
    }
  }

  return 0;
}


/*
 * Swaps the information in "update" with the one contained in "this".
 * The swapping is important because this method is used while doing a
 * dominator tree traversal, and "update" will retain the old data that
 * will be restored while backtracking.
 */
void InductionVariableData::UpdateAdditionalLimit(
    InductionVariableLimitUpdate* update) {
  ASSERT(update->updated_variable == this);
  if (update->limit_is_upper) {
    swap(&additional_upper_limit_, &update->limit);
    swap(&additional_upper_limit_is_included_, &update->limit_is_included);
  } else {
    swap(&additional_lower_limit_, &update->limit);
    swap(&additional_lower_limit_is_included_, &update->limit_is_included);
  }
}


// Computes the tightest constant upper bound implied by the stored limits
// and the given AND/OR masks; returns kNoLimit when no useful bound exists.
int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
                                                 int32_t or_mask) {
  // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
  const int32_t MAX_LIMIT = 1 << 30;

  int32_t result = MAX_LIMIT;

  // Constant loop limit, adjusted when the limit itself is excluded.
  if (limit() != NULL &&
      limit()->IsInteger32Constant()) {
    int32_t limit_value = limit()->GetInteger32Constant();
    if (!limit_included()) {
      limit_value--;
    }
    if (limit_value < result) result = limit_value;
  }

  // Extra constant upper limit collected during dominator traversal.
  if (additional_upper_limit() != NULL &&
      additional_upper_limit()->IsInteger32Constant()) {
    int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
    if (!additional_upper_limit_is_included()) {
      limit_value--;
    }
    if (limit_value < result) result = limit_value;
  }

  // An AND mask caps the value directly.
  if (and_mask > 0 && and_mask < MAX_LIMIT) {
    if (and_mask < result) result = and_mask;
    return result;
  }

  // Add the effect of the or_mask.
  result |= or_mask;

  return result >= MAX_LIMIT ? kNoLimit : result;
}


// Looks through a two-operand phi that merges an OSR entry value with the
// regular value, returning the non-OSR operand.
HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
  if (!v->IsPhi()) return v;
  HPhi* phi = HPhi::cast(v);
  if (phi->OperandCount() != 2) return v;
  if (phi->OperandAt(0)->block()->is_osr_entry()) {
    return phi->OperandAt(1);
  } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
    return phi->OperandAt(0);
  } else {
    return v;
  }
}


// Returns the induction-variable data attached to v (looking through OSR
// phis), or NULL if v is not a recognized induction variable.
InductionVariableData* InductionVariableData::GetInductionVariableData(
    HValue* v) {
  v = IgnoreOsrValue(v);
  if (v->IsPhi()) {
    return HPhi::cast(v)->induction_variable_data();
  }
  return NULL;
}


/*
 * Check if a conditional branch to "current_branch" with token "token" is
 * the branch that keeps the induction loop running (and, conversely, will
 * terminate it if the "other_branch" is taken).
 *
 * Three conditions must be met:
 * - "current_branch" must be in the induction loop.
 * - "other_branch" must be out of the induction loop.
 * - "token" and the induction increment must be "compatible": the token should
 *   be a condition that keeps the execution inside the loop until the limit is
 *   reached.
 */
bool InductionVariableData::CheckIfBranchIsLoopGuard(
    Token::Value token,
    HBasicBlock* current_branch,
    HBasicBlock* other_branch) {
  if (!phi()->block()->current_loop()->IsNestedInThisLoop(
      current_branch->current_loop())) {
    return false;
  }

  if (phi()->block()->current_loop()->IsNestedInThisLoop(
      other_branch->current_loop())) {
    return false;
  }

  // Counting up pairs with <, <=; counting down pairs with >, >=; a pure
  // (in)equality only guards the loop for a unit increment.
  if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
    return true;
  }
  if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
    return true;
  }
  if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {
    return true;
  }

  return false;
}


// Extracts the comparison that guards entry into "block": block must have a
// single predecessor ending in a numeric compare-and-branch, with an
// induction variable on one side. Fills *result when a limit is found.
void InductionVariableData::ComputeLimitFromPredecessorBlock(
    HBasicBlock* block,
    LimitFromPredecessorBlock* result) {
  if (block->predecessors()->length() != 1) return;
  HBasicBlock* predecessor = block->predecessors()->at(0);
  HInstruction* end = predecessor->last();

  if (!end->IsCompareNumericAndBranch()) return;
  HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);

  Token::Value token = branch->token();
  if (!Token::IsArithmeticCompareOp(token)) return;

  // If block is the false successor, the guarding condition is the negation.
  HBasicBlock* other_target;
  if (block == branch->SuccessorAt(0)) {
    other_target = branch->SuccessorAt(1);
  } else {
    other_target = branch->SuccessorAt(0);
    token = Token::NegateCompareOp(token);
    ASSERT(block == branch->SuccessorAt(1));
  }

  InductionVariableData* data;

  // The induction variable can be on either side of the comparison; reverse
  // the token when it is on the right.
  data = GetInductionVariableData(branch->left());
  HValue* limit = branch->right();
  if (data == NULL) {
    data = GetInductionVariableData(branch->right());
    token =
Token::ReverseCompareOp(token); 2217 limit = branch->left(); 2218 } 2219 2220 if (data != NULL) { 2221 result->variable = data; 2222 result->token = token; 2223 result->limit = limit; 2224 result->other_target = other_target; 2225 } 2226 } 2227 2228 2229 /* 2230 * Compute the limit that is imposed on an induction variable when entering 2231 * "block" (if any). 2232 * If the limit is the "proper" induction limit (the one that makes the loop 2233 * terminate when the induction variable reaches it) it is stored directly in 2234 * the induction variable data. 2235 * Otherwise the limit is written in "additional_limit" and the method 2236 * returns true. 2237 */ 2238 bool InductionVariableData::ComputeInductionVariableLimit( 2239 HBasicBlock* block, 2240 InductionVariableLimitUpdate* additional_limit) { 2241 LimitFromPredecessorBlock limit; 2242 ComputeLimitFromPredecessorBlock(block, &limit); 2243 if (!limit.LimitIsValid()) return false; 2244 2245 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token, 2246 block, 2247 limit.other_target)) { 2248 limit.variable->limit_ = limit.limit; 2249 limit.variable->limit_included_ = limit.LimitIsIncluded(); 2250 limit.variable->limit_validity_ = block; 2251 limit.variable->induction_exit_block_ = block->predecessors()->at(0); 2252 limit.variable->induction_exit_target_ = limit.other_target; 2253 return false; 2254 } else { 2255 additional_limit->updated_variable = limit.variable; 2256 additional_limit->limit = limit.limit; 2257 additional_limit->limit_is_upper = limit.LimitIsUpper(); 2258 additional_limit->limit_is_included = limit.LimitIsIncluded(); 2259 return true; 2260 } 2261 } 2262 2263 2264 Range* HMathMinMax::InferRange(Zone* zone) { 2265 if (representation().IsSmiOrInteger32()) { 2266 Range* a = left()->range(); 2267 Range* b = right()->range(); 2268 Range* res = a->Copy(zone); 2269 if (operation_ == kMathMax) { 2270 res->CombinedMax(b); 2271 } else { 2272 ASSERT(operation_ == kMathMin); 2273 res->CombinedMin(b); 2274 } 
    return res;
  } else {
    return HValue::InferRange(zone);
  }
}


// Prints the phi's operands, its use counts per representation
// (smi/int32/double/tagged, direct + indirect), its range and its type.
void HPhi::PrintTo(StringStream* stream) {
  stream->Add("[");
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    stream->Add(" ");
    value->PrintNameTo(stream);
    stream->Add(" ");
  }
  stream->Add(" uses:%d_%ds_%di_%dd_%dt",
              UseCount(),
              smi_non_phi_uses() + smi_indirect_uses(),
              int32_non_phi_uses() + int32_indirect_uses(),
              double_non_phi_uses() + double_indirect_uses(),
              tagged_non_phi_uses() + tagged_indirect_uses());
  PrintRangeTo(stream);
  PrintTypeTo(stream);
  stream->Add("]");
}


// Appends "value" as a new operand of this phi.
void HPhi::AddInput(HValue* value) {
  inputs_.Add(NULL, value->block()->zone());
  SetOperandAt(OperandCount() - 1, value);
  // Mark phis that may have 'arguments' directly or indirectly as an operand.
  if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
    SetFlag(kIsArguments);
  }
}


// True if this phi has at least one use that is not itself a phi.
bool HPhi::HasRealUses() {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (!it.value()->IsPhi()) return true;
  }
  return false;
}


// If every operand is either this phi itself or one single other value,
// the phi is redundant and that value is returned; otherwise NULL.
HValue* HPhi::GetRedundantReplacement() {
  HValue* candidate = NULL;
  int count = OperandCount();
  int position = 0;
  // Find the first operand that is not the phi itself.
  while (position < count && candidate == NULL) {
    HValue* current = OperandAt(position++);
    if (current != this) candidate = current;
  }
  // All remaining operands must match that candidate (or be the phi).
  while (position < count) {
    HValue* current = OperandAt(position++);
    if (current != this && current != candidate) return NULL;
  }
  ASSERT(candidate != this);
  return candidate;
}


// Removes this phi from its block; RemovePhi() is expected to clear the
// block back-pointer, which the second ASSERT checks.
void HPhi::DeleteFromGraph() {
  ASSERT(block() != NULL);
  block()->RemovePhi(this);
  ASSERT(block() == NULL);
}


// Tallies this phi's direct (non-phi) uses per representation, weighted by
// loop depth, and seeds the truncation flags conservatively.
void HPhi::InitRealUses(int phi_id) {
  // Initialize real uses.
  phi_id_ = phi_id;
  // Compute a conservative approximation of truncating uses before inferring
  // representations. The proper, exact computation will be done later, when
  // inserting representation changes.
  SetFlag(kTruncatingToSmi);
  SetFlag(kTruncatingToInt32);
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* value = it.value();
    if (!value->IsPhi()) {
      Representation rep = value->observed_input_representation(it.index());
      non_phi_uses_[rep.kind()] += value->LoopWeight();
      if (FLAG_trace_representation) {
        PrintF("#%d Phi is used by real #%d %s as %s\n",
               id(), value->id(), value->Mnemonic(), rep.Mnemonic());
      }
      // Simulates do not constrain truncation; any other non-truncating
      // use clears the corresponding flag.
      if (!value->IsSimulate()) {
        if (!value->CheckFlag(kTruncatingToSmi)) {
          ClearFlag(kTruncatingToSmi);
        }
        if (!value->CheckFlag(kTruncatingToInt32)) {
          ClearFlag(kTruncatingToInt32);
        }
      }
    }
  }
}


// Accumulates "other"'s direct (non-phi) use counts into this phi's
// indirect-use counters, one bucket per representation.
void HPhi::AddNonPhiUsesFrom(HPhi* other) {
  if (FLAG_trace_representation) {
    PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
           id(), other->id(),
           other->non_phi_uses_[Representation::kSmi],
           other->non_phi_uses_[Representation::kInteger32],
           other->non_phi_uses_[Representation::kDouble],
           other->non_phi_uses_[Representation::kTagged]);
  }

  for (int i = 0; i < Representation::kNumRepresentations; i++) {
    indirect_uses_[i] += other->non_phi_uses_[i];
  }
}


// Adds this phi's indirect-use counters into the caller-provided array
// (indexed by representation kind).
void HPhi::AddIndirectUsesTo(int* dest) {
  for (int i = 0; i < Representation::kNumRepresentations; i++) {
    dest[i] += indirect_uses_[i];
  }
}


// Folds every simulate in "list" into this one: assigned values are kept
// unless this simulate already binds the index, pushed values either cancel
// against this simulate's pop count or are appended, and pop counts add up.
// Each merged simulate is deleted afterwards.
void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
  while (!list->is_empty()) {
    HSimulate* from = list->RemoveLast();
    ZoneList<HValue*>* from_values = &from->values_;
    for (int i = 0; i < from_values->length(); ++i) {
      if (from->HasAssignedIndexAt(i)) {
        int index =
            from->GetAssignedIndexAt(i);
        // This simulate's own binding for the index wins.
        if (HasValueForIndex(index)) continue;
        AddAssignedValue(index, from_values->at(i));
      } else {
        // A pushed value cancels one pending pop; otherwise keep it.
        if (pop_count_ > 0) {
          pop_count_--;
        } else {
          AddPushedValue(from_values->at(i));
        }
      }
    }
    pop_count_ += from->pop_count_;
    from->DeleteAndReplaceWith(NULL);
  }
}


// Prints the AST id, pop count and the recorded variable bindings/pushes.
void HSimulate::PrintDataTo(StringStream* stream) {
  stream->Add("id=%d", ast_id().ToInt());
  if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
  if (values_.length() > 0) {
    if (pop_count_ > 0) stream->Add(" /");
    for (int i = values_.length() - 1; i >= 0; --i) {
      if (HasAssignedIndexAt(i)) {
        stream->Add(" var[%d] = ", GetAssignedIndexAt(i));
      } else {
        stream->Add(" push ");
      }
      values_[i]->PrintNameTo(stream);
      if (i > 0) stream->Add(",");
    }
  }
}


// Applies this simulate to "env": sets the AST id, drops popped slots, then
// rebinds/pushes the recorded values (iterated newest-first).
void HSimulate::ReplayEnvironment(HEnvironment* env) {
  ASSERT(env != NULL);
  env->set_ast_id(ast_id());
  env->Drop(pop_count());
  for (int i = values()->length() - 1; i >= 0; --i) {
    HValue* value = values()->at(i);
    if (HasAssignedIndexAt(i)) {
      env->Bind(GetAssignedIndexAt(i), value);
    } else {
      env->Push(value);
    }
  }
}


// Replaces, inside "values" (recursing into nested captured objects), every
// captured object whose capture id matches "other" with "other" itself.
static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
                                    HCapturedObject* other) {
  for (int i = 0; i < values->length(); ++i) {
    HValue* value = values->at(i);
    if (value->IsCapturedObject()) {
      if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
        values->at(i) = other;
      } else {
        ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
      }
    }
  }
}


// Replay captured objects by replacing all captured objects with the
// same capture id in the current and all outer environments.
void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
  ASSERT(env != NULL);
  while (env != NULL) {
    ReplayEnvironmentNested(env->values(), this);
    env = env->outer();
  }
}


void HCapturedObject::PrintDataTo(StringStream* stream) {
  stream->Add("#%d ", capture_id());
  HDematerializedObject::PrintDataTo(stream);
}


// Records a return-target block for this inlined function.
void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
                                         Zone* zone) {
  ASSERT(return_target->IsInlineReturnTarget());
  return_targets_.Add(return_target, zone);
}


void HEnterInlined::PrintDataTo(StringStream* stream) {
  SmartArrayPointer<char> name = function()->debug_name()->ToCString();
  stream->Add("%s, id=%d", *name, function()->id().ToInt());
}


// True if "value" is exactly representable as an int32. The bit-exact
// round-trip comparison also rejects -0.0 (its bits differ from 0.0).
static bool IsInteger32(double value) {
  double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
  return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value);
}


// Constructs a constant from a heap handle, caching its boolean, numeric,
// int32, smi and new-space properties for later representation queries.
HConstant::HConstant(Handle<Object> handle, Representation r)
  : HTemplateInstruction<0>(HType::TypeFromValue(handle)),
    object_(Unique<Object>::CreateUninitialized(handle)),
    has_smi_value_(false),
    has_int32_value_(false),
    has_double_value_(false),
    has_external_reference_value_(false),
    is_internalized_string_(false),
    is_not_in_new_space_(true),
    is_cell_(false),
    boolean_value_(handle->BooleanValue()) {
  if (handle->IsHeapObject()) {
    Heap* heap = Handle<HeapObject>::cast(handle)->GetHeap();
    is_not_in_new_space_ = !heap->InNewSpace(*handle);
  }
  if (handle->IsNumber()) {
    double n = handle->Number();
    has_int32_value_ = IsInteger32(n);
    int32_value_ = DoubleToInt32(n);
    has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
    double_value_ = n;
    has_double_value_ = true;
    // TODO(titzer): if this heap number is new space, tenure a new one.
  } else {
    is_internalized_string_ = handle->IsInternalizedString();
  }

  is_cell_ = !handle.is_null() &&
      (handle->IsCell() || handle->IsPropertyCell());
  Initialize(r);
}


// Constructs a constant from a Unique handle with all cached properties
// supplied explicitly by the caller (used e.g. when copying constants).
HConstant::HConstant(Unique<Object> unique,
                     Representation r,
                     HType type,
                     bool is_internalize_string,
                     bool is_not_in_new_space,
                     bool is_cell,
                     bool boolean_value)
  : HTemplateInstruction<0>(type),
    object_(unique),
    has_smi_value_(false),
    has_int32_value_(false),
    has_double_value_(false),
    has_external_reference_value_(false),
    is_internalized_string_(is_internalize_string),
    is_not_in_new_space_(is_not_in_new_space),
    is_cell_(is_cell),
    boolean_value_(boolean_value) {
  ASSERT(!unique.handle().is_null());
  ASSERT(!type.IsTaggedNumber());
  Initialize(r);
}


// Constructs an int32 constant; the double projection is always valid and
// the smi projection depends on Smi::IsValid.
HConstant::HConstant(int32_t integer_value,
                     Representation r,
                     bool is_not_in_new_space,
                     Unique<Object> object)
  : object_(object),
    has_smi_value_(Smi::IsValid(integer_value)),
    has_int32_value_(true),
    has_double_value_(true),
    has_external_reference_value_(false),
    is_internalized_string_(false),
    is_not_in_new_space_(is_not_in_new_space),
    is_cell_(false),
    boolean_value_(integer_value != 0),
    int32_value_(integer_value),
    double_value_(FastI2D(integer_value)) {
  set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}


// Constructs a double constant; int32/smi projections are valid only when
// the value round-trips exactly (NaN is falsy for boolean_value_).
HConstant::HConstant(double double_value,
                     Representation r,
                     bool is_not_in_new_space,
                     Unique<Object> object)
  : object_(object),
    has_int32_value_(IsInteger32(double_value)),
    has_double_value_(true),
    has_external_reference_value_(false),
    is_internalized_string_(false),
    is_not_in_new_space_(is_not_in_new_space),
    is_cell_(false),
    boolean_value_(double_value != 0 && !std::isnan(double_value)),
    int32_value_(DoubleToInt32(double_value)),
    double_value_(double_value) {
  has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
  set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());
  Initialize(r);
}


// Constructs an external-reference constant; always uses the External
// representation.
HConstant::HConstant(ExternalReference reference)
  : HTemplateInstruction<0>(HType::None()),
    object_(Unique<Object>(Handle<Object>::null())),
    has_smi_value_(false),
    has_int32_value_(false),
    has_double_value_(false),
    has_external_reference_value_(true),
    is_internalized_string_(false),
    is_not_in_new_space_(true),
    is_cell_(false),
    boolean_value_(true),
    external_reference_value_(reference) {
  Initialize(Representation::External());
}


// Picks the narrowest representation the cached projections allow when the
// caller passed None, then finalizes the instruction (GVN-eligible).
void HConstant::Initialize(Representation r) {
  if (r.IsNone()) {
    if (has_smi_value_ && SmiValuesAre31Bits()) {
      r = Representation::Smi();
    } else if (has_int32_value_) {
      r = Representation::Integer32();
    } else if (has_double_value_) {
      r = Representation::Double();
    } else if (has_external_reference_value_) {
      r = Representation::External();
    } else {
      Handle<Object> object = object_.handle();
      if (object->IsJSObject()) {
        // Try to eagerly migrate JSObjects that have deprecated maps.
        Handle<JSObject> js_object = Handle<JSObject>::cast(object);
        if (js_object->map()->is_deprecated()) {
          JSObject::TryMigrateInstance(js_object);
        }
      }
      r = Representation::Tagged();
    }
  }
  set_representation(r);
  SetFlag(kUseGVN);
}


// Decides whether the constant should be rematerialized at each use site
// instead of being emitted once.
bool HConstant::EmitAtUses() {
  ASSERT(IsLinked());
  if (block()->graph()->has_osr() &&
      block()->graph()->IsStandardConstant(this)) {
    // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
    return true;
  }
  if (UseCount() == 0) return true;
  if (IsCell()) return false;
  if (representation().IsDouble()) return false;
  return true;
}


// Returns a copy of this constant specialized to representation r, or NULL
// if the cached projections show the value cannot be represented as r.
HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
  if (r.IsSmi() && !has_smi_value_) return NULL;
  if (r.IsInteger32() && !has_int32_value_) return NULL;
  if (r.IsDouble() && !has_double_value_) return NULL;
  if (r.IsExternal() && !has_external_reference_value_) return NULL;
  if (has_int32_value_) {
    return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, object_);
  }
  if (has_double_value_) {
    return new(zone) HConstant(double_value_, r, is_not_in_new_space_, object_);
  }
  if (has_external_reference_value_) {
    return new(zone) HConstant(external_reference_value_);
  }
  ASSERT(!object_.handle().is_null());
  return new(zone) HConstant(object_,
                             r,
                             type_,
                             is_internalized_string_,
                             is_not_in_new_space_,
                             is_cell_,
                             boolean_value_);
}


// Copy truncated to int32 (doubles go through DoubleToInt32); the Maybe is
// empty when there is no numeric value to truncate.
Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
  HConstant* res = NULL;
  if (has_int32_value_) {
    res = new(zone) HConstant(int32_value_,
                              Representation::Integer32(),
                              is_not_in_new_space_,
                              object_);
  } else if (has_double_value_) {
    res = new(zone) HConstant(DoubleToInt32(double_value_),
                              Representation::Integer32(),
                              is_not_in_new_space_,
                              object_);
  }
  return Maybe<HConstant*>(res != NULL, res);
}


// Converts the oddball constants true/false/undefined/null to their
// ToNumber values (1/0/NaN/0); the Maybe is empty for anything else.
Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) {
  HConstant* res = NULL;
  Handle<Object> handle = this->handle(zone->isolate());
  if (handle->IsBoolean()) {
    res = handle->BooleanValue() ?
      new(zone) HConstant(1) : new(zone) HConstant(0);
  } else if (handle->IsUndefined()) {
    res = new(zone) HConstant(OS::nan_value());
  } else if (handle->IsNull()) {
    res = new(zone) HConstant(0);
  }
  return Maybe<HConstant*>(res != NULL, res);
}


// Prints the most specific cached projection of the constant, plus a
// new-space marker when applicable.
void HConstant::PrintDataTo(StringStream* stream) {
  if (has_int32_value_) {
    stream->Add("%d ", int32_value_);
  } else if (has_double_value_) {
    stream->Add("%f ", FmtElm(double_value_));
  } else if (has_external_reference_value_) {
    stream->Add("%p ", reinterpret_cast<void*>(
        external_reference_value_.address()));
  } else {
    handle(Isolate::Current())->ShortPrint(stream);
  }
  if (!is_not_in_new_space_) {
    stream->Add("[new space] ");
  }
}


// Prints operands plus overflow ('!') and minus-zero-bailout ('-0?') marks.
void HBinaryOperation::PrintDataTo(StringStream* stream) {
  left()->PrintNameTo(stream);
  stream->Add(" ");
  right()->PrintNameTo(stream);
  if (CheckFlag(kCanOverflow)) stream->Add(" !");
  if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
}


// Infers a representation from inputs, then use requirements, then either
// the uses or the observed output representation (when one was recorded).
void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");

  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }

  if (observed_output_representation_.IsNone()) {
    new_rep = RepresentationFromUses();
    UpdateRepresentation(new_rep, h_infer, "uses");
  } else {
    new_rep =
        RepresentationFromOutput();
    UpdateRepresentation(new_rep, h_infer, "output");
  }
}


Representation HBinaryOperation::RepresentationFromInputs() {
  // Determine the worst case of observed input representations and
  // the currently assumed output representation.
  Representation rep = representation();
  for (int i = 1; i <= 2; ++i) {
    rep = rep.generalize(observed_input_representation(i));
  }
  // If any of the actual input representation is more general than what we
  // have so far but not Tagged, use that representation instead.
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
  if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);

  return rep;
}


// True when all uses truncate to the current (smi/int32) representation, so
// a more general observed output representation may be safely ignored.
bool HBinaryOperation::IgnoreObservedOutputRepresentation(
    Representation current_rep) {
  return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
          (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
         // Mul in Integer32 mode would be too precise.
         (!this->IsMul() || HMul::cast(this)->MulMinusOne());
}


Representation HBinaryOperation::RepresentationFromOutput() {
  Representation rep = representation();
  // Consider observed output representation, but ignore it if it's Double,
  // this instruction is not a division, and all its uses are truncating
  // to Integer32.
  if (observed_output_representation_.is_more_general_than(rep) &&
      !IgnoreObservedOutputRepresentation(rep)) {
    return observed_output_representation_;
  }
  return Representation::None();
}


// Pins both observed input representations to r before delegating to the
// base-class handling.
void HBinaryOperation::AssumeRepresentation(Representation r) {
  set_observed_input_representation(1, r);
  set_observed_input_representation(2, r);
  HValue::AssumeRepresentation(r);
}


// Min/max only looks at its inputs when inferring a representation.
void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  // Do not care about uses.
}


Range* HBitwise::InferRange(Zone* zone) {
  if (op() == Token::BIT_XOR) {
    if (left()->HasRange() && right()->HasRange()) {
      // The maximum value has the high bit, and all bits below, set:
      // (1 << high) - 1.
      // If the range can be negative, the minimum int is a negative number with
      // the high bit, and all bits below, unset:
      // -(1 << high).
      // If it cannot be negative, conservatively choose 0 as minimum int.
      int64_t left_upper = left()->range()->upper();
      int64_t left_lower = left()->range()->lower();
      int64_t right_upper = right()->range()->upper();
      int64_t right_lower = right()->range()->lower();

      // Fold negative bounds onto their one's-complement so only magnitude
      // bits remain for the highest-bit computation below.
      if (left_upper < 0) left_upper = ~left_upper;
      if (left_lower < 0) left_lower = ~left_lower;
      if (right_upper < 0) right_upper = ~right_upper;
      if (right_lower < 0) right_lower = ~right_lower;

      int high = MostSignificantBit(
          static_cast<uint32_t>(
              left_upper | left_lower | right_upper | right_lower));

      int64_t limit = 1;
      limit <<= high;
      int32_t min = (left()->range()->CanBeNegative() ||
                     right()->range()->CanBeNegative())
                    ? static_cast<int32_t>(-limit) : 0;
      return new(zone) Range(min, static_cast<int32_t>(limit - 1));
    }
    Range* result = HValue::InferRange(zone);
    result->set_can_be_minus_zero(false);
    return result;
  }
  // AND/OR: combine the operand ranges' bit masks; a non-negative combined
  // mask bounds the result in [0, mask].
  const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
  int32_t left_mask = (left()->range() != NULL)
      ? left()->range()->Mask()
      : kDefaultMask;
  int32_t right_mask = (right()->range() != NULL)
      ? right()->range()->Mask()
      : kDefaultMask;
  int32_t result_mask = (op() == Token::BIT_AND)
      ? left_mask & right_mask
      : left_mask | right_mask;
  if (result_mask >= 0) return new(zone) Range(0, result_mask);

  Range* result = HValue::InferRange(zone);
  result->set_can_be_minus_zero(false);
  return result;
}


// Arithmetic right shift by a constant: shift the input range.
Range* HSar::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      Range* result = (left()->range() != NULL)
          ? left()->range()->Copy(zone)
          : new(zone) Range();
      result->Sar(c->Integer32Value());
      return result;
    }
  }
  return HValue::InferRange(zone);
}


Range* HShr::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      int shift_count = c->Integer32Value() & 0x1f;
      if (left()->range()->CanBeNegative()) {
        // Only compute bounds if the result always fits into an int32.
        return (shift_count >= 1)
            ? new(zone) Range(0,
                              static_cast<uint32_t>(0xffffffff) >> shift_count)
            : new(zone) Range();
      } else {
        // For positive inputs we can use the >> operator.
        Range* result = (left()->range() != NULL)
            ?
            left()->range()->Copy(zone)
            : new(zone) Range();
        result->Sar(c->Integer32Value());
        return result;
      }
    }
  }
  return HValue::InferRange(zone);
}


// Left shift by a constant: shift the input range.
Range* HShl::InferRange(Zone* zone) {
  if (right()->IsConstant()) {
    HConstant* c = HConstant::cast(right());
    if (c->HasInteger32Value()) {
      Range* result = (left()->range() != NULL)
          ? left()->range()->Copy(zone)
          : new(zone) Range();
      result->Shl(c->Integer32Value());
      return result;
    }
  }
  return HValue::InferRange(zone);
}


// The field's declared representation bounds the loaded value; string
// lengths are bounded by String::kMaxLength.
Range* HLoadNamedField::InferRange(Zone* zone) {
  if (access().representation().IsInteger8()) {
    return new(zone) Range(kMinInt8, kMaxInt8);
  }
  if (access().representation().IsUInteger8()) {
    return new(zone) Range(kMinUInt8, kMaxUInt8);
  }
  if (access().representation().IsInteger16()) {
    return new(zone) Range(kMinInt16, kMaxInt16);
  }
  if (access().representation().IsUInteger16()) {
    return new(zone) Range(kMinUInt16, kMaxUInt16);
  }
  if (access().IsStringLength()) {
    return new(zone) Range(0, String::kMaxLength);
  }
  return HValue::InferRange(zone);
}


// External-array element kinds bound the loaded value by their element type.
Range* HLoadKeyed::InferRange(Zone* zone) {
  switch (elements_kind()) {
    case EXTERNAL_BYTE_ELEMENTS:
      return new(zone) Range(kMinInt8, kMaxInt8);
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
    case EXTERNAL_PIXEL_ELEMENTS:
      return new(zone) Range(kMinUInt8, kMaxUInt8);
    case EXTERNAL_SHORT_ELEMENTS:
      return new(zone) Range(kMinInt16, kMaxInt16);
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
      return new(zone) Range(kMinUInt16, kMaxUInt16);
    default:
      return HValue::InferRange(zone);
  }
}


void HCompareGeneric::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(token()));
  stream->Add(" ");
  HBinaryOperation::PrintDataTo(stream);
}


void HStringCompareAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(token()));
  stream->Add(" ");
  HControlInstruction::PrintDataTo(stream);
}


void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(token()));
  stream->Add(" ");
  left()->PrintNameTo(stream);
  stream->Add(" ");
  right()->PrintNameTo(stream);
  HControlInstruction::PrintDataTo(stream);
}


void HCompareObjectEqAndBranch::PrintDataTo(StringStream* stream) {
  left()->PrintNameTo(stream);
  stream->Add(" ");
  right()->PrintNameTo(stream);
  HControlInstruction::PrintDataTo(stream);
}


// Folds the branch statically when both operands are constants.
bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (left()->IsConstant() && right()->IsConstant()) {
    bool comparison_result =
        HConstant::cast(left())->Equals(HConstant::cast(right()));
    *block = comparison_result
        ? FirstSuccessor()
        : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


// The hole check simply adopts its input's representation.
void HCompareHoleAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}


bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (value()->representation().IsSmiOrInteger32()) {
    // A Smi or Integer32 cannot contain minus zero.
    *block = SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


// The minus-zero check simply adopts its input's representation.
void HCompareMinusZeroAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  ChangeRepresentation(value()->representation());
}



void HGoto::PrintDataTo(StringStream* stream) {
  stream->Add("B%d", SuccessorAt(0)->block_id());
}


// Picks smi/int32 when all observations allow it, double otherwise; for
// double equality-style comparisons, undefined-as-NaN must force a deopt
// (see the comment below).
void HCompareNumericAndBranch::InferRepresentation(
    HInferRepresentationPhase* h_infer) {
  Representation left_rep = left()->representation();
  Representation right_rep = right()->representation();
  Representation observed_left = observed_input_representation(0);
  Representation observed_right = observed_input_representation(1);

  Representation rep = Representation::None();
  rep = rep.generalize(observed_left);
  rep = rep.generalize(observed_right);
  if (rep.IsNone() || rep.IsSmiOrInteger32()) {
    if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
    if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
  } else {
    rep = Representation::Double();
  }

  if (rep.IsDouble()) {
    // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
    // and !=) have special handling of undefined, e.g. undefined == undefined
    // is 'true'. Relational comparisons have a different semantic, first
    // calling ToPrimitive() on their arguments. The standard Crankshaft
    // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
    // inputs are doubles caused 'undefined' to be converted to NaN. That's
    // compatible out-of-the box with ordered relational comparisons (<, >, <=,
    // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
    // it is not consistent with the spec. For example, it would cause undefined
    // == undefined (should be true) to be evaluated as NaN == NaN
    // (false).
    // Therefore, any comparisons other than ordered relational
    // comparisons must cause a deopt when one of their arguments is undefined.
    // See also v8:1434
    if (Token::IsOrderedRelationalCompareOp(token_)) {
      SetFlag(kAllowUndefinedAsNaN);
    }
  }
  ChangeRepresentation(rep);
}


void HParameter::PrintDataTo(StringStream* stream) {
  stream->Add("%u", index());
}


void HLoadNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  access_.PrintTo(stream);
}


// Creates a map check for "value"; when "value" is a constant already known
// to have "map" and the map allows it, the check is marked omittable (a
// compilation dependency is registered if the map can still transition).
// NOTE(review): the context parameter appears unused here — presumably kept
// for factory-signature uniformity; confirm against callers.
HCheckMaps* HCheckMaps::New(Zone* zone,
                            HValue* context,
                            HValue* value,
                            Handle<Map> map,
                            CompilationInfo* info,
                            HValue* typecheck) {
  HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
  check_map->Add(map, zone);
  if (map->CanOmitMapChecks() &&
      value->IsConstant() &&
      HConstant::cast(value)->HasMap(map)) {
    // TODO(titzer): collect dependent map checks into a list.
    check_map->omit_ = true;
    if (map->CanTransition()) {
      map->AddDependentCompilationInfo(
          DependentCode::kPrototypeCheckGroup, info);
    }
  }
  return check_map;
}


void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(".");
  stream->Add(*String::cast(*name())->ToCString());
}


// Prints elements (with kind for external arrays), key (with dehoisted
// offset), dependency and hole-check status.
void HLoadKeyed::PrintDataTo(StringStream* stream) {
  if (!is_external()) {
    elements()->PrintNameTo(stream);
  } else {
    ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
    elements()->PrintNameTo(stream);
    stream->Add(".");
    stream->Add(ElementsKindToString(elements_kind()));
  }

  stream->Add("[");
  key()->PrintNameTo(stream);
  if (IsDehoisted()) {
    stream->Add(" + %d]", index_offset());
  } else {
    stream->Add("]");
  }

  if (HasDependency()) {
    stream->Add(" ");
    dependency()->PrintNameTo(stream);
  }

  if (RequiresHoleCheck()) {
    stream->Add(" check_hole");
  }
}


// True when a potential hole may be returned to the uses instead of being
// checked away at the load site.
bool HLoadKeyed::UsesMustHandleHole() const {
  // Packed and external arrays cannot contain holes at all.
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if (IsExternalArrayElementsKind(elements_kind())) {
    return false;
  }

  if (hole_mode() == ALLOW_RETURN_HOLE) {
    if (IsFastDoubleElementsKind(elements_kind())) {
      return AllUsesCanTreatHoleAsNaN();
    }
    return true;
  }

  if (IsFastDoubleElementsKind(elements_kind())) {
    return false;
  }

  // Holes are only returned as tagged values.
  if (!representation().IsTagged()) {
    return false;
  }

  // Every use must be a change instruction for the hole to pass through.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return false;
  }

  return true;
}


// Double-array holes may be surfaced as NaN only if all uses accept
// undefined-as-NaN.
bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
  return IsFastDoubleElementsKind(elements_kind()) &&
      CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
}


// A hole check is needed exactly when holes are possible and the uses
// cannot handle them themselves.
bool HLoadKeyed::RequiresHoleCheck() const {
  if (IsFastPackedElementsKind(elements_kind())) {
    return false;
  }

  if (IsExternalArrayElementsKind(elements_kind())) {
    return false;
  }

  return !UsesMustHandleHole();
}


void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add("[");
  key()->PrintNameTo(stream);
  stream->Add("]");
}


HValue* HLoadKeyedGeneric::Canonicalize() {
  // Recognize generic keyed loads that use property name generated
  // by for-in statement as a key and rewrite them into fast property load
  // by index.
  if (key()->IsLoadKeyed()) {
    HLoadKeyed* key_load = HLoadKeyed::cast(key());
    if (key_load->elements()->IsForInCacheArray()) {
      HForInCacheArray* names_cache =
          HForInCacheArray::cast(key_load->elements());

      if (names_cache->enumerable() == object()) {
        // Load the index corresponding to the name from the parallel
        // index cache, then load the property directly by index.
        HForInCacheArray* index_cache =
            names_cache->index_cache();
        HCheckMapValue* map_check =
            HCheckMapValue::New(block()->graph()->zone(),
                                block()->graph()->GetInvalidContext(),
                                object(),
                                names_cache->map());
        HInstruction* index = HLoadKeyed::New(
            block()->graph()->zone(),
            block()->graph()->GetInvalidContext(),
            index_cache,
            key_load->key(),
            key_load->key(),
            key_load->elements_kind());
        map_check->InsertBefore(this);
        index->InsertBefore(this);
        HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
            object(), index);
        load->InsertBefore(this);
        return load;
      }
    }
  }

  return this;
}


void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add(".");
  ASSERT(name()->IsString());
  stream->Add(*String::cast(*name())->ToCString());
  stream->Add(" = ");
  value()->PrintNameTo(stream);
}


// Prints the access plus write-barrier and map-transition annotations.
void HStoreNamedField::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  access_.PrintTo(stream);
  stream->Add(" = ");
  value()->PrintNameTo(stream);
  if (NeedsWriteBarrier()) {
    stream->Add(" (write-barrier)");
  }
  if (has_transition()) {
    stream->Add(" (transition map %p)", *transition_map());
  }
}


void HStoreKeyed::PrintDataTo(StringStream* stream) {
  if (!is_external()) {
    elements()->PrintNameTo(stream);
  } else {
    elements()->PrintNameTo(stream);
    stream->Add(".");
    stream->Add(ElementsKindToString(elements_kind()));
    ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
           elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
  }

  stream->Add("[");
  key()->PrintNameTo(stream);
  if (IsDehoisted()) {
    stream->Add(" + %d] = ", index_offset());
  } else {
    stream->Add("] = ");
  }

  value()->PrintNameTo(stream);
}


void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  stream->Add("[");
  key()->PrintNameTo(stream);
  stream->Add("] = ");
  value()->PrintNameTo(stream);
}


// Prints the source/target maps with their element kinds and marks simple
// (in-place) transitions.
void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
  object()->PrintNameTo(stream);
  ElementsKind from_kind = original_map().handle()->elements_kind();
  ElementsKind to_kind = transitioned_map().handle()->elements_kind();
  stream->Add(" %p [%s] -> %p [%s]",
              *original_map().handle(),
              ElementsAccessor::ForKind(from_kind)->name(),
              *transitioned_map().handle(),
              ElementsAccessor::ForKind(to_kind)->name());
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) stream->Add(" (simple)");
}


void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
  stream->Add("[%p]", *cell().handle());
  if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
  if (details_.IsReadOnly()) stream->Add(" (read-only)");
}


// Non-deletable, writable cells never hold the hole; otherwise a check is
// needed unless every use is a change instruction.
bool HLoadGlobalCell::RequiresHoleCheck() const {
  if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (!use->IsChange()) return true;
  }
  return false;
}


void HLoadGlobalGeneric::PrintDataTo(StringStream* stream) {
  stream->Add("%o ", *name());
}


void HInnerAllocatedObject::PrintDataTo(StringStream* stream) {
  base_object()->PrintNameTo(stream);
  stream->Add(" offset %d", offset());
}


void HStoreGlobalCell::PrintDataTo(StringStream* stream) {
  stream->Add("[%p] = ", *cell().handle());
  value()->PrintNameTo(stream);
  if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
  if (details_.IsReadOnly()) stream->Add(" (read-only)");
}


void HStoreGlobalGeneric::PrintDataTo(StringStream* stream) {
  stream->Add("%o = ", *name());
  value()->PrintNameTo(stream);
}


void HLoadContextSlot::PrintDataTo(StringStream* stream) {
  value()->PrintNameTo(stream);
  stream->Add("[%d]", slot_index());
}


void HStoreContextSlot::PrintDataTo(StringStream* stream) {
  context()->PrintNameTo(stream);
  stream->Add("[%d] = ", slot_index());
  value()->PrintNameTo(stream);
}


// Implementation of type inference and type conversions. Calculates
// the inferred type of this instruction based on the input operands.

// Default: an instruction's inferred type is its statically assigned type.
HType HValue::CalculateInferredType() {
  return type_;
}


// A phi's type is the combination (join) of all operand types.
HType HPhi::CalculateInferredType() {
  if (OperandCount() == 0) return HType::Tagged();
  HType result = OperandAt(0)->type();
  for (int i = 1; i < OperandCount(); ++i) {
    HType current = OperandAt(i)->type();
    result = result.Combine(current);
  }
  return result;
}


// A double boxed to a tagged value is always a heap number.
HType HChange::CalculateInferredType() {
  if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
  return type();
}


Representation HUnaryMathOperation::RepresentationFromInputs() {
  Representation rep = representation();
  // If any of the actual input representation is more general than what we
  // have so far but not Tagged, use that representation instead.
3396 Representation input_rep = value()->representation(); 3397 if (!input_rep.IsTagged()) { 3398 rep = rep.generalize(input_rep); 3399 } 3400 return rep; 3401 } 3402 3403 3404 void HAllocate::HandleSideEffectDominator(GVNFlag side_effect, 3405 HValue* dominator) { 3406 ASSERT(side_effect == kChangesNewSpacePromotion); 3407 Zone* zone = block()->zone(); 3408 if (!FLAG_use_allocation_folding) return; 3409 3410 // Try to fold allocations together with their dominating allocations. 3411 if (!dominator->IsAllocate()) { 3412 if (FLAG_trace_allocation_folding) { 3413 PrintF("#%d (%s) cannot fold into #%d (%s)\n", 3414 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); 3415 } 3416 return; 3417 } 3418 3419 HAllocate* dominator_allocate = HAllocate::cast(dominator); 3420 HValue* dominator_size = dominator_allocate->size(); 3421 HValue* current_size = size(); 3422 3423 // TODO(hpayer): Add support for non-constant allocation in dominator. 3424 if (!current_size->IsInteger32Constant() || 3425 !dominator_size->IsInteger32Constant()) { 3426 if (FLAG_trace_allocation_folding) { 3427 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation size\n", 3428 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); 3429 } 3430 return; 3431 } 3432 3433 dominator_allocate = GetFoldableDominator(dominator_allocate); 3434 if (dominator_allocate == NULL) { 3435 return; 3436 } 3437 3438 ASSERT((IsNewSpaceAllocation() && 3439 dominator_allocate->IsNewSpaceAllocation()) || 3440 (IsOldDataSpaceAllocation() && 3441 dominator_allocate->IsOldDataSpaceAllocation()) || 3442 (IsOldPointerSpaceAllocation() && 3443 dominator_allocate->IsOldPointerSpaceAllocation())); 3444 3445 // First update the size of the dominator allocate instruction. 
3446 dominator_size = dominator_allocate->size(); 3447 int32_t original_object_size = 3448 HConstant::cast(dominator_size)->GetInteger32Constant(); 3449 int32_t dominator_size_constant = original_object_size; 3450 int32_t current_size_constant = 3451 HConstant::cast(current_size)->GetInteger32Constant(); 3452 int32_t new_dominator_size = dominator_size_constant + current_size_constant; 3453 3454 if (MustAllocateDoubleAligned()) { 3455 if (!dominator_allocate->MustAllocateDoubleAligned()) { 3456 dominator_allocate->MakeDoubleAligned(); 3457 } 3458 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) { 3459 dominator_size_constant += kDoubleSize / 2; 3460 new_dominator_size += kDoubleSize / 2; 3461 } 3462 } 3463 3464 if (new_dominator_size > isolate()->heap()->MaxRegularSpaceAllocationSize()) { 3465 if (FLAG_trace_allocation_folding) { 3466 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n", 3467 id(), Mnemonic(), dominator_allocate->id(), 3468 dominator_allocate->Mnemonic(), new_dominator_size); 3469 } 3470 return; 3471 } 3472 3473 HInstruction* new_dominator_size_constant = HConstant::CreateAndInsertBefore( 3474 zone, 3475 context(), 3476 new_dominator_size, 3477 Representation::None(), 3478 dominator_allocate); 3479 dominator_allocate->UpdateSize(new_dominator_size_constant); 3480 3481 #ifdef VERIFY_HEAP 3482 if (FLAG_verify_heap && dominator_allocate->IsNewSpaceAllocation()) { 3483 dominator_allocate->MakePrefillWithFiller(); 3484 } else { 3485 // TODO(hpayer): This is a short-term hack to make allocation mementos 3486 // work again in new space. 3487 dominator_allocate->ClearNextMapWord(original_object_size); 3488 } 3489 #else 3490 // TODO(hpayer): This is a short-term hack to make allocation mementos 3491 // work again in new space. 
3492 dominator_allocate->ClearNextMapWord(original_object_size); 3493 #endif 3494 3495 dominator_allocate->clear_next_map_word_ = clear_next_map_word_; 3496 3497 // After that replace the dominated allocate instruction. 3498 HInstruction* dominated_allocate_instr = 3499 HInnerAllocatedObject::New(zone, 3500 context(), 3501 dominator_allocate, 3502 dominator_size, 3503 type()); 3504 dominated_allocate_instr->InsertBefore(this); 3505 DeleteAndReplaceWith(dominated_allocate_instr); 3506 if (FLAG_trace_allocation_folding) { 3507 PrintF("#%d (%s) folded into #%d (%s)\n", 3508 id(), Mnemonic(), dominator_allocate->id(), 3509 dominator_allocate->Mnemonic()); 3510 } 3511 } 3512 3513 3514 HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) { 3515 if (!IsFoldable(dominator)) { 3516 // We cannot hoist old space allocations over new space allocations. 3517 if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) { 3518 if (FLAG_trace_allocation_folding) { 3519 PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n", 3520 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); 3521 } 3522 return NULL; 3523 } 3524 3525 HAllocate* dominator_dominator = dominator->dominating_allocate_; 3526 3527 // We can hoist old data space allocations over an old pointer space 3528 // allocation and vice versa. For that we have to check the dominator 3529 // of the dominator allocate instruction. 3530 if (dominator_dominator == NULL) { 3531 dominating_allocate_ = dominator; 3532 if (FLAG_trace_allocation_folding) { 3533 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", 3534 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); 3535 } 3536 return NULL; 3537 } 3538 3539 // We can just fold old space allocations that are in the same basic block, 3540 // since it is not guaranteed that we fill up the whole allocated old 3541 // space memory. 
3542 // TODO(hpayer): Remove this limitation and add filler maps for each each 3543 // allocation as soon as we have store elimination. 3544 if (block()->block_id() != dominator_dominator->block()->block_id()) { 3545 if (FLAG_trace_allocation_folding) { 3546 PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n", 3547 id(), Mnemonic(), dominator_dominator->id(), 3548 dominator_dominator->Mnemonic()); 3549 } 3550 return NULL; 3551 } 3552 3553 ASSERT((IsOldDataSpaceAllocation() && 3554 dominator_dominator->IsOldDataSpaceAllocation()) || 3555 (IsOldPointerSpaceAllocation() && 3556 dominator_dominator->IsOldPointerSpaceAllocation())); 3557 3558 int32_t current_size = HConstant::cast(size())->GetInteger32Constant(); 3559 HStoreNamedField* dominator_free_space_size = 3560 dominator->filler_free_space_size_; 3561 if (dominator_free_space_size != NULL) { 3562 // We already hoisted one old space allocation, i.e., we already installed 3563 // a filler map. Hence, we just have to update the free space size. 3564 dominator->UpdateFreeSpaceFiller(current_size); 3565 } else { 3566 // This is the first old space allocation that gets hoisted. We have to 3567 // install a filler map since the follwing allocation may cause a GC. 3568 dominator->CreateFreeSpaceFiller(current_size); 3569 } 3570 3571 // We can hoist the old space allocation over the actual dominator. 3572 return dominator_dominator; 3573 } 3574 return dominator; 3575 } 3576 3577 3578 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) { 3579 ASSERT(filler_free_space_size_ != NULL); 3580 Zone* zone = block()->zone(); 3581 // We must explicitly force Smi representation here because on x64 we 3582 // would otherwise automatically choose int32, but the actual store 3583 // requires a Smi-tagged value. 
3584 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore( 3585 zone, 3586 context(), 3587 filler_free_space_size_->value()->GetInteger32Constant() + 3588 free_space_size, 3589 Representation::Smi(), 3590 filler_free_space_size_); 3591 filler_free_space_size_->UpdateValue(new_free_space_size); 3592 } 3593 3594 3595 void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) { 3596 ASSERT(filler_free_space_size_ == NULL); 3597 Zone* zone = block()->zone(); 3598 HInstruction* free_space_instr = 3599 HInnerAllocatedObject::New(zone, context(), dominating_allocate_, 3600 dominating_allocate_->size(), type()); 3601 free_space_instr->InsertBefore(this); 3602 HConstant* filler_map = HConstant::New( 3603 zone, 3604 context(), 3605 isolate()->factory()->free_space_map()); 3606 filler_map->FinalizeUniqueness(); // TODO(titzer): should be init'd a'ready 3607 filler_map->InsertAfter(free_space_instr); 3608 HInstruction* store_map = HStoreNamedField::New(zone, context(), 3609 free_space_instr, HObjectAccess::ForMap(), filler_map); 3610 store_map->SetFlag(HValue::kHasNoObservableSideEffects); 3611 store_map->InsertAfter(filler_map); 3612 3613 // We must explicitly force Smi representation here because on x64 we 3614 // would otherwise automatically choose int32, but the actual store 3615 // requires a Smi-tagged value. 3616 HConstant* filler_size = HConstant::CreateAndInsertAfter( 3617 zone, context(), free_space_size, Representation::Smi(), store_map); 3618 // Must force Smi representation for x64 (see comment above). 
3619 HObjectAccess access = 3620 HObjectAccess::ForJSObjectOffset(FreeSpace::kSizeOffset, 3621 Representation::Smi()); 3622 HStoreNamedField* store_size = HStoreNamedField::New(zone, context(), 3623 free_space_instr, access, filler_size); 3624 store_size->SetFlag(HValue::kHasNoObservableSideEffects); 3625 store_size->InsertAfter(filler_size); 3626 filler_free_space_size_ = store_size; 3627 } 3628 3629 3630 void HAllocate::ClearNextMapWord(int offset) { 3631 if (clear_next_map_word_) { 3632 Zone* zone = block()->zone(); 3633 HObjectAccess access = HObjectAccess::ForJSObjectOffset(offset); 3634 HStoreNamedField* clear_next_map = 3635 HStoreNamedField::New(zone, context(), this, access, 3636 block()->graph()->GetConstantNull()); 3637 clear_next_map->ClearAllSideEffects(); 3638 clear_next_map->InsertAfter(this); 3639 } 3640 } 3641 3642 3643 void HAllocate::PrintDataTo(StringStream* stream) { 3644 size()->PrintNameTo(stream); 3645 stream->Add(" ("); 3646 if (IsNewSpaceAllocation()) stream->Add("N"); 3647 if (IsOldPointerSpaceAllocation()) stream->Add("P"); 3648 if (IsOldDataSpaceAllocation()) stream->Add("D"); 3649 if (MustAllocateDoubleAligned()) stream->Add("A"); 3650 if (MustPrefillWithFiller()) stream->Add("F"); 3651 stream->Add(")"); 3652 } 3653 3654 3655 HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero( 3656 BitVector* visited) { 3657 visited->Add(id()); 3658 if (representation().IsSmiOrInteger32() && 3659 !value()->representation().Equals(representation())) { 3660 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) { 3661 SetFlag(kBailoutOnMinusZero); 3662 } 3663 } 3664 if (RequiredInputRepresentation(0).IsSmiOrInteger32() && 3665 representation().Equals(RequiredInputRepresentation(0))) { 3666 return value(); 3667 } 3668 return NULL; 3669 } 3670 3671 3672 HValue* HChange::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3673 visited->Add(id()); 3674 if (from().IsSmiOrInteger32()) return NULL; 3675 if (CanTruncateToInt32()) return 
NULL; 3676 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) { 3677 SetFlag(kBailoutOnMinusZero); 3678 } 3679 ASSERT(!from().IsSmiOrInteger32() || !to().IsSmiOrInteger32()); 3680 return NULL; 3681 } 3682 3683 3684 HValue* HForceRepresentation::EnsureAndPropagateNotMinusZero( 3685 BitVector* visited) { 3686 visited->Add(id()); 3687 return value(); 3688 } 3689 3690 3691 HValue* HMod::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3692 visited->Add(id()); 3693 if (range() == NULL || range()->CanBeMinusZero()) { 3694 SetFlag(kBailoutOnMinusZero); 3695 return left(); 3696 } 3697 return NULL; 3698 } 3699 3700 3701 HValue* HDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3702 visited->Add(id()); 3703 if (range() == NULL || range()->CanBeMinusZero()) { 3704 SetFlag(kBailoutOnMinusZero); 3705 } 3706 return NULL; 3707 } 3708 3709 3710 HValue* HMathFloorOfDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3711 visited->Add(id()); 3712 SetFlag(kBailoutOnMinusZero); 3713 return NULL; 3714 } 3715 3716 3717 HValue* HMul::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3718 visited->Add(id()); 3719 if (range() == NULL || range()->CanBeMinusZero()) { 3720 SetFlag(kBailoutOnMinusZero); 3721 } 3722 return NULL; 3723 } 3724 3725 3726 HValue* HSub::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3727 visited->Add(id()); 3728 // Propagate to the left argument. If the left argument cannot be -0, then 3729 // the result of the add operation cannot be either. 3730 if (range() == NULL || range()->CanBeMinusZero()) { 3731 return left(); 3732 } 3733 return NULL; 3734 } 3735 3736 3737 HValue* HAdd::EnsureAndPropagateNotMinusZero(BitVector* visited) { 3738 visited->Add(id()); 3739 // Propagate to the left argument. If the left argument cannot be -0, then 3740 // the result of the sub operation cannot be either. 
3741 if (range() == NULL || range()->CanBeMinusZero()) { 3742 return left(); 3743 } 3744 return NULL; 3745 } 3746 3747 3748 bool HStoreKeyed::NeedsCanonicalization() { 3749 // If value is an integer or smi or comes from the result of a keyed load or 3750 // constant then it is either be a non-hole value or in the case of a constant 3751 // the hole is only being stored explicitly: no need for canonicalization. 3752 // 3753 // The exception to that is keyed loads from external float or double arrays: 3754 // these can load arbitrary representation of NaN. 3755 3756 if (value()->IsConstant()) { 3757 return false; 3758 } 3759 3760 if (value()->IsLoadKeyed()) { 3761 return IsExternalFloatOrDoubleElementsKind( 3762 HLoadKeyed::cast(value())->elements_kind()); 3763 } 3764 3765 if (value()->IsChange()) { 3766 if (HChange::cast(value())->from().IsSmiOrInteger32()) { 3767 return false; 3768 } 3769 if (HChange::cast(value())->value()->type().IsSmi()) { 3770 return false; 3771 } 3772 } 3773 return true; 3774 } 3775 3776 3777 #define H_CONSTANT_INT(val) \ 3778 HConstant::New(zone, context, static_cast<int32_t>(val)) 3779 #define H_CONSTANT_DOUBLE(val) \ 3780 HConstant::New(zone, context, static_cast<double>(val)) 3781 3782 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \ 3783 HInstruction* HInstr::New( \ 3784 Zone* zone, HValue* context, HValue* left, HValue* right) { \ 3785 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \ 3786 HConstant* c_left = HConstant::cast(left); \ 3787 HConstant* c_right = HConstant::cast(right); \ 3788 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \ 3789 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \ 3790 if (TypeInfo::IsInt32Double(double_res)) { \ 3791 return H_CONSTANT_INT(double_res); \ 3792 } \ 3793 return H_CONSTANT_DOUBLE(double_res); \ 3794 } \ 3795 } \ 3796 return new(zone) HInstr(context, left, right); \ 3797 } 3798 3799 3800 
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)

#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR


// Folds the concatenation of two constant strings into a flat constant.
HInstruction* HStringAdd::New(Zone* zone,
                              HValue* context,
                              HValue* left,
                              HValue* right,
                              StringAddFlags flags) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_right = HConstant::cast(right);
    HConstant* c_left = HConstant::cast(left);
    if (c_left->HasStringValue() && c_right->HasStringValue()) {
      Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
          c_left->StringValue(), c_right->StringValue());
      return HConstant::New(zone, context, concat);
    }
  }
  return new(zone) HStringAdd(context, left, right, flags);
}


HInstruction* HStringCharFromCode::New(
    Zone* zone, HValue* context, HValue* char_code) {
  if (FLAG_fold_constants && char_code->IsConstant()) {
    HConstant* c_code = HConstant::cast(char_code);
    Isolate* isolate = zone->isolate();
    if (c_code->HasNumberValue()) {
      if (std::isfinite(c_code->DoubleValue())) {
        // Code units are 16-bit, so only the low 16 bits are relevant.
        uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
        return HConstant::New(zone, context,
            LookupSingleCharacterStringFromCode(isolate, code));
      }
      // Non-finite code point: fold to the empty string.
      return HConstant::New(zone, context, isolate->factory()->empty_string());
    }
  }
  return new(zone) HStringCharFromCode(context, char_code);
}


// Constant-folds unary Math builtins; falls through to instruction
// construction whenever the operand is not a foldable number constant.
HInstruction* HUnaryMathOperation::New(
    Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
  do {
    if (!FLAG_fold_constants) break;
    if (!value->IsConstant()) break;
    HConstant* constant = HConstant::cast(value);
    if (!constant->HasNumberValue()) break;
    double d = constant->DoubleValue();
    if (std::isnan(d)) {  // NaN poisons everything.
      return H_CONSTANT_DOUBLE(OS::nan_value());
    }
    if (std::isinf(d)) {  // +Infinity and -Infinity.
      switch (op) {
        case kMathSin:
        case kMathCos:
        case kMathTan:
          return H_CONSTANT_DOUBLE(OS::nan_value());
        case kMathExp:
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
        case kMathLog:
        case kMathSqrt:
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : OS::nan_value());
        case kMathPowHalf:
        case kMathAbs:
          return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
        case kMathRound:
        case kMathFloor:
          return H_CONSTANT_DOUBLE(d);
        default:
          UNREACHABLE();
          break;
      }
    }
    switch (op) {
      case kMathSin:
        return H_CONSTANT_DOUBLE(fast_sin(d));
      case kMathCos:
        return H_CONSTANT_DOUBLE(fast_cos(d));
      case kMathTan:
        return H_CONSTANT_DOUBLE(fast_tan(d));
      case kMathExp:
        return H_CONSTANT_DOUBLE(fast_exp(d));
      case kMathLog:
        return H_CONSTANT_DOUBLE(fast_log(d));
      case kMathSqrt:
        return H_CONSTANT_DOUBLE(fast_sqrt(d));
      case kMathPowHalf:
        return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
      case kMathAbs:
        // `d + 0.0` normalizes -0 to +0.
        return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
      case kMathRound:
        // -0.5 .. -0.0 round to -0.0.
        if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
        // Doubles are represented as Significant * 2 ^ Exponent. If the
        // Exponent is not negative, the double value is already an integer.
        if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
        return H_CONSTANT_DOUBLE(floor(d + 0.5));
      case kMathFloor:
        return H_CONSTANT_DOUBLE(floor(d));
      default:
        UNREACHABLE();
        break;
    }
  } while (false);
  return new(zone) HUnaryMathOperation(context, value, op);
}


HInstruction* HPower::New(Zone* zone,
                          HValue* context,
                          HValue* left,
                          HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double result = power_helper(c_left->DoubleValue(),
                                   c_right->DoubleValue());
      // Canonicalize any NaN payload to the platform NaN.
      return H_CONSTANT_DOUBLE(std::isnan(result) ? OS::nan_value() : result);
    }
  }
  return new(zone) HPower(left, right);
}


HInstruction* HMathMinMax::New(
    Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
      double d_left = c_left->DoubleValue();
      double d_right = c_right->DoubleValue();
      if (op == kMathMin) {
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
                                                                 : d_right);
        }
      } else {
        if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
        if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
        if (d_left == d_right) {
          // Handle +0 and -0.
          return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
                                                                 : d_left);
        }
      }
      // All comparisons failed, must be NaN.
      return H_CONSTANT_DOUBLE(OS::nan_value());
    }
  }
  return new(zone) HMathMinMax(context, left, right, op);
}


HInstruction* HMod::New(Zone* zone,
                        HValue* context,
                        HValue* left,
                        HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
      int32_t dividend = c_left->Integer32Value();
      int32_t divisor = c_right->Integer32Value();
      // kMinInt % -1 would overflow in int32; JS result is -0.
      if (dividend == kMinInt && divisor == -1) {
        return H_CONSTANT_DOUBLE(-0.0);
      }
      if (divisor != 0) {
        int32_t res = dividend % divisor;
        // A zero remainder with negative dividend is -0 in JS.
        if ((res == 0) && (dividend < 0)) {
          return H_CONSTANT_DOUBLE(-0.0);
        }
        return H_CONSTANT_INT(res);
      }
    }
  }
  return new(zone) HMod(context, left, right);
}


HInstruction* HDiv::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  // If left and right are constant values, try to return a constant value.
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      if (c_right->DoubleValue() != 0) {
        double double_res = c_left->DoubleValue() / c_right->DoubleValue();
        if (TypeInfo::IsInt32Double(double_res)) {
          return H_CONSTANT_INT(double_res);
        }
        return H_CONSTANT_DOUBLE(double_res);
      } else {
        int sign = Double(c_left->DoubleValue()).Sign() *
            Double(c_right->DoubleValue()).Sign();  // Right could be -0.
        return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
      }
    }
  }
  return new(zone) HDiv(context, left, right);
}


HInstruction* HBitwise::New(
    Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t result;
      int32_t v_left = c_left->NumberValueAsInteger32();
      int32_t v_right = c_right->NumberValueAsInteger32();
      switch (op) {
        case Token::BIT_XOR:
          result = v_left ^ v_right;
          break;
        case Token::BIT_AND:
          result = v_left & v_right;
          break;
        case Token::BIT_OR:
          result = v_left | v_right;
          break;
        default:
          result = 0;  // Please the compiler.
          UNREACHABLE();
      }
      return H_CONSTANT_INT(result);
    }
  }
  return new(zone) HBitwise(context, op, left, right);
}


// Shift counts are masked with 0x1f below, matching JS shift semantics.
#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
HInstruction* HInstr::New( \
    Zone* zone, HValue* context, HValue* left, HValue* right) { \
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
    HConstant* c_left = HConstant::cast(left); \
    HConstant* c_right = HConstant::cast(right); \
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
      return H_CONSTANT_INT(result); \
    } \
  } \
  return new(zone) HInstr(context, left, right); \
}


DEFINE_NEW_H_BITWISE_INSTR(HSar,
c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
DEFINE_NEW_H_BITWISE_INSTR(HShl,
c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))

#undef DEFINE_NEW_H_BITWISE_INSTR


HInstruction* HShr::New(
    Zone* zone, HValue* context, HValue* left, HValue* right) {
  if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
    HConstant* c_left = HConstant::cast(left);
    HConstant* c_right = HConstant::cast(right);
    if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
      int32_t left_val = c_left->NumberValueAsInteger32();
      int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
      if ((right_val == 0) && (left_val < 0)) {
        // >>> 0 of a negative int32 yields a uint32 above kMaxInt, which
        // does not fit an Integer32 constant.
        return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
      }
      return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
    }
  }
  return new(zone) HShr(context, left, right);
}


HInstruction* HSeqStringGetChar::New(Zone* zone,
                                     HValue* context,
                                     String::Encoding encoding,
                                     HValue* string,
                                     HValue* index) {
  if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
      Handle<String> s = c_string->StringValue();
      int32_t i = c_index->Integer32Value();
      ASSERT_LE(0, i);
      ASSERT_LT(i, s->length());
      return H_CONSTANT_INT(s->Get(i));
    }
  }
  return new(zone) HSeqStringGetChar(encoding, string, index);
}


#undef H_CONSTANT_INT
#undef H_CONSTANT_DOUBLE


void HBitwise::PrintDataTo(StringStream* stream) {
  stream->Add(Token::Name(op_));
  stream->Add(" ");
  HBitwiseBinaryOperation::PrintDataTo(stream);
}


void HPhi::SimplifyConstantInputs() {
  // Convert constant inputs to integers when all uses are truncating.
  // This must happen before representation inference takes place.
  if (!CheckUsesForFlag(kTruncatingToInt32)) return;
  for (int i = 0; i < OperandCount(); ++i) {
    // Bail out unless every operand is a constant.
    if (!OperandAt(i)->IsConstant()) return;
  }
  HGraph* graph = block()->graph();
  for (int i = 0; i < OperandCount(); ++i) {
    HConstant* operand = HConstant::cast(OperandAt(i));
    if (operand->HasInteger32Value()) {
      continue;
    } else if (operand->HasDoubleValue()) {
      HConstant* integer_input =
          HConstant::New(graph->zone(), graph->GetInvalidContext(),
                         DoubleToInt32(operand->DoubleValue()));
      integer_input->InsertAfter(operand);
      SetOperandAt(i, integer_input);
    } else if (operand->HasBooleanValue()) {
      SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
                                              : graph->GetConstant0());
    } else if (operand->ImmortalImmovable()) {
      // Any other immortal-immovable constant truncates to zero.
      SetOperandAt(i, graph->GetConstant0());
    }
  }
  // Overwrite observed input representations because they are likely Tagged.
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    if (use->IsBinaryOperation()) {
      HBinaryOperation::cast(use)->set_observed_input_representation(
          it.index(), Representation::Smi());
    }
  }
}


void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
  ASSERT(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  new_rep = RepresentationFromUseRequirements();
  UpdateRepresentation(new_rep, h_infer, "use requirements");
}


// Generalizes over all operands' known-optimal representations.
Representation HPhi::RepresentationFromInputs() {
  Representation r = Representation::None();
  for (int i = 0; i < OperandCount(); ++i) {
    r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
  }
  return r;
}


// Returns a representation if all uses agree on the same representation.
// Integer32 is also returned when some uses are Smi but others are Integer32.
Representation HValue::RepresentationFromUseRequirements() {
  Representation rep = Representation::None();
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // Ignore the use requirement from never run code
    if (it.value()->block()->IsUnreachable()) continue;

    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (rep.IsNone()) {
      rep = use_rep;
      continue;
    }
    if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
    if (rep.generalize(use_rep).IsInteger32()) {
      rep = Representation::Integer32();
      continue;
    }
    // Conflicting requirements: no agreement.
    return Representation::None();
  }
  return rep;
}


// True if any use requires a representation other than None, Smi or Tagged.
bool HValue::HasNonSmiUse() {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    // We check for observed_input_representation elsewhere.
    Representation use_rep =
        it.value()->RequiredInputRepresentation(it.index());
    if (!use_rep.IsNone() &&
        !use_rep.IsSmi() &&
        !use_rep.IsTagged()) {
      return true;
    }
  }
  return false;
}


// Node-specific verification code is only included in debug mode.
#ifdef DEBUG

// Checks that each phi operand is defined in the corresponding predecessor
// block or in a block that dominates it.
void HPhi::Verify() {
  ASSERT(OperandCount() == block()->predecessors()->length());
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* value = OperandAt(i);
    HBasicBlock* defining_block = value->block();
    HBasicBlock* predecessor_block = block()->predecessors()->at(i);
    ASSERT(defining_block == predecessor_block ||
           defining_block->Dominates(predecessor_block));
  }
}


// A simulate instruction must carry an AST id.
void HSimulate::Verify() {
  HInstruction::Verify();
  ASSERT(HasAstId());
}


// Heap-object checks are pure guards; they must have no uses.
void HCheckHeapObject::Verify() {
  HInstruction::Verify();
  ASSERT(HasNoUses());
}


// Value checks are pure guards; they must have no uses.
void HCheckValue::Verify() {
  HInstruction::Verify();
  ASSERT(HasNoUses());
}

#endif


// Builds an access for a field inside the FixedArray header; the length
// field is routed through its dedicated accessor.
HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
  ASSERT(offset >= 0);
  ASSERT(offset < FixedArray::kHeaderSize);
  if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
  return HObjectAccess(kInobject, offset);
}


// Builds an access for an in-object JSObject field, classifying the
// well-known elements and map offsets into their dedicated portions.
HObjectAccess HObjectAccess::ForJSObjectOffset(int offset,
    Representation representation) {
  ASSERT(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  return HObjectAccess(portion, offset, representation);
}


// Builds a tagged access for context slot |index|, translated to a byte
// offset past the Context header.
HObjectAccess HObjectAccess::ForContextSlot(int index) {
  ASSERT(index >= 0);
  Portion portion = kInobject;
  int offset = Context::kHeaderSize + index * kPointerSize;
  // Sanity-check against the canonical slot-offset computation.
  ASSERT_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
  return HObjectAccess(portion, offset, Representation::Tagged());
}


// Builds an access for a JSArray field, classifying the well-known
// elements, length, and map offsets into their dedicated portions.
HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
  ASSERT(offset >= 0);
  Portion portion = kInobject;

  if (offset == JSObject::kElementsOffset) {
    portion = kElementsPointer;
  } else if (offset == JSArray::kLengthOffset) {
    portion = kArrayLengths;
  } else if (offset == JSObject::kMapOffset) {
    portion = kMaps;
  }
  return HObjectAccess(portion, offset);
}


// Builds an access into an out-of-object (backing-store) property array.
HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
    Representation representation) {
  ASSERT(offset >= 0);
  return HObjectAccess(kBackingStore, offset, representation);
}


// Computes the access for a named property described by |lookup|: the
// field index and representation come either from the existing field or,
// for a transition, from the transition target's descriptors.
HObjectAccess HObjectAccess::ForField(Handle<Map> map,
    LookupResult *lookup, Handle<String> name) {
  ASSERT(lookup->IsField() || lookup->IsTransitionToField());
  int index;
  Representation representation;
  if (lookup->IsField()) {
    index = lookup->GetLocalFieldIndexFromMap(*map);
    representation = lookup->representation();
  } else {
    Map* transition = lookup->GetTransitionTarget();
    int descriptor = transition->LastAdded();
    index = transition->instance_descriptors()->GetFieldIndex(descriptor) -
        map->inobject_properties();
    PropertyDetails details =
        transition->instance_descriptors()->GetDetails(descriptor);
    representation = details.representation();
  }
  if (index < 0) {
    // Negative property indices are in-object properties, indexed
    // from the end of the fixed part of the object.
    int offset = (index * kPointerSize) + map->instance_size();
    return HObjectAccess(kInobject, offset, representation);
  } else {
    // Non-negative property indices are in the properties array.
    int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
    return HObjectAccess(kBackingStore, offset, representation, name);
  }
}


// Builds the access for a Cell's value field, named "%cell_value" via the
// heap's canonical string.
HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
  return HObjectAccess(
      kInobject, Cell::kValueOffset, Representation::Tagged(),
      Handle<String>(isolate->heap()->cell_value_string()));
}


// Sets the GVN flags on |instr| that correspond to this access's portion,
// as either a load (depends-on) or a store (changes).
void HObjectAccess::SetGVNFlags(HValue *instr, bool is_store) {
  // set the appropriate GVN flags for a given load or store instruction
  if (is_store) {
    // track dominating allocations in order to eliminate write barriers
    instr->SetGVNFlag(kDependsOnNewSpacePromotion);
    instr->SetFlag(HValue::kTrackSideEffectDominators);
  } else {
    // try to GVN loads, but don't hoist above map changes
    instr->SetFlag(HValue::kUseGVN);
    instr->SetGVNFlag(kDependsOnMaps);
  }

  // Each portion maps to a matched changes/depends-on flag pair.
  switch (portion()) {
    case kArrayLengths:
      instr->SetGVNFlag(is_store
          ? kChangesArrayLengths : kDependsOnArrayLengths);
      break;
    case kStringLengths:
      instr->SetGVNFlag(is_store
          ? kChangesStringLengths : kDependsOnStringLengths);
      break;
    case kInobject:
      instr->SetGVNFlag(is_store
          ? kChangesInobjectFields : kDependsOnInobjectFields);
      break;
    case kDouble:
      instr->SetGVNFlag(is_store
          ? kChangesDoubleFields : kDependsOnDoubleFields);
      break;
    case kBackingStore:
      instr->SetGVNFlag(is_store
          ? kChangesBackingStoreFields : kDependsOnBackingStoreFields);
      break;
    case kElementsPointer:
      instr->SetGVNFlag(is_store
          ? kChangesElementsPointer : kDependsOnElementsPointer);
      break;
    case kMaps:
      instr->SetGVNFlag(is_store
          ? kChangesMaps : kDependsOnMaps);
      break;
    case kExternalMemory:
      instr->SetGVNFlag(is_store
          ? kChangesExternalMemory : kDependsOnExternalMemory);
      break;
  }
}


// Prints a symbolic description of the access, e.g. ".name[in-object]@12":
// an optional field name, a portion tag, and the byte offset.
void HObjectAccess::PrintTo(StringStream* stream) {
  stream->Add(".");

  switch (portion()) {
    case kArrayLengths:
    case kStringLengths:
      stream->Add("%length");
      break;
    case kElementsPointer:
      stream->Add("%elements");
      break;
    case kMaps:
      stream->Add("%map");
      break;
    case kDouble:  // fall through
    case kInobject:
      if (!name_.is_null()) stream->Add(*String::cast(*name_)->ToCString());
      stream->Add("[in-object]");
      break;
    case kBackingStore:
      if (!name_.is_null()) stream->Add(*String::cast(*name_)->ToCString());
      stream->Add("[backing-store]");
      break;
    case kExternalMemory:
      stream->Add("[external-memory]");
      break;
  }

  stream->Add("@%d", offset());
}

} }  // namespace v8::internal