1 // Copyright 2013 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #include "src/hydrogen.h" 6 7 #include <algorithm> 8 9 #include "src/v8.h" 10 11 #include "src/allocation-site-scopes.h" 12 #include "src/codegen.h" 13 #include "src/full-codegen.h" 14 #include "src/hashmap.h" 15 #include "src/hydrogen-bce.h" 16 #include "src/hydrogen-bch.h" 17 #include "src/hydrogen-canonicalize.h" 18 #include "src/hydrogen-check-elimination.h" 19 #include "src/hydrogen-dce.h" 20 #include "src/hydrogen-dehoist.h" 21 #include "src/hydrogen-environment-liveness.h" 22 #include "src/hydrogen-escape-analysis.h" 23 #include "src/hydrogen-gvn.h" 24 #include "src/hydrogen-infer-representation.h" 25 #include "src/hydrogen-infer-types.h" 26 #include "src/hydrogen-load-elimination.h" 27 #include "src/hydrogen-mark-deoptimize.h" 28 #include "src/hydrogen-mark-unreachable.h" 29 #include "src/hydrogen-osr.h" 30 #include "src/hydrogen-range-analysis.h" 31 #include "src/hydrogen-redundant-phi.h" 32 #include "src/hydrogen-removable-simulates.h" 33 #include "src/hydrogen-representation-changes.h" 34 #include "src/hydrogen-sce.h" 35 #include "src/hydrogen-store-elimination.h" 36 #include "src/hydrogen-uint32-analysis.h" 37 #include "src/ic/call-optimization.h" 38 #include "src/ic/ic.h" 39 // GetRootConstructor 40 #include "src/ic/ic-inl.h" 41 #include "src/lithium-allocator.h" 42 #include "src/parser.h" 43 #include "src/runtime.h" 44 #include "src/scopeinfo.h" 45 #include "src/scopes.h" 46 #include "src/typing.h" 47 48 #if V8_TARGET_ARCH_IA32 49 #include "src/ia32/lithium-codegen-ia32.h" // NOLINT 50 #elif V8_TARGET_ARCH_X64 51 #include "src/x64/lithium-codegen-x64.h" // NOLINT 52 #elif V8_TARGET_ARCH_ARM64 53 #include "src/arm64/lithium-codegen-arm64.h" // NOLINT 54 #elif V8_TARGET_ARCH_ARM 55 #include "src/arm/lithium-codegen-arm.h" // NOLINT 56 #elif V8_TARGET_ARCH_MIPS 57 #include 
"src/mips/lithium-codegen-mips.h" // NOLINT 58 #elif V8_TARGET_ARCH_MIPS64 59 #include "src/mips64/lithium-codegen-mips64.h" // NOLINT 60 #elif V8_TARGET_ARCH_X87 61 #include "src/x87/lithium-codegen-x87.h" // NOLINT 62 #else 63 #error Unsupported target architecture. 64 #endif 65 66 namespace v8 { 67 namespace internal { 68 69 HBasicBlock::HBasicBlock(HGraph* graph) 70 : block_id_(graph->GetNextBlockID()), 71 graph_(graph), 72 phis_(4, graph->zone()), 73 first_(NULL), 74 last_(NULL), 75 end_(NULL), 76 loop_information_(NULL), 77 predecessors_(2, graph->zone()), 78 dominator_(NULL), 79 dominated_blocks_(4, graph->zone()), 80 last_environment_(NULL), 81 argument_count_(-1), 82 first_instruction_index_(-1), 83 last_instruction_index_(-1), 84 deleted_phis_(4, graph->zone()), 85 parent_loop_header_(NULL), 86 inlined_entry_block_(NULL), 87 is_inline_return_target_(false), 88 is_reachable_(true), 89 dominates_loop_successors_(false), 90 is_osr_entry_(false), 91 is_ordered_(false) { } 92 93 94 Isolate* HBasicBlock::isolate() const { 95 return graph_->isolate(); 96 } 97 98 99 void HBasicBlock::MarkUnreachable() { 100 is_reachable_ = false; 101 } 102 103 104 void HBasicBlock::AttachLoopInformation() { 105 DCHECK(!IsLoopHeader()); 106 loop_information_ = new(zone()) HLoopInformation(this, zone()); 107 } 108 109 110 void HBasicBlock::DetachLoopInformation() { 111 DCHECK(IsLoopHeader()); 112 loop_information_ = NULL; 113 } 114 115 116 void HBasicBlock::AddPhi(HPhi* phi) { 117 DCHECK(!IsStartBlock()); 118 phis_.Add(phi, zone()); 119 phi->SetBlock(this); 120 } 121 122 123 void HBasicBlock::RemovePhi(HPhi* phi) { 124 DCHECK(phi->block() == this); 125 DCHECK(phis_.Contains(phi)); 126 phi->Kill(); 127 phis_.RemoveElement(phi); 128 phi->SetBlock(NULL); 129 } 130 131 132 void HBasicBlock::AddInstruction(HInstruction* instr, 133 HSourcePosition position) { 134 DCHECK(!IsStartBlock() || !IsFinished()); 135 DCHECK(!instr->IsLinked()); 136 DCHECK(!IsFinished()); 137 138 if 
(!position.IsUnknown()) { 139 instr->set_position(position); 140 } 141 if (first_ == NULL) { 142 DCHECK(last_environment() != NULL); 143 DCHECK(!last_environment()->ast_id().IsNone()); 144 HBlockEntry* entry = new(zone()) HBlockEntry(); 145 entry->InitializeAsFirst(this); 146 if (!position.IsUnknown()) { 147 entry->set_position(position); 148 } else { 149 DCHECK(!FLAG_hydrogen_track_positions || 150 !graph()->info()->IsOptimizing()); 151 } 152 first_ = last_ = entry; 153 } 154 instr->InsertAfter(last_); 155 } 156 157 158 HPhi* HBasicBlock::AddNewPhi(int merged_index) { 159 if (graph()->IsInsideNoSideEffectsScope()) { 160 merged_index = HPhi::kInvalidMergedIndex; 161 } 162 HPhi* phi = new(zone()) HPhi(merged_index, zone()); 163 AddPhi(phi); 164 return phi; 165 } 166 167 168 HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id, 169 RemovableSimulate removable) { 170 DCHECK(HasEnvironment()); 171 HEnvironment* environment = last_environment(); 172 DCHECK(ast_id.IsNone() || 173 ast_id == BailoutId::StubEntry() || 174 environment->closure()->shared()->VerifyBailoutId(ast_id)); 175 176 int push_count = environment->push_count(); 177 int pop_count = environment->pop_count(); 178 179 HSimulate* instr = 180 new(zone()) HSimulate(ast_id, pop_count, zone(), removable); 181 #ifdef DEBUG 182 instr->set_closure(environment->closure()); 183 #endif 184 // Order of pushed values: newest (top of stack) first. This allows 185 // HSimulate::MergeWith() to easily append additional pushed values 186 // that are older (from further down the stack). 
187 for (int i = 0; i < push_count; ++i) { 188 instr->AddPushedValue(environment->ExpressionStackAt(i)); 189 } 190 for (GrowableBitVector::Iterator it(environment->assigned_variables(), 191 zone()); 192 !it.Done(); 193 it.Advance()) { 194 int index = it.Current(); 195 instr->AddAssignedValue(index, environment->Lookup(index)); 196 } 197 environment->ClearHistory(); 198 return instr; 199 } 200 201 202 void HBasicBlock::Finish(HControlInstruction* end, HSourcePosition position) { 203 DCHECK(!IsFinished()); 204 AddInstruction(end, position); 205 end_ = end; 206 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) { 207 it.Current()->RegisterPredecessor(this); 208 } 209 } 210 211 212 void HBasicBlock::Goto(HBasicBlock* block, 213 HSourcePosition position, 214 FunctionState* state, 215 bool add_simulate) { 216 bool drop_extra = state != NULL && 217 state->inlining_kind() == NORMAL_RETURN; 218 219 if (block->IsInlineReturnTarget()) { 220 HEnvironment* env = last_environment(); 221 int argument_count = env->arguments_environment()->parameter_count(); 222 AddInstruction(new(zone()) 223 HLeaveInlined(state->entry(), argument_count), 224 position); 225 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra)); 226 } 227 228 if (add_simulate) AddNewSimulate(BailoutId::None(), position); 229 HGoto* instr = new(zone()) HGoto(block); 230 Finish(instr, position); 231 } 232 233 234 void HBasicBlock::AddLeaveInlined(HValue* return_value, 235 FunctionState* state, 236 HSourcePosition position) { 237 HBasicBlock* target = state->function_return(); 238 bool drop_extra = state->inlining_kind() == NORMAL_RETURN; 239 240 DCHECK(target->IsInlineReturnTarget()); 241 DCHECK(return_value != NULL); 242 HEnvironment* env = last_environment(); 243 int argument_count = env->arguments_environment()->parameter_count(); 244 AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count), 245 position); 246 UpdateEnvironment(last_environment()->DiscardInlined(drop_extra)); 247 
last_environment()->Push(return_value); 248 AddNewSimulate(BailoutId::None(), position); 249 HGoto* instr = new(zone()) HGoto(target); 250 Finish(instr, position); 251 } 252 253 254 void HBasicBlock::SetInitialEnvironment(HEnvironment* env) { 255 DCHECK(!HasEnvironment()); 256 DCHECK(first() == NULL); 257 UpdateEnvironment(env); 258 } 259 260 261 void HBasicBlock::UpdateEnvironment(HEnvironment* env) { 262 last_environment_ = env; 263 graph()->update_maximum_environment_size(env->first_expression_index()); 264 } 265 266 267 void HBasicBlock::SetJoinId(BailoutId ast_id) { 268 int length = predecessors_.length(); 269 DCHECK(length > 0); 270 for (int i = 0; i < length; i++) { 271 HBasicBlock* predecessor = predecessors_[i]; 272 DCHECK(predecessor->end()->IsGoto()); 273 HSimulate* simulate = HSimulate::cast(predecessor->end()->previous()); 274 DCHECK(i != 0 || 275 (predecessor->last_environment()->closure().is_null() || 276 predecessor->last_environment()->closure()->shared() 277 ->VerifyBailoutId(ast_id))); 278 simulate->set_ast_id(ast_id); 279 predecessor->last_environment()->set_ast_id(ast_id); 280 } 281 } 282 283 284 bool HBasicBlock::Dominates(HBasicBlock* other) const { 285 HBasicBlock* current = other->dominator(); 286 while (current != NULL) { 287 if (current == this) return true; 288 current = current->dominator(); 289 } 290 return false; 291 } 292 293 294 bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const { 295 if (this == other) return true; 296 return Dominates(other); 297 } 298 299 300 int HBasicBlock::LoopNestingDepth() const { 301 const HBasicBlock* current = this; 302 int result = (current->IsLoopHeader()) ? 
1 : 0; 303 while (current->parent_loop_header() != NULL) { 304 current = current->parent_loop_header(); 305 result++; 306 } 307 return result; 308 } 309 310 311 void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) { 312 DCHECK(IsLoopHeader()); 313 314 SetJoinId(stmt->EntryId()); 315 if (predecessors()->length() == 1) { 316 // This is a degenerated loop. 317 DetachLoopInformation(); 318 return; 319 } 320 321 // Only the first entry into the loop is from outside the loop. All other 322 // entries must be back edges. 323 for (int i = 1; i < predecessors()->length(); ++i) { 324 loop_information()->RegisterBackEdge(predecessors()->at(i)); 325 } 326 } 327 328 329 void HBasicBlock::MarkSuccEdgeUnreachable(int succ) { 330 DCHECK(IsFinished()); 331 HBasicBlock* succ_block = end()->SuccessorAt(succ); 332 333 DCHECK(succ_block->predecessors()->length() == 1); 334 succ_block->MarkUnreachable(); 335 } 336 337 338 void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) { 339 if (HasPredecessor()) { 340 // Only loop header blocks can have a predecessor added after 341 // instructions have been added to the block (they have phis for all 342 // values in the environment, these phis may be eliminated later). 
343 DCHECK(IsLoopHeader() || first_ == NULL); 344 HEnvironment* incoming_env = pred->last_environment(); 345 if (IsLoopHeader()) { 346 DCHECK(phis()->length() == incoming_env->length()); 347 for (int i = 0; i < phis_.length(); ++i) { 348 phis_[i]->AddInput(incoming_env->values()->at(i)); 349 } 350 } else { 351 last_environment()->AddIncomingEdge(this, pred->last_environment()); 352 } 353 } else if (!HasEnvironment() && !IsFinished()) { 354 DCHECK(!IsLoopHeader()); 355 SetInitialEnvironment(pred->last_environment()->Copy()); 356 } 357 358 predecessors_.Add(pred, zone()); 359 } 360 361 362 void HBasicBlock::AddDominatedBlock(HBasicBlock* block) { 363 DCHECK(!dominated_blocks_.Contains(block)); 364 // Keep the list of dominated blocks sorted such that if there is two 365 // succeeding block in this list, the predecessor is before the successor. 366 int index = 0; 367 while (index < dominated_blocks_.length() && 368 dominated_blocks_[index]->block_id() < block->block_id()) { 369 ++index; 370 } 371 dominated_blocks_.InsertAt(index, block, zone()); 372 } 373 374 375 void HBasicBlock::AssignCommonDominator(HBasicBlock* other) { 376 if (dominator_ == NULL) { 377 dominator_ = other; 378 other->AddDominatedBlock(this); 379 } else if (other->dominator() != NULL) { 380 HBasicBlock* first = dominator_; 381 HBasicBlock* second = other; 382 383 while (first != second) { 384 if (first->block_id() > second->block_id()) { 385 first = first->dominator(); 386 } else { 387 second = second->dominator(); 388 } 389 DCHECK(first != NULL && second != NULL); 390 } 391 392 if (dominator_ != first) { 393 DCHECK(dominator_->dominated_blocks_.Contains(this)); 394 dominator_->dominated_blocks_.RemoveElement(this); 395 dominator_ = first; 396 first->AddDominatedBlock(this); 397 } 398 } 399 } 400 401 402 void HBasicBlock::AssignLoopSuccessorDominators() { 403 // Mark blocks that dominate all subsequent reachable blocks inside their 404 // loop. 
Exploit the fact that blocks are sorted in reverse post order. When 405 // the loop is visited in increasing block id order, if the number of 406 // non-loop-exiting successor edges at the dominator_candidate block doesn't 407 // exceed the number of previously encountered predecessor edges, there is no 408 // path from the loop header to any block with higher id that doesn't go 409 // through the dominator_candidate block. In this case, the 410 // dominator_candidate block is guaranteed to dominate all blocks reachable 411 // from it with higher ids. 412 HBasicBlock* last = loop_information()->GetLastBackEdge(); 413 int outstanding_successors = 1; // one edge from the pre-header 414 // Header always dominates everything. 415 MarkAsLoopSuccessorDominator(); 416 for (int j = block_id(); j <= last->block_id(); ++j) { 417 HBasicBlock* dominator_candidate = graph_->blocks()->at(j); 418 for (HPredecessorIterator it(dominator_candidate); !it.Done(); 419 it.Advance()) { 420 HBasicBlock* predecessor = it.Current(); 421 // Don't count back edges. 422 if (predecessor->block_id() < dominator_candidate->block_id()) { 423 outstanding_successors--; 424 } 425 } 426 427 // If more successors than predecessors have been seen in the loop up to 428 // now, it's not possible to guarantee that the current block dominates 429 // all of the blocks with higher IDs. In this case, assume conservatively 430 // that those paths through loop that don't go through the current block 431 // contain all of the loop's dependencies. Also be careful to record 432 // dominator information about the current loop that's being processed, 433 // and not nested loops, which will be processed when 434 // AssignLoopSuccessorDominators gets called on their header. 
435 DCHECK(outstanding_successors >= 0); 436 HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header(); 437 if (outstanding_successors == 0 && 438 (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) { 439 dominator_candidate->MarkAsLoopSuccessorDominator(); 440 } 441 HControlInstruction* end = dominator_candidate->end(); 442 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) { 443 HBasicBlock* successor = it.Current(); 444 // Only count successors that remain inside the loop and don't loop back 445 // to a loop header. 446 if (successor->block_id() > dominator_candidate->block_id() && 447 successor->block_id() <= last->block_id()) { 448 // Backwards edges must land on loop headers. 449 DCHECK(successor->block_id() > dominator_candidate->block_id() || 450 successor->IsLoopHeader()); 451 outstanding_successors++; 452 } 453 } 454 } 455 } 456 457 458 int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const { 459 for (int i = 0; i < predecessors_.length(); ++i) { 460 if (predecessors_[i] == predecessor) return i; 461 } 462 UNREACHABLE(); 463 return -1; 464 } 465 466 467 #ifdef DEBUG 468 void HBasicBlock::Verify() { 469 // Check that every block is finished. 470 DCHECK(IsFinished()); 471 DCHECK(block_id() >= 0); 472 473 // Check that the incoming edges are in edge split form. 
474 if (predecessors_.length() > 1) { 475 for (int i = 0; i < predecessors_.length(); ++i) { 476 DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL); 477 } 478 } 479 } 480 #endif 481 482 483 void HLoopInformation::RegisterBackEdge(HBasicBlock* block) { 484 this->back_edges_.Add(block, block->zone()); 485 AddBlock(block); 486 } 487 488 489 HBasicBlock* HLoopInformation::GetLastBackEdge() const { 490 int max_id = -1; 491 HBasicBlock* result = NULL; 492 for (int i = 0; i < back_edges_.length(); ++i) { 493 HBasicBlock* cur = back_edges_[i]; 494 if (cur->block_id() > max_id) { 495 max_id = cur->block_id(); 496 result = cur; 497 } 498 } 499 return result; 500 } 501 502 503 void HLoopInformation::AddBlock(HBasicBlock* block) { 504 if (block == loop_header()) return; 505 if (block->parent_loop_header() == loop_header()) return; 506 if (block->parent_loop_header() != NULL) { 507 AddBlock(block->parent_loop_header()); 508 } else { 509 block->set_parent_loop_header(loop_header()); 510 blocks_.Add(block, block->zone()); 511 for (int i = 0; i < block->predecessors()->length(); ++i) { 512 AddBlock(block->predecessors()->at(i)); 513 } 514 } 515 } 516 517 518 #ifdef DEBUG 519 520 // Checks reachability of the blocks in this graph and stores a bit in 521 // the BitVector "reachable()" for every block that can be reached 522 // from the start block of the graph. If "dont_visit" is non-null, the given 523 // block is treated as if it would not be part of the graph. "visited_count()" 524 // returns the number of reachable blocks. 
525 class ReachabilityAnalyzer BASE_EMBEDDED { 526 public: 527 ReachabilityAnalyzer(HBasicBlock* entry_block, 528 int block_count, 529 HBasicBlock* dont_visit) 530 : visited_count_(0), 531 stack_(16, entry_block->zone()), 532 reachable_(block_count, entry_block->zone()), 533 dont_visit_(dont_visit) { 534 PushBlock(entry_block); 535 Analyze(); 536 } 537 538 int visited_count() const { return visited_count_; } 539 const BitVector* reachable() const { return &reachable_; } 540 541 private: 542 void PushBlock(HBasicBlock* block) { 543 if (block != NULL && block != dont_visit_ && 544 !reachable_.Contains(block->block_id())) { 545 reachable_.Add(block->block_id()); 546 stack_.Add(block, block->zone()); 547 visited_count_++; 548 } 549 } 550 551 void Analyze() { 552 while (!stack_.is_empty()) { 553 HControlInstruction* end = stack_.RemoveLast()->end(); 554 for (HSuccessorIterator it(end); !it.Done(); it.Advance()) { 555 PushBlock(it.Current()); 556 } 557 } 558 } 559 560 int visited_count_; 561 ZoneList<HBasicBlock*> stack_; 562 BitVector reachable_; 563 HBasicBlock* dont_visit_; 564 }; 565 566 567 void HGraph::Verify(bool do_full_verify) const { 568 Heap::RelocationLock relocation_lock(isolate()->heap()); 569 AllowHandleDereference allow_deref; 570 AllowDeferredHandleDereference allow_deferred_deref; 571 for (int i = 0; i < blocks_.length(); i++) { 572 HBasicBlock* block = blocks_.at(i); 573 574 block->Verify(); 575 576 // Check that every block contains at least one node and that only the last 577 // node is a control instruction. 578 HInstruction* current = block->first(); 579 DCHECK(current != NULL && current->IsBlockEntry()); 580 while (current != NULL) { 581 DCHECK((current->next() == NULL) == current->IsControlInstruction()); 582 DCHECK(current->block() == block); 583 current->Verify(); 584 current = current->next(); 585 } 586 587 // Check that successors are correctly set. 
588 HBasicBlock* first = block->end()->FirstSuccessor(); 589 HBasicBlock* second = block->end()->SecondSuccessor(); 590 DCHECK(second == NULL || first != NULL); 591 592 // Check that the predecessor array is correct. 593 if (first != NULL) { 594 DCHECK(first->predecessors()->Contains(block)); 595 if (second != NULL) { 596 DCHECK(second->predecessors()->Contains(block)); 597 } 598 } 599 600 // Check that phis have correct arguments. 601 for (int j = 0; j < block->phis()->length(); j++) { 602 HPhi* phi = block->phis()->at(j); 603 phi->Verify(); 604 } 605 606 // Check that all join blocks have predecessors that end with an 607 // unconditional goto and agree on their environment node id. 608 if (block->predecessors()->length() >= 2) { 609 BailoutId id = 610 block->predecessors()->first()->last_environment()->ast_id(); 611 for (int k = 0; k < block->predecessors()->length(); k++) { 612 HBasicBlock* predecessor = block->predecessors()->at(k); 613 DCHECK(predecessor->end()->IsGoto() || 614 predecessor->end()->IsDeoptimize()); 615 DCHECK(predecessor->last_environment()->ast_id() == id); 616 } 617 } 618 } 619 620 // Check special property of first block to have no predecessors. 621 DCHECK(blocks_.at(0)->predecessors()->is_empty()); 622 623 if (do_full_verify) { 624 // Check that the graph is fully connected. 625 ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL); 626 DCHECK(analyzer.visited_count() == blocks_.length()); 627 628 // Check that entry block dominator is NULL. 629 DCHECK(entry_block_->dominator() == NULL); 630 631 // Check dominators. 632 for (int i = 0; i < blocks_.length(); ++i) { 633 HBasicBlock* block = blocks_.at(i); 634 if (block->dominator() == NULL) { 635 // Only start block may have no dominator assigned to. 636 DCHECK(i == 0); 637 } else { 638 // Assert that block is unreachable if dominator must not be visited. 
639 ReachabilityAnalyzer dominator_analyzer(entry_block_, 640 blocks_.length(), 641 block->dominator()); 642 DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id())); 643 } 644 } 645 } 646 } 647 648 #endif 649 650 651 HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer, 652 int32_t value) { 653 if (!pointer->is_set()) { 654 // Can't pass GetInvalidContext() to HConstant::New, because that will 655 // recursively call GetConstant 656 HConstant* constant = HConstant::New(zone(), NULL, value); 657 constant->InsertAfter(entry_block()->first()); 658 pointer->set(constant); 659 return constant; 660 } 661 return ReinsertConstantIfNecessary(pointer->get()); 662 } 663 664 665 HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) { 666 if (!constant->IsLinked()) { 667 // The constant was removed from the graph. Reinsert. 668 constant->ClearFlag(HValue::kIsDead); 669 constant->InsertAfter(entry_block()->first()); 670 } 671 return constant; 672 } 673 674 675 HConstant* HGraph::GetConstant0() { 676 return GetConstant(&constant_0_, 0); 677 } 678 679 680 HConstant* HGraph::GetConstant1() { 681 return GetConstant(&constant_1_, 1); 682 } 683 684 685 HConstant* HGraph::GetConstantMinus1() { 686 return GetConstant(&constant_minus1_, -1); 687 } 688 689 690 #define DEFINE_GET_CONSTANT(Name, name, type, htype, boolean_value) \ 691 HConstant* HGraph::GetConstant##Name() { \ 692 if (!constant_##name##_.is_set()) { \ 693 HConstant* constant = new(zone()) HConstant( \ 694 Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \ 695 Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()), \ 696 false, \ 697 Representation::Tagged(), \ 698 htype, \ 699 true, \ 700 boolean_value, \ 701 false, \ 702 ODDBALL_TYPE); \ 703 constant->InsertAfter(entry_block()->first()); \ 704 constant_##name##_.set(constant); \ 705 } \ 706 return ReinsertConstantIfNecessary(constant_##name##_.get()); \ 707 } 708 709 710 DEFINE_GET_CONSTANT(Undefined, 
undefined, undefined, HType::Undefined(), false) 711 DEFINE_GET_CONSTANT(True, true, boolean, HType::Boolean(), true) 712 DEFINE_GET_CONSTANT(False, false, boolean, HType::Boolean(), false) 713 DEFINE_GET_CONSTANT(Hole, the_hole, the_hole, HType::None(), false) 714 DEFINE_GET_CONSTANT(Null, null, null, HType::Null(), false) 715 716 717 #undef DEFINE_GET_CONSTANT 718 719 #define DEFINE_IS_CONSTANT(Name, name) \ 720 bool HGraph::IsConstant##Name(HConstant* constant) { \ 721 return constant_##name##_.is_set() && constant == constant_##name##_.get(); \ 722 } 723 DEFINE_IS_CONSTANT(Undefined, undefined) 724 DEFINE_IS_CONSTANT(0, 0) 725 DEFINE_IS_CONSTANT(1, 1) 726 DEFINE_IS_CONSTANT(Minus1, minus1) 727 DEFINE_IS_CONSTANT(True, true) 728 DEFINE_IS_CONSTANT(False, false) 729 DEFINE_IS_CONSTANT(Hole, the_hole) 730 DEFINE_IS_CONSTANT(Null, null) 731 732 #undef DEFINE_IS_CONSTANT 733 734 735 HConstant* HGraph::GetInvalidContext() { 736 return GetConstant(&constant_invalid_context_, 0xFFFFC0C7); 737 } 738 739 740 bool HGraph::IsStandardConstant(HConstant* constant) { 741 if (IsConstantUndefined(constant)) return true; 742 if (IsConstant0(constant)) return true; 743 if (IsConstant1(constant)) return true; 744 if (IsConstantMinus1(constant)) return true; 745 if (IsConstantTrue(constant)) return true; 746 if (IsConstantFalse(constant)) return true; 747 if (IsConstantHole(constant)) return true; 748 if (IsConstantNull(constant)) return true; 749 return false; 750 } 751 752 753 HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {} 754 755 756 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder) 757 : needs_compare_(true) { 758 Initialize(builder); 759 } 760 761 762 HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder, 763 HIfContinuation* continuation) 764 : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) { 765 InitializeDontCreateBlocks(builder); 766 continuation->Continue(&first_true_block_, &first_false_block_); 
767 } 768 769 770 void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks( 771 HGraphBuilder* builder) { 772 builder_ = builder; 773 finished_ = false; 774 did_then_ = false; 775 did_else_ = false; 776 did_else_if_ = false; 777 did_and_ = false; 778 did_or_ = false; 779 captured_ = false; 780 pending_merge_block_ = false; 781 split_edge_merge_block_ = NULL; 782 merge_at_join_blocks_ = NULL; 783 normal_merge_at_join_block_count_ = 0; 784 deopt_merge_at_join_block_count_ = 0; 785 } 786 787 788 void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) { 789 InitializeDontCreateBlocks(builder); 790 HEnvironment* env = builder->environment(); 791 first_true_block_ = builder->CreateBasicBlock(env->Copy()); 792 first_false_block_ = builder->CreateBasicBlock(env->Copy()); 793 } 794 795 796 HControlInstruction* HGraphBuilder::IfBuilder::AddCompare( 797 HControlInstruction* compare) { 798 DCHECK(did_then_ == did_else_); 799 if (did_else_) { 800 // Handle if-then-elseif 801 did_else_if_ = true; 802 did_else_ = false; 803 did_then_ = false; 804 did_and_ = false; 805 did_or_ = false; 806 pending_merge_block_ = false; 807 split_edge_merge_block_ = NULL; 808 HEnvironment* env = builder()->environment(); 809 first_true_block_ = builder()->CreateBasicBlock(env->Copy()); 810 first_false_block_ = builder()->CreateBasicBlock(env->Copy()); 811 } 812 if (split_edge_merge_block_ != NULL) { 813 HEnvironment* env = first_false_block_->last_environment(); 814 HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy()); 815 if (did_or_) { 816 compare->SetSuccessorAt(0, split_edge); 817 compare->SetSuccessorAt(1, first_false_block_); 818 } else { 819 compare->SetSuccessorAt(0, first_true_block_); 820 compare->SetSuccessorAt(1, split_edge); 821 } 822 builder()->GotoNoSimulate(split_edge, split_edge_merge_block_); 823 } else { 824 compare->SetSuccessorAt(0, first_true_block_); 825 compare->SetSuccessorAt(1, first_false_block_); 826 } 827 builder()->FinishCurrentBlock(compare); 
828 needs_compare_ = false; 829 return compare; 830 } 831 832 833 void HGraphBuilder::IfBuilder::Or() { 834 DCHECK(!needs_compare_); 835 DCHECK(!did_and_); 836 did_or_ = true; 837 HEnvironment* env = first_false_block_->last_environment(); 838 if (split_edge_merge_block_ == NULL) { 839 split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy()); 840 builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_); 841 first_true_block_ = split_edge_merge_block_; 842 } 843 builder()->set_current_block(first_false_block_); 844 first_false_block_ = builder()->CreateBasicBlock(env->Copy()); 845 } 846 847 848 void HGraphBuilder::IfBuilder::And() { 849 DCHECK(!needs_compare_); 850 DCHECK(!did_or_); 851 did_and_ = true; 852 HEnvironment* env = first_false_block_->last_environment(); 853 if (split_edge_merge_block_ == NULL) { 854 split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy()); 855 builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_); 856 first_false_block_ = split_edge_merge_block_; 857 } 858 builder()->set_current_block(first_true_block_); 859 first_true_block_ = builder()->CreateBasicBlock(env->Copy()); 860 } 861 862 863 void HGraphBuilder::IfBuilder::CaptureContinuation( 864 HIfContinuation* continuation) { 865 DCHECK(!did_else_if_); 866 DCHECK(!finished_); 867 DCHECK(!captured_); 868 869 HBasicBlock* true_block = NULL; 870 HBasicBlock* false_block = NULL; 871 Finish(&true_block, &false_block); 872 DCHECK(true_block != NULL); 873 DCHECK(false_block != NULL); 874 continuation->Capture(true_block, false_block); 875 captured_ = true; 876 builder()->set_current_block(NULL); 877 End(); 878 } 879 880 881 void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) { 882 DCHECK(!did_else_if_); 883 DCHECK(!finished_); 884 DCHECK(!captured_); 885 HBasicBlock* true_block = NULL; 886 HBasicBlock* false_block = NULL; 887 Finish(&true_block, &false_block); 888 merge_at_join_blocks_ = NULL; 889 if (true_block != NULL 
&& !true_block->IsFinished()) { 890 DCHECK(continuation->IsTrueReachable()); 891 builder()->GotoNoSimulate(true_block, continuation->true_branch()); 892 } 893 if (false_block != NULL && !false_block->IsFinished()) { 894 DCHECK(continuation->IsFalseReachable()); 895 builder()->GotoNoSimulate(false_block, continuation->false_branch()); 896 } 897 captured_ = true; 898 End(); 899 } 900 901 902 void HGraphBuilder::IfBuilder::Then() { 903 DCHECK(!captured_); 904 DCHECK(!finished_); 905 did_then_ = true; 906 if (needs_compare_) { 907 // Handle if's without any expressions, they jump directly to the "else" 908 // branch. However, we must pretend that the "then" branch is reachable, 909 // so that the graph builder visits it and sees any live range extending 910 // constructs within it. 911 HConstant* constant_false = builder()->graph()->GetConstantFalse(); 912 ToBooleanStub::Types boolean_type = ToBooleanStub::Types(); 913 boolean_type.Add(ToBooleanStub::BOOLEAN); 914 HBranch* branch = builder()->New<HBranch>( 915 constant_false, boolean_type, first_true_block_, first_false_block_); 916 builder()->FinishCurrentBlock(branch); 917 } 918 builder()->set_current_block(first_true_block_); 919 pending_merge_block_ = true; 920 } 921 922 923 void HGraphBuilder::IfBuilder::Else() { 924 DCHECK(did_then_); 925 DCHECK(!captured_); 926 DCHECK(!finished_); 927 AddMergeAtJoinBlock(false); 928 builder()->set_current_block(first_false_block_); 929 pending_merge_block_ = true; 930 did_else_ = true; 931 } 932 933 934 void HGraphBuilder::IfBuilder::Deopt(const char* reason) { 935 DCHECK(did_then_); 936 builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER); 937 AddMergeAtJoinBlock(true); 938 } 939 940 941 void HGraphBuilder::IfBuilder::Return(HValue* value) { 942 HValue* parameter_count = builder()->graph()->GetConstantMinus1(); 943 builder()->FinishExitCurrentBlock( 944 builder()->New<HReturn>(value, parameter_count)); 945 AddMergeAtJoinBlock(false); 946 } 947 948 949 void 
HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
  // Records the current block (possibly NULL if the clause exited) on the
  // merge_at_join_blocks_ list so End() can join all clauses later.
  if (!pending_merge_block_) return;
  HBasicBlock* block = builder()->current_block();
  DCHECK(block == NULL || !block->IsFinished());
  MergeAtJoinBlock* record = new (builder()->zone())
      MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
  merge_at_join_blocks_ = record;
  if (block != NULL) {
    DCHECK(block->end() == NULL);
    // NOTE(review): when |deopt| is true the *normal* counter is incremented
    // and vice versa — the two names appear swapped. End() only ever uses
    // their sum, so behavior is unaffected; confirm before renaming.
    if (deopt) {
      normal_merge_at_join_block_count_++;
    } else {
      deopt_merge_at_join_block_count_++;
    }
  }
  builder()->set_current_block(NULL);
  pending_merge_block_ = false;
}


// Ensures both clauses exist and have been recorded for the join; after this
// no more clause code may be added.
void HGraphBuilder::IfBuilder::Finish() {
  DCHECK(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}


// Finish() variant that also hands back the raw then/else continuation
// blocks (records are in reverse order: else first, then then).
void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  DCHECK(then_record->next_ == NULL);
}


// Joins all recorded clause blocks into a single merge block (or reuses the
// sole surviving block when only one clause reaches the end).
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
      deopt_merge_at_join_block_count_;
  DCHECK(total_merged_blocks >= 1);
  // With a single surviving block no merge block is needed at all.
  HBasicBlock* merge_block =
      total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather then
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder()->set_current_block(current->block_);
        return;
      }
      builder()->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      current->block_->FinishExit(HAbnormalExit::New(builder()->zone(), NULL),
                                  HSourcePosition::Unknown());
    }
    current = current->next_;
  }
  builder()->set_current_block(merge_block);
}


// Unconditional ("while true") loop: no context, no induction variable.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
  Initialize(builder, NULL, kWhileTrue, NULL);
}


// Counting loop with the default increment amount of 1.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
                                        LoopBuilder::Direction direction) {
  Initialize(builder, context, direction, builder->graph()->GetConstant1());
}


// Counting loop with an explicit increment amount.
HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
                                        LoopBuilder::Direction direction,
                                        HValue* increment_amount) {
  Initialize(builder, context, direction, increment_amount);
  increment_amount_ = increment_amount;
}


// Shared constructor body: stores configuration and creates the loop header
// block up front (body/exit blocks are created lazily in BeginBody/Break).
void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
                                            HValue* context,
                                            Direction direction,
                                            HValue* increment_amount) {
  builder_ = builder;
  context_ = context;
  direction_ = direction;
  increment_amount_ = increment_amount;

  finished_ = false;
  header_block_ = builder->CreateLoopHeaderBlock();
  body_block_ = NULL;
exit_block_ = NULL;
  exit_trampoline_block_ = NULL;
}


// Begins the body of a counting loop: sets up the induction-variable phi in
// the header, emits the |phi token terminating| branch, and returns the
// value to use inside the body (pre-incremented/decremented if requested).
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  DCHECK(direction_ != kWhileTrue);
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
      phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    // Pre-increment/decrement: advance the induction variable before the
    // body runs and hand the advanced value to the body.
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}


// Begins the body of a kWhileTrue loop; |drop_count| expression-stack
// entries are dropped to keep environments mergeable without simulates.
void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
  DCHECK(direction_ == kWhileTrue);
  HEnvironment* env = builder_->environment();
  builder_->GotoNoSimulate(header_block_);
  builder_->set_current_block(header_block_);
  env->Drop(drop_count);
}


// Jumps out of the loop, lazily creating the shared exit trampoline block on
// the first break; leaves no current block afterwards.
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    if (direction_ == kWhileTrue) {
      HEnvironment* env = builder_->environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    } else {
      HEnvironment* env = exit_block_->last_environment()->Copy();
      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
    }
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  builder_->set_current_block(NULL);
}


// Closes the loop body: applies a post-increment/decrement if configured,
// wires the back edge to the header, and resumes at the exit block.
void HGraphBuilder::LoopBuilder::EndBody() {
  DCHECK(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  if (direction_ != kWhileTrue) {
    // Push the new increment value on the expression stack to merge into
    // the phi.
builder_->environment()->Push(increment_);
  }
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  // Continue after the loop: at the break trampoline if any break was seen,
  // otherwise at the regular exit block.
  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}


// Creates the HGraph for this builder's compilation info and runs graph
// building; returns NULL if BuildGraph() bails out.
HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}


// Appends |instr| to the current block at the current source position and
// returns it; instructions added inside a no-side-effects scope are flagged
// as having no observable side effects.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  DCHECK(current_block() != NULL);
  DCHECK(!FLAG_hydrogen_track_positions ||
         !position_.IsUnknown() ||
         !info_->IsOptimizing());
  current_block()->AddInstruction(instr, source_position());
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}


// Terminates the current block with control instruction |last|; clears the
// current block when control cannot fall through (return/abnormal exit).
void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
  DCHECK(!FLAG_hydrogen_track_positions ||
         !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->Finish(last, source_position());
  if (last->IsReturn() || last->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}


// Like FinishCurrentBlock, but uses FinishExit (drops the environment on the
// way out of the graph).
void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
         !position_.IsUnknown());
  current_block()->FinishExit(instruction, source_position());
  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
    set_current_block(NULL);
  }
}


void
HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  // Emits a load/add/store sequence that bumps |counter| in generated code;
  // a no-op unless native code counters are enabled and the counter is live.
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value = Add<HLoadNamedField>(
        reference, static_cast<HValue*>(NULL), HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value, STORE_TO_INITIALIZED_ENTRY);
  }
}


// Adds a simulate (deopt state snapshot) for bailout point |id| to the
// current block.
void HGraphBuilder::AddSimulate(BailoutId id,
                                RemovableSimulate removable) {
  DCHECK(current_block() != NULL);
  DCHECK(!graph()->IsInsideNoSideEffectsScope());
  current_block()->AddNewSimulate(id, source_position(), removable);
}


// Creates a basic block seeded with the given initial environment.
HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
  HBasicBlock* b = graph()->CreateBasicBlock();
  b->SetInitialEnvironment(env);
  return b;
}


// Creates a loop header block whose environment is a loop-header copy of the
// current environment, and attaches loop information to it.
HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
  HBasicBlock* header = graph()->CreateBasicBlock();
  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
  header->SetInitialEnvironment(entry_env);
  header->AttachLoopInformation();
  return header;
}


// Loads |object|'s elements kind by decoding the ElementsKindBits field out
// of the map's bit_field2.
HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());

  HValue* bit_field2 = Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                                            HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}


// Emits a heap-object check for |obj| unless its type is already known to be
// a heap object (in which case the value is returned unchanged).
HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
  if (obj->type().IsHeapObject()) return obj;
  return Add<HCheckHeapObject>(obj);
}


// Emits an eager deopt and finishes the current block with an abnormal exit.
void HGraphBuilder::FinishExitWithHardDeoptimization(const char* reason) {
Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  FinishExitCurrentBlock(New<HAbnormalExit>());
}


// Ensures |string| is a string: emits heap-object + instance-type checks
// unless its type is already known to be String.
HValue* HGraphBuilder::BuildCheckString(HValue* string) {
  if (!string->type().IsString()) {
    DCHECK(!string->IsConstant() ||
           !HConstant::cast(string)->HasStringValue());
    BuildCheckHeapObject(string);
    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  }
  return string;
}


// Wraps a receiver for a sloppy-mode call. Known JSObjects, and receivers of
// constant strict-mode or native functions, are passed through unchanged.
HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
  if (object->type().IsJSObject()) return object;
  if (function->IsConstant() &&
      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
    Handle<JSFunction> f = Handle<JSFunction>::cast(
        HConstant::cast(function)->handle(isolate()));
    SharedFunctionInfo* shared = f->shared();
    if (shared->strict_mode() == STRICT || shared->native()) return object;
  }
  return Add<HWrapReceiver>(object, function);
}


// Emits code that, on an out-of-bounds store key, grows the elements backing
// store (bounded by JSObject::kMaxGap) and, for JSArrays, bumps the length.
// Leaves the (possibly new) elements array as the result.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(
    HValue* object,
    HValue* elements,
    ElementsKind kind,
    HValue* length,
    HValue* key,
    bool is_js_array,
    PropertyAccessType access_type) {
  IfBuilder length_checker(this);

  // Holey kinds may grow by more than one element at a time; packed kinds
  // only ever append exactly at |length|.
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  IfBuilder capacity_checker(this);

  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                Token::GTE);
  capacity_checker.Then();

  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);

  Add<HBoundsCheck>(key, max_capacity);

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
                                                   kind, kind, length,
                                                   new_capacity);

  environment()->Push(new_elements);
  capacity_checker.Else();

  environment()->Push(elements);
  capacity_checker.End();

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
    HValue* checked_elements = environment()->Top();

    // Write zero to ensure that the new element is initialized with some smi.
    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), kind);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}


// Copy-on-write support: if |elements| is the COW fixed array map, copies it
// (via a same-kind capacity grow) before mutation; returns the writable
// elements array.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}


// Transitions |object| from |from_kind| to |to_kind|: traps allocation
// mementos when tracking demands it, re-allocates/converts the backing store
// for non-simple transitions, then installs the new |map|.
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    // The empty fixed array needs no conversion, whatever the kinds.
    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    HInstruction* array_length = is_jsarray
        ?
Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                             HObjectAccess::ForArrayLength(from_kind))
        : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}


// Verifies that |receiver| is a JSObject (in the JS_OBJECT_TYPE..
// LAST_JS_OBJECT_TYPE instance-type range) and that none of the bits in
// |bit_field_mask| are set in its map's bit field; deopts otherwise via the
// bounds check.
void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                       int bit_field_mask) {
  // Check that the object isn't a smi.
  Add<HCheckHeapObject>(receiver);

  // Get the map of the receiver.
  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());

  // Check the instance type and if an access check is needed, this can be
  // done with a single load, since both bytes are adjacent in the map.
  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
  HValue* instance_type_and_bit_field =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), access);

  // Low byte: instance type; high byte: the bit-field bits being tested.
  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
                                             instance_type_and_bit_field,
                                             mask);
  // A single unsigned bounds check covers both "type in range" and "no
  // masked bit-field bits set" (any set high bit pushes the value out of
  // range).
  HValue* sub_result = AddUncasted<HSub>(and_result,
                                         Add<HConstant>(JS_OBJECT_TYPE));
  Add<HBoundsCheck>(sub_result,
                    Add<HConstant>(LAST_JS_OBJECT_TYPE + 1 - JS_OBJECT_TYPE));
}


// Normalizes |key| for keyed access: pushes either a smi element index (true
// branch of |join_continuation|) or a unique name (false branch).
void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
                                         HIfContinuation* join_continuation) {
  // The sometimes unintuitively backward ordering of the ifs below is
  // convoluted, but necessary. All of the paths must guarantee that the
  // if-true of the continuation returns a smi element index and the if-false of
  // the continuation returns either a symbol or a unique string key. All other
  // object types cause a deopt to fall back to the runtime.
IfBuilder key_smi_if(this);
  key_smi_if.If<HIsSmiAndBranch>(key);
  key_smi_if.Then();
  {
    Push(key);  // Nothing to do, just continue to true of continuation.
  }
  key_smi_if.Else();
  {
    HValue* map = Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                                       HObjectAccess::ForMap());
    HValue* instance_type =
        Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                             HObjectAccess::ForMapInstanceType());

    // Non-unique string, check for a string with a hash code that is actually
    // an index.
    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
    IfBuilder not_string_or_name_if(this);
    not_string_or_name_if.If<HCompareNumericAndBranch>(
        instance_type,
        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
        Token::GT);

    not_string_or_name_if.Then();
    {
      // Non-smi, non-Name, non-String: Try to convert to smi in case of
      // HeapNumber.
      // TODO(danno): This could call some variant of ToString
      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
    }
    not_string_or_name_if.Else();
    {
      // String or Name: check explicitly for Name, they can short-circuit
      // directly to unique non-index key path.
      IfBuilder not_symbol_if(this);
      not_symbol_if.If<HCompareNumericAndBranch>(
          instance_type,
          Add<HConstant>(SYMBOL_TYPE),
          Token::NE);

      not_symbol_if.Then();
      {
        // String: check whether the String is a String of an index. If it is,
        // extract the index value from the hash.
        HValue* hash =
            Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForNameHashField());
        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
            String::kContainsCachedArrayIndexMask));

        HValue* not_index_test = AddUncasted<HBitwise>(
            Token::BIT_AND, hash, not_index_mask);

        IfBuilder string_index_if(this);
        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
                                                     graph()->GetConstant0(),
                                                     Token::EQ);
        string_index_if.Then();
        {
          // String with index in hash: extract string and merge to index path.
          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
        }
        string_index_if.Else();
        {
          // Key is a non-index String, check for uniqueness/internalization.
          // If it's not internalized yet, internalize it now.
          HValue* not_internalized_bit = AddUncasted<HBitwise>(
              Token::BIT_AND,
              instance_type,
              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));

          IfBuilder internalized(this);
          internalized.If<HCompareNumericAndBranch>(not_internalized_bit,
                                                    graph()->GetConstant0(),
                                                    Token::EQ);
          internalized.Then();
          Push(key);

          internalized.Else();
          Add<HPushArguments>(key);
          HValue* intern_key = Add<HCallRuntime>(
              isolate()->factory()->empty_string(),
              Runtime::FunctionForId(Runtime::kInternalizeString), 1);
          Push(intern_key);

          internalized.End();
          // Key guaranteed to be a unique string
        }
        string_index_if.JoinContinuation(join_continuation);
      }
      not_symbol_if.Else();
      {
        Push(key);  // Key is symbol
      }
      not_symbol_if.JoinContinuation(join_continuation);
    }
    not_string_or_name_if.JoinContinuation(join_continuation);
  }
  key_smi_if.JoinContinuation(join_continuation);
}


// Deopts if |receiver|'s instance type is one of the global object types
// (JS_GLOBAL_OBJECT_TYPE or JS_BUILTINS_OBJECT_TYPE).
void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
  // Get the instance type of the receiver, and make sure that it is
  // not one of the global object types.
  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapInstanceType());
  // The two global types are adjacent, so a single range test suffices.
  STATIC_ASSERT(JS_BUILTINS_OBJECT_TYPE == JS_GLOBAL_OBJECT_TYPE + 1);
  HValue* min_global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
  HValue* max_global_type = Add<HConstant>(JS_BUILTINS_OBJECT_TYPE);

  IfBuilder if_global_object(this);
  if_global_object.If<HCompareNumericAndBranch>(instance_type,
                                                max_global_type,
                                                Token::LTE);
  if_global_object.And();
  if_global_object.If<HCompareNumericAndBranch>(instance_type,
                                                min_global_type,
                                                Token::GTE);
  if_global_object.ThenDeopt("receiver was a global object");
  if_global_object.End();
}


// Captures a continuation whose true branch is taken when |object|'s
// properties backing store is a hash table (dictionary-mode properties).
void HGraphBuilder::BuildTestForDictionaryProperties(
    HValue* object,
    HIfContinuation* continuation) {
  HValue* properties = Add<HLoadNamedField>(
      object, static_cast<HValue*>(NULL),
      HObjectAccess::ForPropertiesPointer());
  HValue* properties_map =
      Add<HLoadNamedField>(properties, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMap());
  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
  IfBuilder builder(this);
  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
  builder.CaptureContinuation(continuation);
}


// Computes the keyed lookup cache hash for (map, string key), masked to the
// cache capacity — mirrors KeyedLookupCache's hashing scheme.
HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
                                                 HValue* key) {
  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  HValue* object_map =
      Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapAsInteger32());
  HValue* shifted_map = AddUncasted<HShr>(
      object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
  HValue* string_hash =
      Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                           HObjectAccess::ForStringHashField());
  HValue* shifted_hash = AddUncasted<HShr>(
      string_hash, Add<HConstant>(String::kHashShift));
  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
                                             shifted_hash);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
                               Add<HConstant>(mask));
}


// Emits the seeded integer hash (Bob Jenkins-style mix, seeded with the
// heap's hash seed) used for element dictionary probing.
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}


// Emits an open-coded probe loop over a SeededNumberDictionary backing
// store: probes entries quadratically (entry += count, count += 1) until it
// finds |key| (loading the value if the details bits allow) or undefined
// (falling back to %KeyedGetProperty). The result is left on the stack.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* elements,
                                                           HValue* key,
                                                           HValue* hash) {
  HValue* capacity = Add<HLoadKeyed>(
      elements,
      Add<HConstant>(NameDictionary::kCapacityIndex),
      static_cast<HValue*>(NULL),
      FAST_ELEMENTS);

  // Capacity is a power of two, so capacity - 1 masks a hash into range.
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  HValue* entry = hash;
  HValue* count = graph()->GetConstant1();
  Push(entry);
  Push(count);

  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
                                              graph()->CreateBasicBlock());
  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
                                               graph()->CreateBasicBlock());
  LoopBuilder probe_loop(this);
  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
                            // appease live range building without simulates.

  count = Pop();
  entry = Pop();
  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
  int entry_size = SeededNumberDictionary::kEntrySize;
  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
  base_index->ClearFlag(HValue::kCanOverflow);
  int start_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index =
      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key = Add<HLoadKeyed>(
      elements, key_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
  IfBuilder if_undefined(this);
  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
                                             graph()->GetConstantUndefined());
  if_undefined.Then();
  {
    // element == undefined means "not found". Call the runtime.
    // TODO(jkummerow): walk the prototype chain instead.
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
                           Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
  }
  if_undefined.Else();
  {
    IfBuilder if_match(this);
    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
    if_match.Then();
    if_match.Else();

    // Update non-internalized string in the dictionary with internalized key?
    IfBuilder if_update_with_internalized(this);
    HValue* smi_check =
        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
    if_update_with_internalized.And();
    HValue* map = AddLoadMap(candidate_key, smi_check);
    HValue* instance_type = Add<HLoadNamedField>(
        map, static_cast<HValue*>(NULL), HObjectAccess::ForMapInstanceType());
    HValue* not_internalized_bit = AddUncasted<HBitwise>(
        Token::BIT_AND, instance_type,
        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
    if_update_with_internalized.If<HCompareNumericAndBranch>(
        not_internalized_bit, graph()->GetConstant0(), Token::NE);
    if_update_with_internalized.And();
    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
        candidate_key, graph()->GetConstantHole());
    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
                                                               key, Token::EQ);
    if_update_with_internalized.Then();
    // Replace a key that is a non-internalized string by the equivalent
    // internalized string for faster further lookups.
    Add<HStoreKeyed>(elements, key_index, key, FAST_ELEMENTS);
    if_update_with_internalized.Else();

    if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
    if_match.JoinContinuation(&found_key_match_continuation);

    IfBuilder found_key_match(this, &found_key_match_continuation);
    found_key_match.Then();
    // Key at current probe matches. Relevant bits in the |details| field must
    // be zero, otherwise the dictionary element requires special handling.
    HValue* details_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);
    HValue* details = Add<HLoadKeyed>(
        elements, details_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
    int details_mask = PropertyDetails::TypeField::kMask |
        PropertyDetails::DeletedField::kMask;
    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
                                    Add<HConstant>(details_mask));
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(
        details, graph()->GetConstant0(), Token::EQ);
    details_compare.Then();
    HValue* result_index =
        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
    result_index->ClearFlag(HValue::kCanOverflow);
    Push(Add<HLoadKeyed>(elements, result_index, static_cast<HValue*>(NULL),
                         FAST_ELEMENTS));
    details_compare.Else();
    Add<HPushArguments>(receiver, key);
    Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
                           Runtime::FunctionForId(Runtime::kKeyedGetProperty),
                           2));
    details_compare.End();

    found_key_match.Else();
    found_key_match.JoinContinuation(&return_or_loop_continuation);
  }
  if_undefined.JoinContinuation(&return_or_loop_continuation);

  // True branch: a result was pushed — break out. False branch: advance the
  // quadratic probe and loop.
  IfBuilder return_or_loop(this, &return_or_loop_continuation);
  return_or_loop.Then();
  probe_loop.Break();

  return_or_loop.Else();
  entry = AddUncasted<HAdd>(entry, count);
  entry->ClearFlag(HValue::kCanOverflow);
  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
  count->ClearFlag(HValue::kCanOverflow);
  Push(entry);
  Push(count);

  probe_loop.EndBody();

  return_or_loop.End();

  return Pop();
}


// Allocates and initializes a JSRegExpResult array of |length| with the
// given match |index| and |input| string (continued below).
HValue* HGraphBuilder::BuildRegExpConstructResult(HValue* length,
                                                  HValue*
index,
                                                  HValue* input) {
  NoObservableSideEffectsScope scope(this);
  HConstant* max_length = Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  Add<HBoundsCheck>(length, max_length);

  // Generate size calculation code here in order to make it dominate
  // the JSRegExpResult allocation.
  ElementsKind elements_kind = FAST_ELEMENTS;
  HValue* size = BuildCalculateElementsSize(elements_kind, length);

  // Allocate the JSRegExpResult and the FixedArray in one step.
  HValue* result = Add<HAllocate>(
      Add<HConstant>(JSRegExpResult::kSize), HType::JSArray(),
      NOT_TENURED, JS_ARRAY_TYPE);

  // Initialize the JSRegExpResult header.
  HValue* global_object = Add<HLoadNamedField>(
      context(), static_cast<HValue*>(NULL),
      HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
  HValue* native_context = Add<HLoadNamedField>(
      global_object, static_cast<HValue*>(NULL),
      HObjectAccess::ForGlobalObjectNativeContext());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForMap(),
      Add<HLoadNamedField>(
          native_context, static_cast<HValue*>(NULL),
          HObjectAccess::ForContextSlot(Context::REGEXP_RESULT_MAP_INDEX)));
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      empty_fixed_array);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kLengthOffset), length);

  // Initialize the additional fields.
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kIndexOffset),
      index);
  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSRegExpResult::kInputOffset),
      input);

  // Allocate and initialize the elements header.
  HAllocate* elements = BuildAllocateElements(elements_kind, size);
  BuildInitializeElementsHeader(elements, elements_kind, length);

  if (!elements->has_size_upper_bound()) {
    HConstant* size_in_bytes_upper_bound = EstablishElementsAllocationSize(
        elements_kind, max_length->Integer32Value());
    elements->set_size_upper_bound(size_in_bytes_upper_bound);
  }

  Add<HStoreNamedField>(
      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
      elements);

  // Initialize the elements contents with undefined.
  BuildFillElementsWithValue(
      elements, elements_kind, graph()->GetConstant0(), length,
      graph()->GetConstantUndefined());

  return result;
}


// Converts a number |object| (of static |type|) to a string via the number
// string cache, with runtime fallback for cache misses; constants are
// converted at compile time (continued past this chunk).
HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
1898 HValue* mask = AddLoadFixedArrayLength(number_string_cache); 1899 mask->set_type(HType::Smi()); 1900 mask = AddUncasted<HSar>(mask, graph()->GetConstant1()); 1901 mask = AddUncasted<HSub>(mask, graph()->GetConstant1()); 1902 1903 // Check whether object is a smi. 1904 IfBuilder if_objectissmi(this); 1905 if_objectissmi.If<HIsSmiAndBranch>(object); 1906 if_objectissmi.Then(); 1907 { 1908 // Compute hash for smi similar to smi_get_hash(). 1909 HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask); 1910 1911 // Load the key. 1912 HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1()); 1913 HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, 1914 static_cast<HValue*>(NULL), 1915 FAST_ELEMENTS, ALLOW_RETURN_HOLE); 1916 1917 // Check if object == key. 1918 IfBuilder if_objectiskey(this); 1919 if_objectiskey.If<HCompareObjectEqAndBranch>(object, key); 1920 if_objectiskey.Then(); 1921 { 1922 // Make the key_index available. 1923 Push(key_index); 1924 } 1925 if_objectiskey.JoinContinuation(&found); 1926 } 1927 if_objectissmi.Else(); 1928 { 1929 if (type->Is(Type::SignedSmall())) { 1930 if_objectissmi.Deopt("Expected smi"); 1931 } else { 1932 // Check if the object is a heap number. 1933 IfBuilder if_objectisnumber(this); 1934 HValue* objectisnumber = if_objectisnumber.If<HCompareMap>( 1935 object, isolate()->factory()->heap_number_map()); 1936 if_objectisnumber.Then(); 1937 { 1938 // Compute hash for heap number similar to double_get_hash(). 1939 HValue* low = Add<HLoadNamedField>( 1940 object, objectisnumber, 1941 HObjectAccess::ForHeapNumberValueLowestBits()); 1942 HValue* high = Add<HLoadNamedField>( 1943 object, objectisnumber, 1944 HObjectAccess::ForHeapNumberValueHighestBits()); 1945 HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high); 1946 hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask); 1947 1948 // Load the key. 
1949 HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1()); 1950 HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, 1951 static_cast<HValue*>(NULL), 1952 FAST_ELEMENTS, ALLOW_RETURN_HOLE); 1953 1954 // Check if the key is a heap number and compare it with the object. 1955 IfBuilder if_keyisnotsmi(this); 1956 HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key); 1957 if_keyisnotsmi.Then(); 1958 { 1959 IfBuilder if_keyisheapnumber(this); 1960 if_keyisheapnumber.If<HCompareMap>( 1961 key, isolate()->factory()->heap_number_map()); 1962 if_keyisheapnumber.Then(); 1963 { 1964 // Check if values of key and object match. 1965 IfBuilder if_keyeqobject(this); 1966 if_keyeqobject.If<HCompareNumericAndBranch>( 1967 Add<HLoadNamedField>(key, keyisnotsmi, 1968 HObjectAccess::ForHeapNumberValue()), 1969 Add<HLoadNamedField>(object, objectisnumber, 1970 HObjectAccess::ForHeapNumberValue()), 1971 Token::EQ); 1972 if_keyeqobject.Then(); 1973 { 1974 // Make the key_index available. 1975 Push(key_index); 1976 } 1977 if_keyeqobject.JoinContinuation(&found); 1978 } 1979 if_keyisheapnumber.JoinContinuation(&found); 1980 } 1981 if_keyisnotsmi.JoinContinuation(&found); 1982 } 1983 if_objectisnumber.Else(); 1984 { 1985 if (type->Is(Type::Number())) { 1986 if_objectisnumber.Deopt("Expected heap number"); 1987 } 1988 } 1989 if_objectisnumber.JoinContinuation(&found); 1990 } 1991 } 1992 if_objectissmi.JoinContinuation(&found); 1993 1994 // Check for cache hit. 1995 IfBuilder if_found(this, &found); 1996 if_found.Then(); 1997 { 1998 // Count number to string operation in native code. 1999 AddIncrementCounter(isolate()->counters()->number_to_string_native()); 2000 2001 // Load the value in case of cache hit. 
2002 HValue* key_index = Pop(); 2003 HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1()); 2004 Push(Add<HLoadKeyed>(number_string_cache, value_index, 2005 static_cast<HValue*>(NULL), 2006 FAST_ELEMENTS, ALLOW_RETURN_HOLE)); 2007 } 2008 if_found.Else(); 2009 { 2010 // Cache miss, fallback to runtime. 2011 Add<HPushArguments>(object); 2012 Push(Add<HCallRuntime>( 2013 isolate()->factory()->empty_string(), 2014 Runtime::FunctionForId(Runtime::kNumberToStringSkipCache), 2015 1)); 2016 } 2017 if_found.End(); 2018 2019 return Pop(); 2020 } 2021 2022 2023 HAllocate* HGraphBuilder::BuildAllocate( 2024 HValue* object_size, 2025 HType type, 2026 InstanceType instance_type, 2027 HAllocationMode allocation_mode) { 2028 // Compute the effective allocation size. 2029 HValue* size = object_size; 2030 if (allocation_mode.CreateAllocationMementos()) { 2031 size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize)); 2032 size->ClearFlag(HValue::kCanOverflow); 2033 } 2034 2035 // Perform the actual allocation. 2036 HAllocate* object = Add<HAllocate>( 2037 size, type, allocation_mode.GetPretenureMode(), 2038 instance_type, allocation_mode.feedback_site()); 2039 2040 // Setup the allocation memento. 2041 if (allocation_mode.CreateAllocationMementos()) { 2042 BuildCreateAllocationMemento( 2043 object, object_size, allocation_mode.current_site()); 2044 } 2045 2046 return object; 2047 } 2048 2049 2050 HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length, 2051 HValue* right_length) { 2052 // Compute the combined string length and check against max string length. 2053 HValue* length = AddUncasted<HAdd>(left_length, right_length); 2054 // Check that length <= kMaxLength <=> length < MaxLength + 1. 
  HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
  Add<HBoundsCheck>(length, max_length);
  return length;
}


// Allocates and initializes a ConsString of |length| referencing |left| and
// |right|, selecting the one-byte or two-byte cons string map from the
// operands' instance types.
HValue* HGraphBuilder::BuildCreateConsString(
    HValue* length,
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string instance types.
  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
  HInstruction* right_instance_type = AddLoadStringInstanceType(right);

  // Allocate the cons string object. HAllocate does not care whether we
  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
  // CONS_STRING_TYPE here. Below we decide whether the cons string is
  // one-byte or two-byte and set the appropriate map.
  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
                                            CONS_ONE_BYTE_STRING_TYPE));
  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
                                    HType::String(), CONS_STRING_TYPE,
                                    allocation_mode);

  // Compute intersection and difference of instance types.
  HValue* anded_instance_types = AddUncasted<HBitwise>(
      Token::BIT_AND, left_instance_type, right_instance_type);
  HValue* xored_instance_types = AddUncasted<HBitwise>(
      Token::BIT_XOR, left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  IfBuilder if_onebyte(this);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintMask != 0);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, anded_instance_types,
          Add<HConstant>(static_cast<int32_t>(
              kStringEncodingMask | kOneByteDataHintMask))),
      graph()->GetConstant0(), Token::NE);
  if_onebyte.Or();
  STATIC_ASSERT(kOneByteStringTag != 0 &&
                kOneByteDataHintTag != 0 &&
                kOneByteDataHintTag != kOneByteStringTag);
  if_onebyte.If<HCompareNumericAndBranch>(
      AddUncasted<HBitwise>(
          Token::BIT_AND, xored_instance_types,
          Add<HConstant>(static_cast<int32_t>(
              kOneByteStringTag | kOneByteDataHintTag))),
      Add<HConstant>(static_cast<int32_t>(
          kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
  if_onebyte.Then();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
  }
  if_onebyte.Else();
  {
    // We can safely skip the write barrier for storing the map here.
    Add<HStoreNamedField>(
        result, HObjectAccess::ForMap(),
        Add<HConstant>(isolate()->factory()->cons_string_map()));
  }
  if_onebyte.End();

  // Initialize the cons string fields.
  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                        Add<HConstant>(String::kEmptyHashField));
  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);

  // Count the native string addition.
  AddIncrementCounter(isolate()->counters()->string_add_native());

  return result;
}


// Emits a loop copying |length| characters from sequential string |src|
// (starting at |src_offset|) into sequential string |dst| (starting at
// |dst_offset|). Copying two-byte data into a one-byte destination is
// disallowed by the DCHECK.
void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
                                            HValue* src_offset,
                                            String::Encoding src_encoding,
                                            HValue* dst,
                                            HValue* dst_offset,
                                            String::Encoding dst_encoding,
                                            HValue* length) {
  DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
         src_encoding == String::ONE_BYTE_ENCODING);
  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
  {
    HValue* src_index = AddUncasted<HAdd>(src_offset, index);
    HValue* value =
        AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
    HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
    Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
  }
  loop.EndBody();
}


// Rounds |unaligned_size| + |header_size| up to the next object-alignment
// boundary (add mask, then clear the low bits).
HValue* HGraphBuilder::BuildObjectSizeAlignment(
    HValue* unaligned_size, int header_size) {
  DCHECK((header_size & kObjectAlignmentMask) == 0);
  HValue* size = AddUncasted<HAdd>(
      unaligned_size, Add<HConstant>(static_cast<int32_t>(
          header_size + kObjectAlignmentMask)));
  size->ClearFlag(HValue::kCanOverflow);
  return AddUncasted<HBitwise>(
      Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
          ~kObjectAlignmentMask)));
}


// Concatenates two non-empty strings, picking between a cons string, an
// inlined sequential copy, or a runtime call. Callers must have ruled out
// empty operands (see BuildStringAdd).
HValue* HGraphBuilder::BuildUncheckedStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  // Determine the string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Compute the combined string length.
  HValue* length = BuildAddStringLengths(left_length, right_length);

  // Do some manual constant folding here.
  // If one side's length is a known constant, the combined length can be
  // proven >= ConsString::kMinLength statically, so go straight to a cons
  // string without emitting the runtime length comparison below.
  if (left_length->IsConstant()) {
    HConstant* c_left_length = HConstant::cast(left_length);
    DCHECK_NE(0, c_left_length->Integer32Value());
    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The right string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  } else if (right_length->IsConstant()) {
    HConstant* c_right_length = HConstant::cast(right_length);
    DCHECK_NE(0, c_right_length->Integer32Value());
    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
      // The left string contains at least one character.
      return BuildCreateConsString(length, left, right, allocation_mode);
    }
  }

  // Check if we should create a cons string.
  IfBuilder if_createcons(this);
  if_createcons.If<HCompareNumericAndBranch>(
      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
  if_createcons.Then();
  {
    // Create a cons string.
    Push(BuildCreateConsString(length, left, right, allocation_mode));
  }
  if_createcons.Else();
  {
    // Determine the string instance types.
    HValue* left_instance_type = AddLoadStringInstanceType(left);
    HValue* right_instance_type = AddLoadStringInstanceType(right);

    // Compute union and difference of instance types.
    HValue* ored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_OR, left_instance_type, right_instance_type);
    HValue* xored_instance_types = AddUncasted<HBitwise>(
        Token::BIT_XOR, left_instance_type, right_instance_type);

    // Check if both strings have the same encoding and both are
    // sequential.
    IfBuilder if_sameencodingandsequential(this);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, xored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.And();
    STATIC_ASSERT(kSeqStringTag == 0);
    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
        AddUncasted<HBitwise>(
            Token::BIT_AND, ored_instance_types,
            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
        graph()->GetConstant0(), Token::EQ);
    if_sameencodingandsequential.Then();
    {
      HConstant* string_map =
          Add<HConstant>(isolate()->factory()->string_map());
      HConstant* one_byte_string_map =
          Add<HConstant>(isolate()->factory()->one_byte_string_map());

      // Determine map and size depending on whether result is one-byte
      // string. Both branches push (size-or-length, map) so the Pops below
      // stay balanced across the join.
      IfBuilder if_onebyte(this);
      STATIC_ASSERT(kOneByteStringTag != 0);
      if_onebyte.If<HCompareNumericAndBranch>(
          AddUncasted<HBitwise>(
              Token::BIT_AND, ored_instance_types,
              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
          graph()->GetConstant0(), Token::NE);
      if_onebyte.Then();
      {
        // Allocate sequential one-byte string object.
        Push(length);
        Push(one_byte_string_map);
      }
      if_onebyte.Else();
      {
        // Allocate sequential two-byte string object.
        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
        size->ClearFlag(HValue::kCanOverflow);
        size->SetFlag(HValue::kUint32);
        Push(size);
        Push(string_map);
      }
      if_onebyte.End();
      HValue* map = Pop();

      // Calculate the number of bytes needed for the characters in the
      // string while observing object alignment.
      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);

      // Allocate the string object. HAllocate does not care whether we pass
      // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
      HAllocate* result = BuildAllocate(
          size, HType::String(), STRING_TYPE, allocation_mode);
      Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);

      // Initialize the string fields.
      Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
                            Add<HConstant>(String::kEmptyHashField));
      Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);

      // Copy characters to the result string.
      IfBuilder if_twobyte(this);
      if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
      if_twobyte.Then();
      {
        // Copy characters from the left string.
        BuildCopySeqStringChars(
            left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            result, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            left_length);

        // Copy characters from the right string.
        BuildCopySeqStringChars(
            right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
            result, left_length, String::TWO_BYTE_ENCODING,
            right_length);
      }
      if_twobyte.Else();
      {
        // Copy characters from the left string.
        BuildCopySeqStringChars(
            left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            result, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            left_length);

        // Copy characters from the right string.
        BuildCopySeqStringChars(
            right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
            result, left_length, String::ONE_BYTE_ENCODING,
            right_length);
      }
      if_twobyte.End();

      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Return the sequential string.
      Push(result);
    }
    if_sameencodingandsequential.Else();
    {
      // Fallback to the runtime to add the two strings.
      Add<HPushArguments>(left, right);
      Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kStringAdd),
            2));
    }
    if_sameencodingandsequential.End();
  }
  if_createcons.End();

  return Pop();
}


// Concatenates two strings, short-circuiting when either operand is empty
// (the other operand is returned unchanged). Non-empty operands are handled
// by BuildUncheckedStringAdd.
HValue* HGraphBuilder::BuildStringAdd(
    HValue* left,
    HValue* right,
    HAllocationMode allocation_mode) {
  NoObservableSideEffectsScope no_effects(this);

  // Determine string lengths.
  HValue* left_length = AddLoadStringLength(left);
  HValue* right_length = AddLoadStringLength(right);

  // Check if left string is empty.
  IfBuilder if_leftempty(this);
  if_leftempty.If<HCompareNumericAndBranch>(
      left_length, graph()->GetConstant0(), Token::EQ);
  if_leftempty.Then();
  {
    // Count the native string addition.
    AddIncrementCounter(isolate()->counters()->string_add_native());

    // Just return the right string.
    Push(right);
  }
  if_leftempty.Else();
  {
    // Check if right string is empty.
    IfBuilder if_rightempty(this);
    if_rightempty.If<HCompareNumericAndBranch>(
        right_length, graph()->GetConstant0(), Token::EQ);
    if_rightempty.Then();
    {
      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Just return the left string.
      Push(left);
    }
    if_rightempty.Else();
    {
      // Add the two non-empty strings.
      Push(BuildUncheckedStringAdd(left, right, allocation_mode));
    }
    if_rightempty.End();
  }
  if_leftempty.End();

  return Pop();
}


// Emits the element load/store for an object whose map has already been
// checked to match |elements_kind|, including bounds checks, COW handling,
// and capacity growth as dictated by |store_mode|.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  DCHECK((!IsExternalArrayElementsKind(elements_kind) &&
          !IsFixedTypedArrayElementsKind(elements_kind)) ||
         !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
    checked_object->ClearDependsOnFlag(kElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  // Stores to fast elements must not write into a copy-on-write backing
  // store; checking against the (non-COW) fixed_array_map guards this.
  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map());
    check_cow_map->ClearDependsOnFlag(kElementsKind);
  }
  HInstruction* length = NULL;
  if (is_js_array) {
    length = Add<HLoadNamedField>(
        checked_object->ActualValue(), checked_object,
        HObjectAccess::ForArrayLength(elements_kind));
  } else {
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsExternalArrayElementsKind(elements_kind) ||
      IsFixedTypedArrayElementsKind(elements_kind)) {
    HValue* backing_store;
    if (IsExternalArrayElementsKind(elements_kind)) {
      backing_store = Add<HLoadNamedField>(
          elements, static_cast<HValue*>(NULL),
          HObjectAccess::ForExternalArrayExternalPointer());
    } else {
      backing_store = elements;
    }
    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Typed-array semantics: out-of-bounds keys are ignored (key >= length),
      // but negative keys deoptimize.
      NoObservableSideEffectsScope no_effects(this);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          backing_store, key, val, bounds_check, elements_kind, access_type);
      negative_checker.ElseDeopt("Negative key encountered");
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      DCHECK(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      return AddElementAccess(
          backing_store, checked_key, val,
          checked_object, elements_kind, access_type);
    }
  }
  DCHECK(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    NoObservableSideEffectsScope no_effects(this);
    Representation representation = HStoreKeyed::RequiredValueRepresentation(
        elements_kind, STORE_TO_INITIALIZED_ENTRY);
    val = AddUncasted<HForceRepresentation>(val, representation);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array, access_type);
    // The grow path has already range-checked the key.
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map());
        check_cow_map->ClearDependsOnFlag(kElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object,
                          elements_kind, access_type, load_mode);
}


// Allocates a JSArray via |array_builder| for |length_argument| elements.
// Constant lengths are handled directly; dynamic lengths are bounds-checked
// and zero-length arrays get a preallocated default capacity.
HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    if (array_length == 0) {
      return array_builder->AllocateEmptyArray();
    } else {
      return array_builder->AllocateArray(length_argument,
                                          array_length,
                                          length_argument);
    }
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);  // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt("Holey array despite packed elements_kind feedback");
  } else {
    Push(checked_length);  // capacity
    Push(checked_length);  // length
  }
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, max_alloc_length, length);
}


// Computes the byte size of a FixedArray/FixedDoubleArray backing store for
// |capacity| elements, including the header.
HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
                                                  HValue* capacity) {
  int elements_size = IsFastDoubleElementsKind(kind)
      ?
      kDoubleSize
      : kPointerSize;

  HConstant* elements_size_value = Add<HConstant>(elements_size);
  HInstruction* mul = HMul::NewImul(zone(), context(),
                                    capacity->ActualValue(),
                                    elements_size_value);
  AddInstruction(mul);
  mul->ClearFlag(HValue::kCanOverflow);

  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);

  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
  total_size->ClearFlag(HValue::kCanOverflow);
  return total_size;
}


// Allocates a bare JSArray object (header only, no elements store),
// reserving extra room for an AllocationMemento when site tracking is on.
HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
  int base_size = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }
  HConstant* size_in_bytes = Add<HConstant>(base_size);
  return Add<HAllocate>(
      size_in_bytes, HType::JSArray(), NOT_TENURED, JS_OBJECT_TYPE);
}


// Returns the statically known allocation size (as an HConstant) of a
// backing store with constant |capacity|.
HConstant* HGraphBuilder::EstablishElementsAllocationSize(
    ElementsKind kind,
    int capacity) {
  int base_size = IsFastDoubleElementsKind(kind)
      ? FixedDoubleArray::SizeFor(capacity)
      : FixedArray::SizeFor(capacity);

  return Add<HConstant>(base_size);
}


// Allocates an (uninitialized) elements backing store of |size_in_bytes|.
HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
                                                HValue* size_in_bytes) {
  InstanceType instance_type = IsFastDoubleElementsKind(kind)
      ? FIXED_DOUBLE_ARRAY_TYPE
      : FIXED_ARRAY_TYPE;

  return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
                        instance_type);
}


// Writes the map and length fields of a freshly allocated elements store.
void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
                                                  ElementsKind kind,
                                                  HValue* capacity) {
  Factory* factory = isolate()->factory();
  Handle<Map> map = IsFastDoubleElementsKind(kind)
      ? factory->fixed_double_array_map()
      : factory->fixed_array_map();

  Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
                        capacity);
}


// Allocates an elements backing store for |capacity| elements and
// initializes its header.
HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
    ElementsKind kind,
    HValue* capacity) {
  // The HForceRepresentation is to prevent possible deopt on int-smi
  // conversion after allocation but before the new object fields are set.
  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
  HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
  HValue* new_elements = BuildAllocateElements(kind, size_in_bytes);
  BuildInitializeElementsHeader(new_elements, kind, capacity);
  return new_elements;
}


// Initializes the header of a freshly allocated JSArray: map, properties,
// elements (empty_fixed_array when |elements| is NULL), length, and an
// optional allocation memento.
void HGraphBuilder::BuildJSArrayHeader(HValue* array,
                                       HValue* array_map,
                                       HValue* elements,
                                       AllocationSiteMode mode,
                                       ElementsKind elements_kind,
                                       HValue* allocation_site_payload,
                                       HValue* length_field) {
  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);

  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());

  Add<HStoreNamedField>(
      array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForElementsPointer(),
      elements != NULL ? elements : empty_fixed_array);

  Add<HStoreNamedField>(
      array, HObjectAccess::ForArrayLength(elements_kind), length_field);

  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(
        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
  }
}


// Emits a keyed load or store on |elements|. Stored values are clamped for
// UINT8_CLAMPED kinds; uint32 loads are recorded for the uint32 analysis
// when the corresponding flag is on.
HInstruction* HGraphBuilder::AddElementAccess(
    HValue* elements,
    HValue* checked_key,
    HValue* val,
    HValue* dependency,
    ElementsKind elements_kind,
    PropertyAccessType access_type,
    LoadKeyedHoleMode load_mode) {
  if (access_type == STORE) {
    DCHECK(val != NULL);
    if (elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
        elements_kind == UINT8_CLAMPED_ELEMENTS) {
      val = Add<HClampToUint8>(val);
    }
    return Add<HStoreKeyed>(elements, checked_key, val, elements_kind,
                            STORE_TO_INITIALIZED_ENTRY);
  }

  DCHECK(access_type == LOAD);
  DCHECK(val == NULL);
  HLoadKeyed* load = Add<HLoadKeyed>(
      elements, checked_key, dependency, elements_kind, load_mode);
  if (FLAG_opt_safe_uint32_operations &&
      (elements_kind == EXTERNAL_UINT32_ELEMENTS ||
       elements_kind == UINT32_ELEMENTS)) {
    graph()->RecordUint32Instruction(load);
  }
  return load;
}


// Loads the map field of |object|.
HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
                                           HValue* dependency) {
  return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
}


// Loads the elements pointer of |object|.
HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
                                                HValue* dependency) {
  return Add<HLoadNamedField>(
      object, dependency, HObjectAccess::ForElementsPointer());
}


// Loads the length field of a FixedArray(-like) backing store.
HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
    HValue* array,
    HValue* dependency) {
  return Add<HLoadNamedField>(
      array, dependency, HObjectAccess::ForFixedArrayLength());
}


// Loads the length field of a JSArray for the given elements kind.
HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
                                                   ElementsKind kind,
2725 HValue* dependency) { 2726 return Add<HLoadNamedField>( 2727 array, dependency, HObjectAccess::ForArrayLength(kind)); 2728 } 2729 2730 2731 HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) { 2732 HValue* half_old_capacity = AddUncasted<HShr>(old_capacity, 2733 graph_->GetConstant1()); 2734 2735 HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity); 2736 new_capacity->ClearFlag(HValue::kCanOverflow); 2737 2738 HValue* min_growth = Add<HConstant>(16); 2739 2740 new_capacity = AddUncasted<HAdd>(new_capacity, min_growth); 2741 new_capacity->ClearFlag(HValue::kCanOverflow); 2742 2743 return new_capacity; 2744 } 2745 2746 2747 HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object, 2748 HValue* elements, 2749 ElementsKind kind, 2750 ElementsKind new_kind, 2751 HValue* length, 2752 HValue* new_capacity) { 2753 Add<HBoundsCheck>(new_capacity, Add<HConstant>( 2754 (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> 2755 ElementsKindToShiftSize(new_kind))); 2756 2757 HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader( 2758 new_kind, new_capacity); 2759 2760 BuildCopyElements(elements, kind, new_elements, 2761 new_kind, length, new_capacity); 2762 2763 Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(), 2764 new_elements); 2765 2766 return new_elements; 2767 } 2768 2769 2770 void HGraphBuilder::BuildFillElementsWithValue(HValue* elements, 2771 ElementsKind elements_kind, 2772 HValue* from, 2773 HValue* to, 2774 HValue* value) { 2775 if (to == NULL) { 2776 to = AddLoadFixedArrayLength(elements); 2777 } 2778 2779 // Special loop unfolding case 2780 STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= 2781 kElementLoopUnrollThreshold); 2782 int initial_capacity = -1; 2783 if (from->IsInteger32Constant() && to->IsInteger32Constant()) { 2784 int constant_from = from->GetInteger32Constant(); 2785 int constant_to = to->GetInteger32Constant(); 2786 2787 if (constant_from == 0 && 
constant_to <= kElementLoopUnrollThreshold) {
2788       initial_capacity = constant_to;
2789     }
2790   }
2791
2792   // Since we're about to store a hole value, the store instruction below must
2793   // assume an elements kind that supports heap object values.
2794   if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2795     elements_kind = FAST_HOLEY_ELEMENTS;
2796   }
2797
2798   if (initial_capacity >= 0) {
     // Small constant range starting at 0: emit one keyed store per index.
2799     for (int i = 0; i < initial_capacity; i++) {
2800       HInstruction* key = Add<HConstant>(i);
2801       Add<HStoreKeyed>(elements, key, value, elements_kind);
2802     }
2803   } else {
2804     // Carefully loop backwards so that the "from" value remains live through
2805     // the loop rather than the "to" value. This often corresponds to keeping
2806     // length live rather than capacity, which helps register allocation,
2807     // since length is used more often than capacity after filling with holes.
2808     LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2809
2810     HValue* key = builder.BeginBody(to, from, Token::GT);
2811
     // Store at key - 1: the loop variable runs (from, to] exclusive at the
     // bottom, so the last store hits index |from|.
2812     HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
2813     adjusted_key->ClearFlag(HValue::kCanOverflow);
2814
2815     Add<HStoreKeyed>(elements, adjusted_key, value, elements_kind);
2816
2817     builder.EndBody();
2818   }
2819 }
2820
2821
     // Fills elements [from, to) with the hole sentinel appropriate for
     // |elements_kind|: the_hole value for Smi/object kinds, the hole NaN
     // bit pattern for double kinds.
2822 void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2823                                               ElementsKind elements_kind,
2824                                               HValue* from,
2825                                               HValue* to) {
2826   // Fast elements kinds need to be initialized in case statements below
2827   // cause a garbage collection.
2828   Factory* factory = isolate()->factory();
2829
2830   double nan_double = FixedDoubleArray::hole_nan_as_double();
2831   HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2832       ?
Add<HConstant>(factory->the_hole_value()) 2833 : Add<HConstant>(nan_double); 2834 2835 BuildFillElementsWithValue(elements, elements_kind, from, to, hole); 2836 } 2837 2838 2839 void HGraphBuilder::BuildCopyElements(HValue* from_elements, 2840 ElementsKind from_elements_kind, 2841 HValue* to_elements, 2842 ElementsKind to_elements_kind, 2843 HValue* length, 2844 HValue* capacity) { 2845 int constant_capacity = -1; 2846 if (capacity != NULL && 2847 capacity->IsConstant() && 2848 HConstant::cast(capacity)->HasInteger32Value()) { 2849 int constant_candidate = HConstant::cast(capacity)->Integer32Value(); 2850 if (constant_candidate <= kElementLoopUnrollThreshold) { 2851 constant_capacity = constant_candidate; 2852 } 2853 } 2854 2855 bool pre_fill_with_holes = 2856 IsFastDoubleElementsKind(from_elements_kind) && 2857 IsFastObjectElementsKind(to_elements_kind); 2858 if (pre_fill_with_holes) { 2859 // If the copy might trigger a GC, make sure that the FixedArray is 2860 // pre-initialized with holes to make sure that it's always in a 2861 // consistent state. 2862 BuildFillElementsWithHole(to_elements, to_elements_kind, 2863 graph()->GetConstant0(), NULL); 2864 } 2865 2866 if (constant_capacity != -1) { 2867 // Unroll the loop for small elements kinds. 
2868     for (int i = 0; i < constant_capacity; i++) {
2869       HValue* key_constant = Add<HConstant>(i);
2870       HInstruction* value = Add<HLoadKeyed>(from_elements, key_constant,
2871                                             static_cast<HValue*>(NULL),
2872                                             from_elements_kind);
2873       Add<HStoreKeyed>(to_elements, key_constant, value, to_elements_kind);
2874     }
2875   } else {
     // If the target was not hole-filled above and the copied length may be
     // smaller than the capacity, initialize the tail [length, capacity) with
     // holes so the array stays in a consistent state.
2876     if (!pre_fill_with_holes &&
2877         (capacity == NULL || !length->Equals(capacity))) {
2878       BuildFillElementsWithHole(to_elements, to_elements_kind,
2879                                 length, NULL);
2880     }
2881
2882     if (capacity == NULL) {
2883       capacity = AddLoadFixedArrayLength(to_elements);
2884     }
2885
     // Copy backwards from length-1 down to 0 (see the liveness comment in
     // BuildFillElementsWithValue for why backwards iteration is preferred).
2886     LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2887
2888     HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
2889                                     Token::GT);
2890
2891     key = AddUncasted<HSub>(key, graph()->GetConstant1());
2892     key->ClearFlag(HValue::kCanOverflow);
2893
     // The load may yield the hole; it is handled explicitly below when the
     // source kind is holey.
2894     HValue* element = Add<HLoadKeyed>(from_elements, key,
2895                                       static_cast<HValue*>(NULL),
2896                                       from_elements_kind,
2897                                       ALLOW_RETURN_HOLE);
2898
     // A hole from a holey source cannot be stored with a FAST_SMI store, so
     // widen the store kind to FAST_HOLEY_ELEMENTS in that case.
2899     ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
2900                          IsFastSmiElementsKind(to_elements_kind))
2901         ? FAST_HOLEY_ELEMENTS : to_elements_kind;
2902
     // Kind-changing copy from a holey source: translate the hole
     // representation for the target (hole NaN for double targets, the_hole
     // object otherwise).
2903     if (IsHoleyElementsKind(from_elements_kind) &&
2904         from_elements_kind != to_elements_kind) {
2905       IfBuilder if_hole(this);
2906       if_hole.If<HCompareHoleAndBranch>(element);
2907       if_hole.Then();
2908       HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
2909           ?
Add<HConstant>(FixedDoubleArray::hole_nan_as_double()) 2910 : graph()->GetConstantHole(); 2911 Add<HStoreKeyed>(to_elements, key, hole_constant, kind); 2912 if_hole.Else(); 2913 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind); 2914 store->SetFlag(HValue::kAllowUndefinedAsNaN); 2915 if_hole.End(); 2916 } else { 2917 HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind); 2918 store->SetFlag(HValue::kAllowUndefinedAsNaN); 2919 } 2920 2921 builder.EndBody(); 2922 } 2923 2924 Counters* counters = isolate()->counters(); 2925 AddIncrementCounter(counters->inlined_copied_elements()); 2926 } 2927 2928 2929 HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate, 2930 HValue* allocation_site, 2931 AllocationSiteMode mode, 2932 ElementsKind kind) { 2933 HAllocate* array = AllocateJSArrayObject(mode); 2934 2935 HValue* map = AddLoadMap(boilerplate); 2936 HValue* elements = AddLoadElements(boilerplate); 2937 HValue* length = AddLoadArrayLength(boilerplate, kind); 2938 2939 BuildJSArrayHeader(array, 2940 map, 2941 elements, 2942 mode, 2943 FAST_ELEMENTS, 2944 allocation_site, 2945 length); 2946 return array; 2947 } 2948 2949 2950 HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate, 2951 HValue* allocation_site, 2952 AllocationSiteMode mode) { 2953 HAllocate* array = AllocateJSArrayObject(mode); 2954 2955 HValue* map = AddLoadMap(boilerplate); 2956 2957 BuildJSArrayHeader(array, 2958 map, 2959 NULL, // set elements to empty fixed array 2960 mode, 2961 FAST_ELEMENTS, 2962 allocation_site, 2963 graph()->GetConstant0()); 2964 return array; 2965 } 2966 2967 2968 HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate, 2969 HValue* allocation_site, 2970 AllocationSiteMode mode, 2971 ElementsKind kind) { 2972 HValue* boilerplate_elements = AddLoadElements(boilerplate); 2973 HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements); 2974 2975 // Generate size calculation code here in order 
to make it dominate 2976 // the JSArray allocation. 2977 HValue* elements_size = BuildCalculateElementsSize(kind, capacity); 2978 2979 // Create empty JSArray object for now, store elimination should remove 2980 // redundant initialization of elements and length fields and at the same 2981 // time the object will be fully prepared for GC if it happens during 2982 // elements allocation. 2983 HValue* result = BuildCloneShallowArrayEmpty( 2984 boilerplate, allocation_site, mode); 2985 2986 HAllocate* elements = BuildAllocateElements(kind, elements_size); 2987 2988 // This function implicitly relies on the fact that the 2989 // FastCloneShallowArrayStub is called only for literals shorter than 2990 // JSObject::kInitialMaxFastElementArray. 2991 // Can't add HBoundsCheck here because otherwise the stub will eager a frame. 2992 HConstant* size_upper_bound = EstablishElementsAllocationSize( 2993 kind, JSObject::kInitialMaxFastElementArray); 2994 elements->set_size_upper_bound(size_upper_bound); 2995 2996 Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements); 2997 2998 // The allocation for the cloned array above causes register pressure on 2999 // machines with low register counts. Force a reload of the boilerplate 3000 // elements here to free up a register for the allocation to avoid unnecessary 3001 // spillage. 3002 boilerplate_elements = AddLoadElements(boilerplate); 3003 boilerplate_elements->SetFlag(HValue::kCantBeReplaced); 3004 3005 // Copy the elements array header. 
3006 for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) { 3007 HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i); 3008 Add<HStoreNamedField>(elements, access, 3009 Add<HLoadNamedField>(boilerplate_elements, 3010 static_cast<HValue*>(NULL), access)); 3011 } 3012 3013 // And the result of the length 3014 HValue* length = AddLoadArrayLength(boilerplate, kind); 3015 Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length); 3016 3017 BuildCopyElements(boilerplate_elements, kind, elements, 3018 kind, length, NULL); 3019 return result; 3020 } 3021 3022 3023 void HGraphBuilder::BuildCompareNil( 3024 HValue* value, 3025 Type* type, 3026 HIfContinuation* continuation) { 3027 IfBuilder if_nil(this); 3028 bool some_case_handled = false; 3029 bool some_case_missing = false; 3030 3031 if (type->Maybe(Type::Null())) { 3032 if (some_case_handled) if_nil.Or(); 3033 if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull()); 3034 some_case_handled = true; 3035 } else { 3036 some_case_missing = true; 3037 } 3038 3039 if (type->Maybe(Type::Undefined())) { 3040 if (some_case_handled) if_nil.Or(); 3041 if_nil.If<HCompareObjectEqAndBranch>(value, 3042 graph()->GetConstantUndefined()); 3043 some_case_handled = true; 3044 } else { 3045 some_case_missing = true; 3046 } 3047 3048 if (type->Maybe(Type::Undetectable())) { 3049 if (some_case_handled) if_nil.Or(); 3050 if_nil.If<HIsUndetectableAndBranch>(value); 3051 some_case_handled = true; 3052 } else { 3053 some_case_missing = true; 3054 } 3055 3056 if (some_case_missing) { 3057 if_nil.Then(); 3058 if_nil.Else(); 3059 if (type->NumClasses() == 1) { 3060 BuildCheckHeapObject(value); 3061 // For ICs, the map checked below is a sentinel map that gets replaced by 3062 // the monomorphic map when the code is used as a template to generate a 3063 // new IC. For optimized functions, there is no sentinel map, the map 3064 // emitted below is the actual monomorphic map. 
3065 Add<HCheckMaps>(value, type->Classes().Current()); 3066 } else { 3067 if_nil.Deopt("Too many undetectable types"); 3068 } 3069 } 3070 3071 if_nil.CaptureContinuation(continuation); 3072 } 3073 3074 3075 void HGraphBuilder::BuildCreateAllocationMemento( 3076 HValue* previous_object, 3077 HValue* previous_object_size, 3078 HValue* allocation_site) { 3079 DCHECK(allocation_site != NULL); 3080 HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>( 3081 previous_object, previous_object_size, HType::HeapObject()); 3082 AddStoreMapConstant( 3083 allocation_memento, isolate()->factory()->allocation_memento_map()); 3084 Add<HStoreNamedField>( 3085 allocation_memento, 3086 HObjectAccess::ForAllocationMementoSite(), 3087 allocation_site); 3088 if (FLAG_allocation_site_pretenuring) { 3089 HValue* memento_create_count = Add<HLoadNamedField>( 3090 allocation_site, static_cast<HValue*>(NULL), 3091 HObjectAccess::ForAllocationSiteOffset( 3092 AllocationSite::kPretenureCreateCountOffset)); 3093 memento_create_count = AddUncasted<HAdd>( 3094 memento_create_count, graph()->GetConstant1()); 3095 // This smi value is reset to zero after every gc, overflow isn't a problem 3096 // since the counter is bounded by the new space size. 
3097 memento_create_count->ClearFlag(HValue::kCanOverflow); 3098 Add<HStoreNamedField>( 3099 allocation_site, HObjectAccess::ForAllocationSiteOffset( 3100 AllocationSite::kPretenureCreateCountOffset), memento_create_count); 3101 } 3102 } 3103 3104 3105 HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) { 3106 // Get the global context, then the native context 3107 HInstruction* context = 3108 Add<HLoadNamedField>(closure, static_cast<HValue*>(NULL), 3109 HObjectAccess::ForFunctionContextPointer()); 3110 HInstruction* global_object = Add<HLoadNamedField>( 3111 context, static_cast<HValue*>(NULL), 3112 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX)); 3113 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset( 3114 GlobalObject::kNativeContextOffset); 3115 return Add<HLoadNamedField>( 3116 global_object, static_cast<HValue*>(NULL), access); 3117 } 3118 3119 3120 HInstruction* HGraphBuilder::BuildGetNativeContext() { 3121 // Get the global context, then the native context 3122 HValue* global_object = Add<HLoadNamedField>( 3123 context(), static_cast<HValue*>(NULL), 3124 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX)); 3125 return Add<HLoadNamedField>( 3126 global_object, static_cast<HValue*>(NULL), 3127 HObjectAccess::ForObservableJSObjectOffset( 3128 GlobalObject::kNativeContextOffset)); 3129 } 3130 3131 3132 HInstruction* HGraphBuilder::BuildGetArrayFunction() { 3133 HInstruction* native_context = BuildGetNativeContext(); 3134 HInstruction* index = 3135 Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX)); 3136 return Add<HLoadKeyed>( 3137 native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS); 3138 } 3139 3140 3141 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder, 3142 ElementsKind kind, 3143 HValue* allocation_site_payload, 3144 HValue* constructor_function, 3145 AllocationSiteOverrideMode override_mode) : 3146 builder_(builder), 3147 kind_(kind), 3148 
allocation_site_payload_(allocation_site_payload), 3149 constructor_function_(constructor_function) { 3150 DCHECK(!allocation_site_payload->IsConstant() || 3151 HConstant::cast(allocation_site_payload)->handle( 3152 builder_->isolate())->IsAllocationSite()); 3153 mode_ = override_mode == DISABLE_ALLOCATION_SITES 3154 ? DONT_TRACK_ALLOCATION_SITE 3155 : AllocationSite::GetMode(kind); 3156 } 3157 3158 3159 HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder, 3160 ElementsKind kind, 3161 HValue* constructor_function) : 3162 builder_(builder), 3163 kind_(kind), 3164 mode_(DONT_TRACK_ALLOCATION_SITE), 3165 allocation_site_payload_(NULL), 3166 constructor_function_(constructor_function) { 3167 } 3168 3169 3170 HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() { 3171 if (!builder()->top_info()->IsStub()) { 3172 // A constant map is fine. 3173 Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_), 3174 builder()->isolate()); 3175 return builder()->Add<HConstant>(map); 3176 } 3177 3178 if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) { 3179 // No need for a context lookup if the kind_ matches the initial 3180 // map, because we can just load the map in that case. 3181 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap(); 3182 return builder()->Add<HLoadNamedField>( 3183 constructor_function_, static_cast<HValue*>(NULL), access); 3184 } 3185 3186 // TODO(mvstanton): we should always have a constructor function if we 3187 // are creating a stub. 3188 HInstruction* native_context = constructor_function_ != NULL 3189 ? 
builder()->BuildGetNativeContext(constructor_function_) 3190 : builder()->BuildGetNativeContext(); 3191 3192 HInstruction* index = builder()->Add<HConstant>( 3193 static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX)); 3194 3195 HInstruction* map_array = builder()->Add<HLoadKeyed>( 3196 native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS); 3197 3198 HInstruction* kind_index = builder()->Add<HConstant>(kind_); 3199 3200 return builder()->Add<HLoadKeyed>( 3201 map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS); 3202 } 3203 3204 3205 HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() { 3206 // Find the map near the constructor function 3207 HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap(); 3208 return builder()->Add<HLoadNamedField>( 3209 constructor_function_, static_cast<HValue*>(NULL), access); 3210 } 3211 3212 3213 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() { 3214 HConstant* capacity = builder()->Add<HConstant>(initial_capacity()); 3215 return AllocateArray(capacity, 3216 capacity, 3217 builder()->graph()->GetConstant0()); 3218 } 3219 3220 3221 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray( 3222 HValue* capacity, 3223 HConstant* capacity_upper_bound, 3224 HValue* length_field, 3225 FillMode fill_mode) { 3226 return AllocateArray(capacity, 3227 capacity_upper_bound->GetInteger32Constant(), 3228 length_field, 3229 fill_mode); 3230 } 3231 3232 3233 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray( 3234 HValue* capacity, 3235 int capacity_upper_bound, 3236 HValue* length_field, 3237 FillMode fill_mode) { 3238 HConstant* elememts_size_upper_bound = capacity->IsInteger32Constant() 3239 ? 
HConstant::cast(capacity) 3240 : builder()->EstablishElementsAllocationSize(kind_, capacity_upper_bound); 3241 3242 HAllocate* array = AllocateArray(capacity, length_field, fill_mode); 3243 if (!elements_location_->has_size_upper_bound()) { 3244 elements_location_->set_size_upper_bound(elememts_size_upper_bound); 3245 } 3246 return array; 3247 } 3248 3249 3250 HAllocate* HGraphBuilder::JSArrayBuilder::AllocateArray( 3251 HValue* capacity, 3252 HValue* length_field, 3253 FillMode fill_mode) { 3254 // These HForceRepresentations are because we store these as fields in the 3255 // objects we construct, and an int32-to-smi HChange could deopt. Accept 3256 // the deopt possibility now, before allocation occurs. 3257 capacity = 3258 builder()->AddUncasted<HForceRepresentation>(capacity, 3259 Representation::Smi()); 3260 length_field = 3261 builder()->AddUncasted<HForceRepresentation>(length_field, 3262 Representation::Smi()); 3263 3264 // Generate size calculation code here in order to make it dominate 3265 // the JSArray allocation. 
3266 HValue* elements_size = 3267 builder()->BuildCalculateElementsSize(kind_, capacity); 3268 3269 // Allocate (dealing with failure appropriately) 3270 HAllocate* array_object = builder()->AllocateJSArrayObject(mode_); 3271 3272 // Fill in the fields: map, properties, length 3273 HValue* map; 3274 if (allocation_site_payload_ == NULL) { 3275 map = EmitInternalMapCode(); 3276 } else { 3277 map = EmitMapCode(); 3278 } 3279 3280 builder()->BuildJSArrayHeader(array_object, 3281 map, 3282 NULL, // set elements to empty fixed array 3283 mode_, 3284 kind_, 3285 allocation_site_payload_, 3286 length_field); 3287 3288 // Allocate and initialize the elements 3289 elements_location_ = builder()->BuildAllocateElements(kind_, elements_size); 3290 3291 builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity); 3292 3293 // Set the elements 3294 builder()->Add<HStoreNamedField>( 3295 array_object, HObjectAccess::ForElementsPointer(), elements_location_); 3296 3297 if (fill_mode == FILL_WITH_HOLE) { 3298 builder()->BuildFillElementsWithHole(elements_location_, kind_, 3299 graph()->GetConstant0(), capacity); 3300 } 3301 3302 return array_object; 3303 } 3304 3305 3306 HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) { 3307 HValue* global_object = Add<HLoadNamedField>( 3308 context(), static_cast<HValue*>(NULL), 3309 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX)); 3310 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset( 3311 GlobalObject::kBuiltinsOffset); 3312 HValue* builtins = Add<HLoadNamedField>( 3313 global_object, static_cast<HValue*>(NULL), access); 3314 HObjectAccess function_access = HObjectAccess::ForObservableJSObjectOffset( 3315 JSBuiltinsObject::OffsetOfFunctionWithId(builtin)); 3316 return Add<HLoadNamedField>( 3317 builtins, static_cast<HValue*>(NULL), function_access); 3318 } 3319 3320 3321 HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info) 3322 : HGraphBuilder(info), 3323 
function_state_(NULL), 3324 initial_function_state_(this, info, NORMAL_RETURN, 0), 3325 ast_context_(NULL), 3326 break_scope_(NULL), 3327 inlined_count_(0), 3328 globals_(10, info->zone()), 3329 osr_(new(info->zone()) HOsrBuilder(this)) { 3330 // This is not initialized in the initializer list because the 3331 // constructor for the initial state relies on function_state_ == NULL 3332 // to know it's the initial state. 3333 function_state_= &initial_function_state_; 3334 InitializeAstVisitor(info->zone()); 3335 if (FLAG_hydrogen_track_positions) { 3336 SetSourcePosition(info->shared_info()->start_position()); 3337 } 3338 } 3339 3340 3341 HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first, 3342 HBasicBlock* second, 3343 BailoutId join_id) { 3344 if (first == NULL) { 3345 return second; 3346 } else if (second == NULL) { 3347 return first; 3348 } else { 3349 HBasicBlock* join_block = graph()->CreateBasicBlock(); 3350 Goto(first, join_block); 3351 Goto(second, join_block); 3352 join_block->SetJoinId(join_id); 3353 return join_block; 3354 } 3355 } 3356 3357 3358 HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement, 3359 HBasicBlock* exit_block, 3360 HBasicBlock* continue_block) { 3361 if (continue_block != NULL) { 3362 if (exit_block != NULL) Goto(exit_block, continue_block); 3363 continue_block->SetJoinId(statement->ContinueId()); 3364 return continue_block; 3365 } 3366 return exit_block; 3367 } 3368 3369 3370 HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement, 3371 HBasicBlock* loop_entry, 3372 HBasicBlock* body_exit, 3373 HBasicBlock* loop_successor, 3374 HBasicBlock* break_block) { 3375 if (body_exit != NULL) Goto(body_exit, loop_entry); 3376 loop_entry->PostProcessLoopHeader(statement); 3377 if (break_block != NULL) { 3378 if (loop_successor != NULL) Goto(loop_successor, break_block); 3379 break_block->SetJoinId(statement->ExitId()); 3380 return break_block; 3381 } 3382 return loop_successor; 
3383 } 3384 3385 3386 // Build a new loop header block and set it as the current block. 3387 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() { 3388 HBasicBlock* loop_entry = CreateLoopHeaderBlock(); 3389 Goto(loop_entry); 3390 set_current_block(loop_entry); 3391 return loop_entry; 3392 } 3393 3394 3395 HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry( 3396 IterationStatement* statement) { 3397 HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement) 3398 ? osr()->BuildOsrLoopEntry(statement) 3399 : BuildLoopEntry(); 3400 return loop_entry; 3401 } 3402 3403 3404 void HBasicBlock::FinishExit(HControlInstruction* instruction, 3405 HSourcePosition position) { 3406 Finish(instruction, position); 3407 ClearEnvironment(); 3408 } 3409 3410 3411 OStream& operator<<(OStream& os, const HBasicBlock& b) { 3412 return os << "B" << b.block_id(); 3413 } 3414 3415 3416 HGraph::HGraph(CompilationInfo* info) 3417 : isolate_(info->isolate()), 3418 next_block_id_(0), 3419 entry_block_(NULL), 3420 blocks_(8, info->zone()), 3421 values_(16, info->zone()), 3422 phi_list_(NULL), 3423 uint32_instructions_(NULL), 3424 osr_(NULL), 3425 info_(info), 3426 zone_(info->zone()), 3427 is_recursive_(false), 3428 use_optimistic_licm_(false), 3429 depends_on_empty_array_proto_elements_(false), 3430 type_change_checksum_(0), 3431 maximum_environment_size_(0), 3432 no_side_effects_scope_count_(0), 3433 disallow_adding_new_values_(false), 3434 next_inline_id_(0), 3435 inlined_functions_(5, info->zone()) { 3436 if (info->IsStub()) { 3437 CallInterfaceDescriptor descriptor = 3438 info->code_stub()->GetCallInterfaceDescriptor(); 3439 start_environment_ = new (zone_) 3440 HEnvironment(zone_, descriptor.GetEnvironmentParameterCount()); 3441 } else { 3442 TraceInlinedFunction(info->shared_info(), HSourcePosition::Unknown()); 3443 start_environment_ = 3444 new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_); 3445 } 3446 start_environment_->set_ast_id(BailoutId::FunctionEntry()); 3447 
entry_block_ = CreateBasicBlock(); 3448 entry_block_->SetInitialEnvironment(start_environment_); 3449 } 3450 3451 3452 HBasicBlock* HGraph::CreateBasicBlock() { 3453 HBasicBlock* result = new(zone()) HBasicBlock(this); 3454 blocks_.Add(result, zone()); 3455 return result; 3456 } 3457 3458 3459 void HGraph::FinalizeUniqueness() { 3460 DisallowHeapAllocation no_gc; 3461 DCHECK(!OptimizingCompilerThread::IsOptimizerThread(isolate())); 3462 for (int i = 0; i < blocks()->length(); ++i) { 3463 for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) { 3464 it.Current()->FinalizeUniqueness(); 3465 } 3466 } 3467 } 3468 3469 3470 int HGraph::TraceInlinedFunction( 3471 Handle<SharedFunctionInfo> shared, 3472 HSourcePosition position) { 3473 if (!FLAG_hydrogen_track_positions) { 3474 return 0; 3475 } 3476 3477 int id = 0; 3478 for (; id < inlined_functions_.length(); id++) { 3479 if (inlined_functions_[id].shared().is_identical_to(shared)) { 3480 break; 3481 } 3482 } 3483 3484 if (id == inlined_functions_.length()) { 3485 inlined_functions_.Add(InlinedFunctionInfo(shared), zone()); 3486 3487 if (!shared->script()->IsUndefined()) { 3488 Handle<Script> script(Script::cast(shared->script())); 3489 if (!script->source()->IsUndefined()) { 3490 CodeTracer::Scope tracing_scopex(isolate()->GetCodeTracer()); 3491 OFStream os(tracing_scopex.file()); 3492 os << "--- FUNCTION SOURCE (" << shared->DebugName()->ToCString().get() 3493 << ") id{" << info()->optimization_id() << "," << id << "} ---\n"; 3494 { 3495 ConsStringIteratorOp op; 3496 StringCharacterStream stream(String::cast(script->source()), 3497 &op, 3498 shared->start_position()); 3499 // fun->end_position() points to the last character in the stream. We 3500 // need to compensate by adding one to calculate the length. 
3501 int source_len = 3502 shared->end_position() - shared->start_position() + 1; 3503 for (int i = 0; i < source_len; i++) { 3504 if (stream.HasMore()) { 3505 os << AsReversiblyEscapedUC16(stream.GetNext()); 3506 } 3507 } 3508 } 3509 3510 os << "\n--- END ---\n"; 3511 } 3512 } 3513 } 3514 3515 int inline_id = next_inline_id_++; 3516 3517 if (inline_id != 0) { 3518 CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer()); 3519 OFStream os(tracing_scope.file()); 3520 os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{" 3521 << info()->optimization_id() << "," << id << "} AS " << inline_id 3522 << " AT " << position << endl; 3523 } 3524 3525 return inline_id; 3526 } 3527 3528 3529 int HGraph::SourcePositionToScriptPosition(HSourcePosition pos) { 3530 if (!FLAG_hydrogen_track_positions || pos.IsUnknown()) { 3531 return pos.raw(); 3532 } 3533 3534 return inlined_functions_[pos.inlining_id()].start_position() + 3535 pos.position(); 3536 } 3537 3538 3539 // Block ordering was implemented with two mutually recursive methods, 3540 // HGraph::Postorder and HGraph::PostorderLoopBlocks. 3541 // The recursion could lead to stack overflow so the algorithm has been 3542 // implemented iteratively. 
3543 // At a high level the algorithm looks like this: 3544 // 3545 // Postorder(block, loop_header) : { 3546 // if (block has already been visited or is of another loop) return; 3547 // mark block as visited; 3548 // if (block is a loop header) { 3549 // VisitLoopMembers(block, loop_header); 3550 // VisitSuccessorsOfLoopHeader(block); 3551 // } else { 3552 // VisitSuccessors(block) 3553 // } 3554 // put block in result list; 3555 // } 3556 // 3557 // VisitLoopMembers(block, outer_loop_header) { 3558 // foreach (block b in block loop members) { 3559 // VisitSuccessorsOfLoopMember(b, outer_loop_header); 3560 // if (b is loop header) VisitLoopMembers(b); 3561 // } 3562 // } 3563 // 3564 // VisitSuccessorsOfLoopMember(block, outer_loop_header) { 3565 // foreach (block b in block successors) Postorder(b, outer_loop_header) 3566 // } 3567 // 3568 // VisitSuccessorsOfLoopHeader(block) { 3569 // foreach (block b in block successors) Postorder(b, block) 3570 // } 3571 // 3572 // VisitSuccessors(block, loop_header) { 3573 // foreach (block b in block successors) Postorder(b, loop_header) 3574 // } 3575 // 3576 // The ordering is started calling Postorder(entry, NULL). 3577 // 3578 // Each instance of PostorderProcessor represents the "stack frame" of the 3579 // recursion, and particularly keeps the state of the loop (iteration) of the 3580 // "Visit..." function it represents. 3581 // To recycle memory we keep all the frames in a double linked list but 3582 // this means that we cannot use constructors to initialize the frames. 3583 // 3584 class PostorderProcessor : public ZoneObject { 3585 public: 3586 // Back link (towards the stack bottom). 3587 PostorderProcessor* parent() {return father_; } 3588 // Forward link (towards the stack top). 
3589 PostorderProcessor* child() {return child_; } 3590 HBasicBlock* block() { return block_; } 3591 HLoopInformation* loop() { return loop_; } 3592 HBasicBlock* loop_header() { return loop_header_; } 3593 3594 static PostorderProcessor* CreateEntryProcessor(Zone* zone, 3595 HBasicBlock* block) { 3596 PostorderProcessor* result = new(zone) PostorderProcessor(NULL); 3597 return result->SetupSuccessors(zone, block, NULL); 3598 } 3599 3600 PostorderProcessor* PerformStep(Zone* zone, 3601 ZoneList<HBasicBlock*>* order) { 3602 PostorderProcessor* next = 3603 PerformNonBacktrackingStep(zone, order); 3604 if (next != NULL) { 3605 return next; 3606 } else { 3607 return Backtrack(zone, order); 3608 } 3609 } 3610 3611 private: 3612 explicit PostorderProcessor(PostorderProcessor* father) 3613 : father_(father), child_(NULL), successor_iterator(NULL) { } 3614 3615 // Each enum value states the cycle whose state is kept by this instance. 3616 enum LoopKind { 3617 NONE, 3618 SUCCESSORS, 3619 SUCCESSORS_OF_LOOP_HEADER, 3620 LOOP_MEMBERS, 3621 SUCCESSORS_OF_LOOP_MEMBER 3622 }; 3623 3624 // Each "Setup..." method is like a constructor for a cycle state. 
  // NOTE: these are members of PostorderProcessor (class declaration begins
  // above this excerpt).  The class simulates the recursion of a postorder
  // graph walk with an explicit, zone-allocated stack of processor frames
  // (father_/child_ links), so deep graphs cannot overflow the C stack.

  // Configure this frame to visit the successors of |block|.  Returns the
  // frame that should run next (this, a pushed child for a loop header, or
  // this marked NONE when |block| needs no visit).
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header) {
    // Skip blocks already emitted and blocks that belong to a different
    // (inner/outer) loop than the one currently being walked.
    if (block == NULL || block->IsOrdered() ||
        block->parent_loop_header() != loop_header) {
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      block->MarkAsOrdered();

      if (block->IsLoopHeader()) {
        // A loop header starts a nested LOOP_MEMBERS cycle in a new frame.
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        DCHECK(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }

  // Configure this frame to iterate over all member blocks of |loop|.
  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }

  // Configure this frame to visit the successors of one loop member.
  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }

  // This method "allocates" a new stack frame.
  // Frames are cached in child_ and reused across pushes, so at most one
  // PostorderProcessor is zone-allocated per stack depth reached.
  PostorderProcessor* Push(Zone* zone) {
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  // Emit block_ into the postorder list; by construction all non-loop-header
  // successors must already have been emitted (checked below).
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    DCHECK(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    DCHECK(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }

  // This method is the basic block to walk up the stack.
  PostorderProcessor* Pop(Zone* zone,
                          ZoneList<HBasicBlock*>* order) {
    switch (kind_) {
      case SUCCESSORS:
      case SUCCESSORS_OF_LOOP_HEADER:
        ClosePostorder(order, zone);
        return father_;
      case LOOP_MEMBERS:
        return father_;
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
        } else {
          return father_;
        }
      case NONE:
        return father_;
    }
    UNREACHABLE();
    return NULL;
  }

  // Walks up the stack.
  // Pops frames until one of them still has a non-backtracking step to do,
  // or the stack is exhausted (returns NULL: traversal finished).
  PostorderProcessor* Backtrack(Zone* zone,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, order);
      if (next != NULL) {
        return next;
      } else {
        parent = parent->Pop(zone, order);
      }
    }
    return NULL;
  }

  // Advances this frame's iteration by one element; returns the newly pushed
  // child frame to run, or NULL when this frame's iteration is exhausted.
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          // Successors of a loop header are walked with the header itself as
          // the enclosing loop.
          return result->SetupSuccessors(zone, next_block, block());
        }
        break;
      case LOOP_MEMBERS:
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block, loop_header_);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    loop_index = 0;
    loop_length = 0;
    successor_iterator = HSuccessorIterator(block_->end());
  }

  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
      return result;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_index = 0;
    loop_length = loop_->blocks()->length();
  }

  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
      loop_index++;
      return result;
    } else {
      return NULL;
    }
  }

  LoopKind kind_;                // What this frame is currently iterating.
  PostorderProcessor* father_;   // Caller frame (up the simulated stack).
  PostorderProcessor* child_;    // Cached callee frame (down the stack).
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  int loop_index;                // Cursor into loop_->blocks().
  int loop_length;
  HSuccessorIterator successor_iterator;
};


// Computes a loop-aware reverse postorder of the graph's blocks into
// blocks_, using the iterative PostorderProcessor above.
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());

#ifdef DEBUG
  // Initially the blocks must not be ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(!blocks_[i]->IsOrdered());
  }
#endif

  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
  // blocks_ is reused as the output list: the walk re-adds every block in
  // postorder.
  blocks_.Rewind(0);
  while (postorder) {
    postorder = postorder->PerformStep(zone(), &blocks_);
  }

#ifdef DEBUG
  // Now all blocks must be marked as ordered.
  for (int i = 0; i < blocks_.length(); ++i) {
    DCHECK(blocks_[i]->IsOrdered());
  }
#endif

  // Reverse block list and assign block IDs.
  // (Tail of HGraph::OrderBlocks, begun above.)  In-place reversal of the
  // postorder list into reverse postorder; block ids are renumbered to match
  // the blocks' new positions.
  for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
    HBasicBlock* bi = blocks_[i];
    HBasicBlock* bj = blocks_[j];
    bi->set_block_id(j);
    bj->set_block_id(i);
    blocks_[i] = bj;
    blocks_[j] = bi;
  }
}


// Computes the dominator of every block as the common dominator of its
// predecessors.  Relies on blocks_ being in reverse postorder (OrderBlocks),
// so predecessors of a non-loop-header are processed before the block itself.
void HGraph::AssignDominators() {
  HPhase phase("H_Assign dominators", this);
  for (int i = 0; i < blocks_.length(); ++i) {
    HBasicBlock* block = blocks_[i];
    if (block->IsLoopHeader()) {
      // Only the first predecessor of a loop header is from outside the loop.
      // All others are back edges, and thus cannot dominate the loop header.
      block->AssignCommonDominator(block->predecessors()->first());
      block->AssignLoopSuccessorDominators();
    } else {
      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
      }
    }
  }
}


// Returns false if any phi in the graph carries an arguments-object value,
// which Crankshaft does not support (caller bails out of optimization).
bool HGraph::CheckArgumentsPhiUses() {
  int block_count = blocks_.length();
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      // We don't support phi uses of arguments for now.
      if (phi->CheckFlag(HValue::kIsArguments)) return false;
    }
  }
  return true;
}


// Returns false if any phi operand is the hole constant, i.e. a use of an
// uninitialized const variable (caller bails out of optimization).
bool HGraph::CheckConstPhiUses() {
  int block_count = blocks_.length();
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      // Check for the hole value (from an uninitialized const).
      for (int k = 0; k < phi->OperandCount(); k++) {
        if (phi->OperandAt(k) == GetConstantHole()) return false;
      }
    }
  }
  return true;
}


// Gathers every phi in the graph into the flat phi_list_ for later phases.
void HGraph::CollectPhis() {
  int block_count = blocks_.length();
  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
  for (int i = 0; i < block_count; ++i) {
    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
      HPhi* phi = blocks_[i]->phis()->at(j);
      phi_list_->Add(phi, zone());
    }
  }
}


// Implementation of utility class to encapsulate the translation state for
// a (possibly inlined) function.
FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
                             CompilationInfo* info,
                             InliningKind inlining_kind,
                             int inlining_id)
    : owner_(owner),
      compilation_info_(info),
      call_context_(NULL),
      inlining_kind_(inlining_kind),
      function_return_(NULL),
      test_context_(NULL),
      entry_(NULL),
      arguments_object_(NULL),
      arguments_elements_(NULL),
      inlining_id_(inlining_id),
      outer_source_position_(HSourcePosition::Unknown()),
      outer_(owner->function_state()) {
  // outer_ != NULL means this state is for an inlined function; wire up the
  // return targets the inlined body will jump to.
  if (outer_ != NULL) {
    // State for an inline function.
    if (owner->ast_context()->IsTest()) {
      // Inlined into a test context: returns branch directly to if_true /
      // if_false instead of materializing a value.
      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
      if_true->MarkAsInlineReturnTarget(owner->current_block());
      if_false->MarkAsInlineReturnTarget(owner->current_block());
      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
      Expression* cond = outer_test_context->condition();
      // The AstContext constructor pushed on the context stack.  This newed
      // instance is the reason that AstContext can't be BASE_EMBEDDED.
3948 test_context_ = new TestContext(owner, cond, if_true, if_false); 3949 } else { 3950 function_return_ = owner->graph()->CreateBasicBlock(); 3951 function_return()->MarkAsInlineReturnTarget(owner->current_block()); 3952 } 3953 // Set this after possibly allocating a new TestContext above. 3954 call_context_ = owner->ast_context(); 3955 } 3956 3957 // Push on the state stack. 3958 owner->set_function_state(this); 3959 3960 if (FLAG_hydrogen_track_positions) { 3961 outer_source_position_ = owner->source_position(); 3962 owner->EnterInlinedSource( 3963 info->shared_info()->start_position(), 3964 inlining_id); 3965 owner->SetSourcePosition(info->shared_info()->start_position()); 3966 } 3967 } 3968 3969 3970 FunctionState::~FunctionState() { 3971 delete test_context_; 3972 owner_->set_function_state(outer_); 3973 3974 if (FLAG_hydrogen_track_positions) { 3975 owner_->set_source_position(outer_source_position_); 3976 owner_->EnterInlinedSource( 3977 outer_->compilation_info()->shared_info()->start_position(), 3978 outer_->inlining_id()); 3979 } 3980 } 3981 3982 3983 // Implementation of utility classes to represent an expression's context in 3984 // the AST. 3985 AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind) 3986 : owner_(owner), 3987 kind_(kind), 3988 outer_(owner->ast_context()), 3989 for_typeof_(false) { 3990 owner->set_ast_context(this); // Push. 3991 #ifdef DEBUG 3992 DCHECK(owner->environment()->frame_type() == JS_FUNCTION); 3993 original_length_ = owner->environment()->length(); 3994 #endif 3995 } 3996 3997 3998 AstContext::~AstContext() { 3999 owner_->set_ast_context(outer_); // Pop. 
}


// An expression in effect context must leave the environment length
// unchanged (unless we already bailed out or killed the current block).
EffectContext::~EffectContext() {
  DCHECK(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}


// An expression in value context must push exactly one value.
ValueContext::~ValueContext() {
  DCHECK(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}


void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}


void ValueContext::ReturnValue(HValue* value) {
  // The value is tracked in the bailout environment, and communicated
  // through the environment as the result of the expression.
  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
    owner()->Bailout(kBadValueContextForArgumentsValue);
  }
  owner()->Push(value);
}


void TestContext::ReturnValue(HValue* value) {
  // In a test context a value is consumed by branching on it.
  BuildBranch(value);
}


// Adds the instruction for its side effects only; a simulate is appended so
// the deoptimizer has a checkpoint after observable effects.
void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  owner()->AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


// A control instruction in effect context: both outcomes are immediately
// rejoined since the branch result is unused.
void EffectContext::ReturnControl(HControlInstruction* instr,
                                  BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
  owner()->set_current_block(join);
}


// Resumes building after a continuation: join the reachable branches, or
// fall through to the single reachable one.
void EffectContext::ReturnContinuation(HIfContinuation* continuation,
                                       BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (!continuation->IsTrueReachable()) {
    owner()->set_current_block(false_branch);
  } else if (!continuation->IsFalseReachable()) {
    owner()->set_current_block(true_branch);
  } else {
    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
    owner()->set_current_block(join);
  }
}


// Adds the instruction and pushes it as the expression's value; the push
// happens before the simulate so the checkpoint sees the result.
void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  owner()->AddInstruction(instr);
  owner()->Push(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}


// A control instruction in value context: materialize true/false constants
// on the two branches and join, so the expression yields a boolean value.
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->FinishCurrentBlock(instr);
  // Push the constant into each branch's environment; the join merges the
  // two environments, phi-ing the pushed values.
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
      owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}

// Like ValueContext::ReturnControl, but the branch blocks come from an
// existing continuation rather than being created here.
void ValueContext::ReturnContinuation(HIfContinuation* continuation,
                                      BailoutId ast_id) {
  HBasicBlock* materialize_true = NULL;
  HBasicBlock* materialize_false = NULL;
  continuation->Continue(&materialize_true, &materialize_false);
  if (continuation->IsTrueReachable()) {
    owner()->set_current_block(materialize_true);
    owner()->Push(owner()->graph()->GetConstantTrue());
    // NOTE(review): this second set_current_block() re-sets the block that is
    // already current — it appears redundant (same in the false arm below);
    // presumably kept for symmetry with code that mutated the block. Confirm
    // before simplifying.
    owner()->set_current_block(materialize_true);
  }
  if (continuation->IsFalseReachable()) {
    owner()->set_current_block(materialize_false);
    owner()->Push(owner()->graph()->GetConstantFalse());
    owner()->set_current_block(materialize_false);
  }
  if (continuation->TrueAndFalseReachable()) {
    HBasicBlock* join =
        owner()->CreateJoin(materialize_true, materialize_false, ast_id);
    owner()->set_current_block(join);
  }
}


// Adds the instruction, checkpoints its side effects, then branches on its
// value toward the test context's targets.
void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->IsControlInstruction());
  HOptimizedGraphBuilder* builder = owner();
  builder->AddInstruction(instr);
  // We expect a simulate after every expression with side effects, though
  // this one isn't actually needed (and wouldn't work if it were targeted).
  if (instr->HasObservableSideEffects()) {
    // Temporarily push so the simulate's environment includes the value.
    builder->Push(instr);
    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    builder->Pop();
  }
  BuildBranch(instr);
}


// A control instruction in test context: route both outcomes straight to the
// context's if_true/if_false targets through empty edge-split blocks.
void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  DCHECK(!instr->HasObservableSideEffects());
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  owner()->Goto(empty_true, if_true(), owner()->function_state());
  owner()->Goto(empty_false, if_false(), owner()->function_state());
  // Control has been dispatched; nothing falls through.
  owner()->set_current_block(NULL);
}


void TestContext::ReturnContinuation(HIfContinuation* continuation,
                                     BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  if (continuation->IsTrueReachable()) {
    owner()->Goto(true_branch, if_true(), owner()->function_state());
  }
  if (continuation->IsFalseReachable()) {
    owner()->Goto(false_branch, if_false(), owner()->function_state());
  }
  owner()->set_current_block(NULL);
}


// Branches on |value| toward this test context's targets, bailing out if the
// value is an arguments object (cannot be tested).
void TestContext::BuildBranch(HValue* value) {
  // We expect the graph to be in edge-split form: there is no edge that
  // connects a branch node to a join node.  We conservatively ensure that
  // property by always adding an empty block on the outgoing edges of this
  // branch.
4181 HOptimizedGraphBuilder* builder = owner(); 4182 if (value != NULL && value->CheckFlag(HValue::kIsArguments)) { 4183 builder->Bailout(kArgumentsObjectValueInATestContext); 4184 } 4185 ToBooleanStub::Types expected(condition()->to_boolean_types()); 4186 ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None()); 4187 } 4188 4189 4190 // HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts. 4191 #define CHECK_BAILOUT(call) \ 4192 do { \ 4193 call; \ 4194 if (HasStackOverflow()) return; \ 4195 } while (false) 4196 4197 4198 #define CHECK_ALIVE(call) \ 4199 do { \ 4200 call; \ 4201 if (HasStackOverflow() || current_block() == NULL) return; \ 4202 } while (false) 4203 4204 4205 #define CHECK_ALIVE_OR_RETURN(call, value) \ 4206 do { \ 4207 call; \ 4208 if (HasStackOverflow() || current_block() == NULL) return value; \ 4209 } while (false) 4210 4211 4212 void HOptimizedGraphBuilder::Bailout(BailoutReason reason) { 4213 current_info()->AbortOptimization(reason); 4214 SetStackOverflow(); 4215 } 4216 4217 4218 void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) { 4219 EffectContext for_effect(this); 4220 Visit(expr); 4221 } 4222 4223 4224 void HOptimizedGraphBuilder::VisitForValue(Expression* expr, 4225 ArgumentsAllowedFlag flag) { 4226 ValueContext for_value(this, flag); 4227 Visit(expr); 4228 } 4229 4230 4231 void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) { 4232 ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED); 4233 for_value.set_for_typeof(true); 4234 Visit(expr); 4235 } 4236 4237 4238 void HOptimizedGraphBuilder::VisitForControl(Expression* expr, 4239 HBasicBlock* true_block, 4240 HBasicBlock* false_block) { 4241 TestContext for_test(this, expr, true_block, false_block); 4242 Visit(expr); 4243 } 4244 4245 4246 void HOptimizedGraphBuilder::VisitExpressions( 4247 ZoneList<Expression*>* exprs) { 4248 for (int i = 0; i < exprs->length(); ++i) { 4249 CHECK_ALIVE(VisitForValue(exprs->at(i))); 4250 } 4251 } 


// Translates the function's AST into the Hydrogen graph.  Returns false if
// optimization was abandoned (generator, illegal redeclaration, eval, or a
// bailout while visiting the body).
bool HOptimizedGraphBuilder::BuildGraph() {
  if (current_info()->function()->is_generator()) {
    Bailout(kFunctionIsAGenerator);
    return false;
  }
  Scope* scope = current_info()->scope();
  if (scope->HasIllegalRedeclaration()) {
    Bailout(kFunctionWithIllegalRedeclaration);
    return false;
  }
  if (scope->calls_eval()) {
    Bailout(kFunctionCallsEval);
    return false;
  }
  SetUpScope(scope);

  // Add an edge to the body entry.  This is warty: the graph's start
  // environment will be used by the Lithium translation as the initial
  // environment on graph entry, but it has now been mutated by the
  // Hydrogen translation of the instructions in the start block.  This
  // environment uses values which have not been defined yet.  These
  // Hydrogen instructions will then be replayed by the Lithium
  // translation, so they cannot have an environment effect.  The edge to
  // the body's entry block (along with some special logic for the start
  // block in HInstruction::InsertAfter) seals the start block from
  // getting unwanted instructions inserted.
  //
  // TODO(kmillikin): Fix this.  Stop mutating the initial environment.
  // Make the Hydrogen instructions in the initial block into Hydrogen
  // values (but not instructions), present in the initial environment and
  // not replayed by the Lithium translation.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  Goto(body_entry);
  body_entry->SetJoinId(BailoutId::FunctionEntry());
  set_current_block(body_entry);

  // Handle implicit declaration of the function name in named function
  // expressions before other declarations.
  if (scope->is_function_scope() && scope->function() != NULL) {
    VisitVariableDeclaration(scope->function());
  }
  VisitDeclarations(scope->declarations());
  Add<HSimulate>(BailoutId::Declarations());

  Add<HStackCheck>(HStackCheck::kFunctionEntry);

  VisitStatements(current_info()->function()->body());
  if (HasStackOverflow()) return false;

  // Implicit "return undefined" if the body falls off the end.
  if (current_block() != NULL) {
    Add<HReturn>(graph()->GetConstantUndefined());
    set_current_block(NULL);
  }

  // If the checksum of the number of type info changes is the same as the
  // last time this function was compiled, then this recompile is likely not
  // due to missing/inadequate type feedback, but rather too aggressive
  // optimization. Disable optimistic LICM in that case.
  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  int checksum = type_info->own_type_change_checksum();
  int composite_checksum = graph()->update_type_change_checksum(checksum);
  graph()->set_use_optimistic_licm(
      !type_info->matches_inlined_type_change_checksum(composite_checksum));
  type_info->set_inlined_type_change_checksum(composite_checksum);

  // Perform any necessary OSR-specific cleanups or changes to the graph.
  osr()->FinishGraph();

  return true;
}


// Runs the Hydrogen optimization pipeline over the built graph.  Returns
// false (with *bailout_reason set) if an unsupported construct is found.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  // (Continuation of HGraph::Optimize begun above.)
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  // Arguments-phi check runs after redundant-phi elimination, which may have
  // removed the offending phis.
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HMarkDeoptimizeOnUndefinedPhase>();
  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();

  Run<HRangeAnalysisPhase>();

  Run<HComputeChangeUndefinedToNaN>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}


// Replaces informative redefinitions with the values they stand for, so the
// Lithium translation sees actual values only.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are never redefinitions: each must be its own actual value.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      DCHECK(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() == instruction) continue;
      if (instruction->CheckFlag(HValue::kIsDead)) {
        // The instruction was marked as deleted but left in the graph
        // as a control flow dependency point for subsequent
        // instructions.
4439 instruction->DeleteAndReplaceWith(instruction->ActualValue()); 4440 } else { 4441 DCHECK(instruction->IsInformativeDefinition()); 4442 if (instruction->IsPurelyInformativeDefinition()) { 4443 instruction->DeleteAndReplaceWith(instruction->RedefinedOperand()); 4444 } else { 4445 instruction->ReplaceAllUsesWith(instruction->ActualValue()); 4446 } 4447 } 4448 } 4449 } 4450 } 4451 4452 4453 void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) { 4454 ZoneList<HValue*> arguments(count, zone()); 4455 for (int i = 0; i < count; ++i) { 4456 arguments.Add(Pop(), zone()); 4457 } 4458 4459 HPushArguments* push_args = New<HPushArguments>(); 4460 while (!arguments.is_empty()) { 4461 push_args->AddInput(arguments.RemoveLast()); 4462 } 4463 AddInstruction(push_args); 4464 } 4465 4466 4467 template <class Instruction> 4468 HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) { 4469 PushArgumentsFromEnvironment(call->argument_count()); 4470 return call; 4471 } 4472 4473 4474 void HOptimizedGraphBuilder::SetUpScope(Scope* scope) { 4475 // First special is HContext. 4476 HInstruction* context = Add<HContext>(); 4477 environment()->BindContext(context); 4478 4479 // Create an arguments object containing the initial parameters. Set the 4480 // initial values of parameters including "this" having parameter index 0. 4481 DCHECK_EQ(scope->num_parameters() + 1, environment()->parameter_count()); 4482 HArgumentsObject* arguments_object = 4483 New<HArgumentsObject>(environment()->parameter_count()); 4484 for (int i = 0; i < environment()->parameter_count(); ++i) { 4485 HInstruction* parameter = Add<HParameter>(i); 4486 arguments_object->AddArgument(parameter, zone()); 4487 environment()->Bind(i, parameter); 4488 } 4489 AddInstruction(arguments_object); 4490 graph()->SetArgumentsObject(arguments_object); 4491 4492 HConstant* undefined_constant = graph()->GetConstantUndefined(); 4493 // Initialize specials and locals to undefined. 
  // (Continuation of SetUpScope begun above.)
  for (int i = environment()->parameter_count() + 1;
       i < environment()->length();
       ++i) {
    environment()->Bind(i, undefined_constant);
  }

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    if (!scope->arguments()->IsStackAllocated()) {
      return Bailout(kContextAllocatedArguments);
    }

    environment()->Bind(scope->arguments(),
                        graph()->GetArgumentsObject());
  }
}


// Converts a map into the type system's representation of that map.
Type* HOptimizedGraphBuilder::ToType(Handle<Map> map) {
  return IC::MapToType<Type>(map, zone());
}


// Visits statements in order; stops at an unconditional jump (the remainder
// is unreachable) or when a bailout/dead control flow occurs.
void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
  for (int i = 0; i < statements->length(); i++) {
    Statement* stmt = statements->at(i);
    CHECK_ALIVE(Visit(stmt));
    if (stmt->IsJump()) break;
  }
}


// Builds a statement block, allocating and entering a block context when the
// block introduces its own scope, and restoring the outer context on exit.
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());

  Scope* outer_scope = scope();
  Scope* scope = stmt->scope();
  BreakAndContinueInfo break_info(stmt, outer_scope);

  { BreakAndContinueScope push(&break_info, this);
    if (scope != NULL) {
      // Load the function object.
      Scope* declaration_scope = scope->DeclarationScope();
      HInstruction* function;
      HValue* outer_context = environment()->context();
      if (declaration_scope->is_global_scope() ||
          declaration_scope->is_eval_scope()) {
        function = new(zone()) HLoadContextSlot(
            outer_context, Context::CLOSURE_INDEX, HLoadContextSlot::kNoCheck);
      } else {
        function = New<HThisFunction>();
      }
      AddInstruction(function);
      // Allocate a block context and store it to the stack frame.
      HInstruction* inner_context = Add<HAllocateBlockContext>(
          outer_context, function, scope->GetScopeInfo());
      HInstruction* instr = Add<HStoreFrameContext>(inner_context);
      if (instr->HasObservableSideEffects()) {
        AddSimulate(stmt->EntryId(), REMOVABLE_SIMULATE);
      }
      set_scope(scope);
      environment()->BindContext(inner_context);
      VisitDeclarations(scope->declarations());
      AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
    }
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  set_scope(outer_scope);
  // Pop the block context (if one was entered) by loading its PREVIOUS slot.
  if (scope != NULL && current_block() != NULL) {
    HValue* inner_context = environment()->context();
    HValue* outer_context = Add<HLoadNamedField>(
        inner_context, static_cast<HValue*>(NULL),
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));

    HInstruction* instr = Add<HStoreFrameContext>(outer_context);
    if (instr->HasObservableSideEffects()) {
      AddSimulate(stmt->ExitId(), REMOVABLE_SIMULATE);
    }
    environment()->BindContext(outer_context);
  }
  // If any break statement targeted this block, join it in here.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}


void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  VisitForEffect(stmt->expression());
}


// An empty statement generates no code; only the invariants are checked.
void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
}


// Builds an if-statement: statically known conditions emit only the taken
// arm; otherwise both arms are built and joined.
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  // (Continuation of VisitIfStatement begun above.)
  if (stmt->condition()->ToBooleanIsTrue()) {
    // Condition statically true: build only the then-arm.
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    // Condition statically false: build only the else-arm.
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // An arm whose entry block gained no predecessor is unreachable and is
    // represented as NULL for the join below.
    if (cond_true->HasPredecessor()) {
      cond_true->SetJoinId(stmt->ThenId());
      set_current_block(cond_true);
      CHECK_BAILOUT(Visit(stmt->then_statement()));
      cond_true = current_block();
    } else {
      cond_true = NULL;
    }

    if (cond_false->HasPredecessor()) {
      cond_false->SetJoinId(stmt->ElseId());
      set_current_block(cond_false);
      CHECK_BAILOUT(Visit(stmt->else_statement()));
      cond_false = current_block();
    } else {
      cond_false = NULL;
    }

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}


// Finds the enclosing break/continue target for |stmt|, accumulating how
// many extra environment values must be dropped on the way out, and returns
// (lazily creating) the corresponding jump-target block.
HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
    BreakableStatement* stmt,
    BreakType type,
    Scope** scope,
    int* drop_extra) {
  *drop_extra = 0;
  BreakAndContinueScope* current = this;
  while (current != NULL && current->info()->target() != stmt) {
    *drop_extra += current->info()->drop_extra();
    current = current->next();
  }
  DCHECK(current != NULL);  // Always found (unless stack is malformed).
  *scope = current->info()->scope();

  if (type == BREAK) {
    // A break leaves the target construct itself, so its own extras are
    // dropped too; a continue stays inside it.
    *drop_extra += current->info()->drop_extra();
  }

  HBasicBlock* block = NULL;
  switch (type) {
    case BREAK:
      block = current->info()->break_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_break_block(block);
      }
      break;

    case CONTINUE:
      block = current->info()->continue_block();
      if (block == NULL) {
        block = current->owner()->graph()->CreateBasicBlock();
        current->info()->set_continue_block(block);
      }
      break;
  }

  return block;
}


// Builds a continue: unwinds block contexts up to the target's scope, then
// jumps to the loop's continue block; control does not fall through.
void HOptimizedGraphBuilder::VisitContinueStatement(
    ContinueStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Scope* outer_scope = NULL;
  Scope* inner_scope = scope();
  int drop_extra = 0;
  HBasicBlock* continue_block = break_scope()->Get(
      stmt->target(), BreakAndContinueScope::CONTINUE,
      &outer_scope, &drop_extra);
  HValue* context = environment()->context();
  Drop(drop_extra);
  // Walk the context chain back out of any block scopes entered between the
  // loop and this continue.
  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
  if (context_pop_count > 0) {
    while (context_pop_count-- > 0) {
      HInstruction* context_instruction = Add<HLoadNamedField>(
          context, static_cast<HValue*>(NULL),
          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
      context = context_instruction;
    }
    HInstruction* instr = Add<HStoreFrameContext>(context);
    if (instr->HasObservableSideEffects()) {
      AddSimulate(stmt->target()->EntryId(), REMOVABLE_SIMULATE);
    }
    environment()->BindContext(context);
  }

  Goto(continue_block);
  set_current_block(NULL);
}


// (VisitBreakStatement continues past the end of this excerpt.)
void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() !=
NULL); 4718 DCHECK(current_block()->HasPredecessor()); 4719 Scope* outer_scope = NULL; 4720 Scope* inner_scope = scope(); 4721 int drop_extra = 0; 4722 HBasicBlock* break_block = break_scope()->Get( 4723 stmt->target(), BreakAndContinueScope::BREAK, 4724 &outer_scope, &drop_extra); 4725 HValue* context = environment()->context(); 4726 Drop(drop_extra); 4727 int context_pop_count = inner_scope->ContextChainLength(outer_scope); 4728 if (context_pop_count > 0) { 4729 while (context_pop_count-- > 0) { 4730 HInstruction* context_instruction = Add<HLoadNamedField>( 4731 context, static_cast<HValue*>(NULL), 4732 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX)); 4733 context = context_instruction; 4734 } 4735 HInstruction* instr = Add<HStoreFrameContext>(context); 4736 if (instr->HasObservableSideEffects()) { 4737 AddSimulate(stmt->target()->ExitId(), REMOVABLE_SIMULATE); 4738 } 4739 environment()->BindContext(context); 4740 } 4741 Goto(break_block); 4742 set_current_block(NULL); 4743 } 4744 4745 4746 void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) { 4747 DCHECK(!HasStackOverflow()); 4748 DCHECK(current_block() != NULL); 4749 DCHECK(current_block()->HasPredecessor()); 4750 FunctionState* state = function_state(); 4751 AstContext* context = call_context(); 4752 if (context == NULL) { 4753 // Not an inlined return, so an actual one. 4754 CHECK_ALIVE(VisitForValue(stmt->expression())); 4755 HValue* result = environment()->Pop(); 4756 Add<HReturn>(result); 4757 } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) { 4758 // Return from an inlined construct call. In a test context the return value 4759 // will always evaluate to true, in a value context the return value needs 4760 // to be a JSObject. 
4761 if (context->IsTest()) { 4762 TestContext* test = TestContext::cast(context); 4763 CHECK_ALIVE(VisitForEffect(stmt->expression())); 4764 Goto(test->if_true(), state); 4765 } else if (context->IsEffect()) { 4766 CHECK_ALIVE(VisitForEffect(stmt->expression())); 4767 Goto(function_return(), state); 4768 } else { 4769 DCHECK(context->IsValue()); 4770 CHECK_ALIVE(VisitForValue(stmt->expression())); 4771 HValue* return_value = Pop(); 4772 HValue* receiver = environment()->arguments_environment()->Lookup(0); 4773 HHasInstanceTypeAndBranch* typecheck = 4774 New<HHasInstanceTypeAndBranch>(return_value, 4775 FIRST_SPEC_OBJECT_TYPE, 4776 LAST_SPEC_OBJECT_TYPE); 4777 HBasicBlock* if_spec_object = graph()->CreateBasicBlock(); 4778 HBasicBlock* not_spec_object = graph()->CreateBasicBlock(); 4779 typecheck->SetSuccessorAt(0, if_spec_object); 4780 typecheck->SetSuccessorAt(1, not_spec_object); 4781 FinishCurrentBlock(typecheck); 4782 AddLeaveInlined(if_spec_object, return_value, state); 4783 AddLeaveInlined(not_spec_object, receiver, state); 4784 } 4785 } else if (state->inlining_kind() == SETTER_CALL_RETURN) { 4786 // Return from an inlined setter call. The returned value is never used, the 4787 // value of an assignment is always the value of the RHS of the assignment. 4788 CHECK_ALIVE(VisitForEffect(stmt->expression())); 4789 if (context->IsTest()) { 4790 HValue* rhs = environment()->arguments_environment()->Lookup(1); 4791 context->ReturnValue(rhs); 4792 } else if (context->IsEffect()) { 4793 Goto(function_return(), state); 4794 } else { 4795 DCHECK(context->IsValue()); 4796 HValue* rhs = environment()->arguments_environment()->Lookup(1); 4797 AddLeaveInlined(rhs, state); 4798 } 4799 } else { 4800 // Return from a normal inlined function. Visit the subexpression in the 4801 // expression context of the call. 
4802 if (context->IsTest()) { 4803 TestContext* test = TestContext::cast(context); 4804 VisitForControl(stmt->expression(), test->if_true(), test->if_false()); 4805 } else if (context->IsEffect()) { 4806 // Visit in value context and ignore the result. This is needed to keep 4807 // environment in sync with full-codegen since some visitors (e.g. 4808 // VisitCountOperation) use the operand stack differently depending on 4809 // context. 4810 CHECK_ALIVE(VisitForValue(stmt->expression())); 4811 Pop(); 4812 Goto(function_return(), state); 4813 } else { 4814 DCHECK(context->IsValue()); 4815 CHECK_ALIVE(VisitForValue(stmt->expression())); 4816 AddLeaveInlined(Pop(), state); 4817 } 4818 } 4819 set_current_block(NULL); 4820 } 4821 4822 4823 void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) { 4824 DCHECK(!HasStackOverflow()); 4825 DCHECK(current_block() != NULL); 4826 DCHECK(current_block()->HasPredecessor()); 4827 return Bailout(kWithStatement); 4828 } 4829 4830 4831 void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) { 4832 DCHECK(!HasStackOverflow()); 4833 DCHECK(current_block() != NULL); 4834 DCHECK(current_block()->HasPredecessor()); 4835 4836 ZoneList<CaseClause*>* clauses = stmt->cases(); 4837 int clause_count = clauses->length(); 4838 ZoneList<HBasicBlock*> body_blocks(clause_count, zone()); 4839 4840 CHECK_ALIVE(VisitForValue(stmt->tag())); 4841 Add<HSimulate>(stmt->EntryId()); 4842 HValue* tag_value = Top(); 4843 Type* tag_type = stmt->tag()->bounds().lower; 4844 4845 // 1. Build all the tests, with dangling true branches 4846 BailoutId default_id = BailoutId::None(); 4847 for (int i = 0; i < clause_count; ++i) { 4848 CaseClause* clause = clauses->at(i); 4849 if (clause->is_default()) { 4850 body_blocks.Add(NULL, zone()); 4851 if (default_id.IsNone()) default_id = clause->EntryId(); 4852 continue; 4853 } 4854 4855 // Generate a compare and branch. 
4856 CHECK_ALIVE(VisitForValue(clause->label())); 4857 HValue* label_value = Pop(); 4858 4859 Type* label_type = clause->label()->bounds().lower; 4860 Type* combined_type = clause->compare_type(); 4861 HControlInstruction* compare = BuildCompareInstruction( 4862 Token::EQ_STRICT, tag_value, label_value, tag_type, label_type, 4863 combined_type, 4864 ScriptPositionToSourcePosition(stmt->tag()->position()), 4865 ScriptPositionToSourcePosition(clause->label()->position()), 4866 PUSH_BEFORE_SIMULATE, clause->id()); 4867 4868 HBasicBlock* next_test_block = graph()->CreateBasicBlock(); 4869 HBasicBlock* body_block = graph()->CreateBasicBlock(); 4870 body_blocks.Add(body_block, zone()); 4871 compare->SetSuccessorAt(0, body_block); 4872 compare->SetSuccessorAt(1, next_test_block); 4873 FinishCurrentBlock(compare); 4874 4875 set_current_block(body_block); 4876 Drop(1); // tag_value 4877 4878 set_current_block(next_test_block); 4879 } 4880 4881 // Save the current block to use for the default or to join with the 4882 // exit. 4883 HBasicBlock* last_block = current_block(); 4884 Drop(1); // tag_value 4885 4886 // 2. Loop over the clauses and the linked list of tests in lockstep, 4887 // translating the clause bodies. 4888 HBasicBlock* fall_through_block = NULL; 4889 4890 BreakAndContinueInfo break_info(stmt, scope()); 4891 { BreakAndContinueScope push(&break_info, this); 4892 for (int i = 0; i < clause_count; ++i) { 4893 CaseClause* clause = clauses->at(i); 4894 4895 // Identify the block where normal (non-fall-through) control flow 4896 // goes to. 4897 HBasicBlock* normal_block = NULL; 4898 if (clause->is_default()) { 4899 if (last_block == NULL) continue; 4900 normal_block = last_block; 4901 last_block = NULL; // Cleared to indicate we've handled it. 
4902 } else { 4903 normal_block = body_blocks[i]; 4904 } 4905 4906 if (fall_through_block == NULL) { 4907 set_current_block(normal_block); 4908 } else { 4909 HBasicBlock* join = CreateJoin(fall_through_block, 4910 normal_block, 4911 clause->EntryId()); 4912 set_current_block(join); 4913 } 4914 4915 CHECK_BAILOUT(VisitStatements(clause->statements())); 4916 fall_through_block = current_block(); 4917 } 4918 } 4919 4920 // Create an up-to-3-way join. Use the break block if it exists since 4921 // it's already a join block. 4922 HBasicBlock* break_block = break_info.break_block(); 4923 if (break_block == NULL) { 4924 set_current_block(CreateJoin(fall_through_block, 4925 last_block, 4926 stmt->ExitId())); 4927 } else { 4928 if (fall_through_block != NULL) Goto(fall_through_block, break_block); 4929 if (last_block != NULL) Goto(last_block, break_block); 4930 break_block->SetJoinId(stmt->ExitId()); 4931 set_current_block(break_block); 4932 } 4933 } 4934 4935 4936 void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt, 4937 HBasicBlock* loop_entry) { 4938 Add<HSimulate>(stmt->StackCheckId()); 4939 HStackCheck* stack_check = 4940 HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch)); 4941 DCHECK(loop_entry->IsLoopHeader()); 4942 loop_entry->loop_information()->set_stack_check(stack_check); 4943 CHECK_BAILOUT(Visit(stmt->body())); 4944 } 4945 4946 4947 void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) { 4948 DCHECK(!HasStackOverflow()); 4949 DCHECK(current_block() != NULL); 4950 DCHECK(current_block()->HasPredecessor()); 4951 DCHECK(current_block() != NULL); 4952 HBasicBlock* loop_entry = BuildLoopEntry(stmt); 4953 4954 BreakAndContinueInfo break_info(stmt, scope()); 4955 { 4956 BreakAndContinueScope push(&break_info, this); 4957 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry)); 4958 } 4959 HBasicBlock* body_exit = 4960 JoinContinue(stmt, current_block(), break_info.continue_block()); 4961 HBasicBlock* loop_successor = NULL; 
4962 if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) { 4963 set_current_block(body_exit); 4964 loop_successor = graph()->CreateBasicBlock(); 4965 if (stmt->cond()->ToBooleanIsFalse()) { 4966 loop_entry->loop_information()->stack_check()->Eliminate(); 4967 Goto(loop_successor); 4968 body_exit = NULL; 4969 } else { 4970 // The block for a true condition, the actual predecessor block of the 4971 // back edge. 4972 body_exit = graph()->CreateBasicBlock(); 4973 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor)); 4974 } 4975 if (body_exit != NULL && body_exit->HasPredecessor()) { 4976 body_exit->SetJoinId(stmt->BackEdgeId()); 4977 } else { 4978 body_exit = NULL; 4979 } 4980 if (loop_successor->HasPredecessor()) { 4981 loop_successor->SetJoinId(stmt->ExitId()); 4982 } else { 4983 loop_successor = NULL; 4984 } 4985 } 4986 HBasicBlock* loop_exit = CreateLoop(stmt, 4987 loop_entry, 4988 body_exit, 4989 loop_successor, 4990 break_info.break_block()); 4991 set_current_block(loop_exit); 4992 } 4993 4994 4995 void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) { 4996 DCHECK(!HasStackOverflow()); 4997 DCHECK(current_block() != NULL); 4998 DCHECK(current_block()->HasPredecessor()); 4999 DCHECK(current_block() != NULL); 5000 HBasicBlock* loop_entry = BuildLoopEntry(stmt); 5001 5002 // If the condition is constant true, do not generate a branch. 
5003 HBasicBlock* loop_successor = NULL; 5004 if (!stmt->cond()->ToBooleanIsTrue()) { 5005 HBasicBlock* body_entry = graph()->CreateBasicBlock(); 5006 loop_successor = graph()->CreateBasicBlock(); 5007 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor)); 5008 if (body_entry->HasPredecessor()) { 5009 body_entry->SetJoinId(stmt->BodyId()); 5010 set_current_block(body_entry); 5011 } 5012 if (loop_successor->HasPredecessor()) { 5013 loop_successor->SetJoinId(stmt->ExitId()); 5014 } else { 5015 loop_successor = NULL; 5016 } 5017 } 5018 5019 BreakAndContinueInfo break_info(stmt, scope()); 5020 if (current_block() != NULL) { 5021 BreakAndContinueScope push(&break_info, this); 5022 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry)); 5023 } 5024 HBasicBlock* body_exit = 5025 JoinContinue(stmt, current_block(), break_info.continue_block()); 5026 HBasicBlock* loop_exit = CreateLoop(stmt, 5027 loop_entry, 5028 body_exit, 5029 loop_successor, 5030 break_info.break_block()); 5031 set_current_block(loop_exit); 5032 } 5033 5034 5035 void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) { 5036 DCHECK(!HasStackOverflow()); 5037 DCHECK(current_block() != NULL); 5038 DCHECK(current_block()->HasPredecessor()); 5039 if (stmt->init() != NULL) { 5040 CHECK_ALIVE(Visit(stmt->init())); 5041 } 5042 DCHECK(current_block() != NULL); 5043 HBasicBlock* loop_entry = BuildLoopEntry(stmt); 5044 5045 HBasicBlock* loop_successor = NULL; 5046 if (stmt->cond() != NULL) { 5047 HBasicBlock* body_entry = graph()->CreateBasicBlock(); 5048 loop_successor = graph()->CreateBasicBlock(); 5049 CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor)); 5050 if (body_entry->HasPredecessor()) { 5051 body_entry->SetJoinId(stmt->BodyId()); 5052 set_current_block(body_entry); 5053 } 5054 if (loop_successor->HasPredecessor()) { 5055 loop_successor->SetJoinId(stmt->ExitId()); 5056 } else { 5057 loop_successor = NULL; 5058 } 5059 } 5060 5061 BreakAndContinueInfo 
break_info(stmt, scope()); 5062 if (current_block() != NULL) { 5063 BreakAndContinueScope push(&break_info, this); 5064 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry)); 5065 } 5066 HBasicBlock* body_exit = 5067 JoinContinue(stmt, current_block(), break_info.continue_block()); 5068 5069 if (stmt->next() != NULL && body_exit != NULL) { 5070 set_current_block(body_exit); 5071 CHECK_BAILOUT(Visit(stmt->next())); 5072 body_exit = current_block(); 5073 } 5074 5075 HBasicBlock* loop_exit = CreateLoop(stmt, 5076 loop_entry, 5077 body_exit, 5078 loop_successor, 5079 break_info.break_block()); 5080 set_current_block(loop_exit); 5081 } 5082 5083 5084 void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) { 5085 DCHECK(!HasStackOverflow()); 5086 DCHECK(current_block() != NULL); 5087 DCHECK(current_block()->HasPredecessor()); 5088 5089 if (!FLAG_optimize_for_in) { 5090 return Bailout(kForInStatementOptimizationIsDisabled); 5091 } 5092 5093 if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) { 5094 return Bailout(kForInStatementIsNotFastCase); 5095 } 5096 5097 if (!stmt->each()->IsVariableProxy() || 5098 !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) { 5099 return Bailout(kForInStatementWithNonLocalEachVariable); 5100 } 5101 5102 Variable* each_var = stmt->each()->AsVariableProxy()->var(); 5103 5104 CHECK_ALIVE(VisitForValue(stmt->enumerable())); 5105 HValue* enumerable = Top(); // Leave enumerable at the top. 
5106 5107 HInstruction* map = Add<HForInPrepareMap>(enumerable); 5108 Add<HSimulate>(stmt->PrepareId()); 5109 5110 HInstruction* array = Add<HForInCacheArray>( 5111 enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex); 5112 5113 HInstruction* enum_length = Add<HMapEnumLength>(map); 5114 5115 HInstruction* start_index = Add<HConstant>(0); 5116 5117 Push(map); 5118 Push(array); 5119 Push(enum_length); 5120 Push(start_index); 5121 5122 HInstruction* index_cache = Add<HForInCacheArray>( 5123 enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex); 5124 HForInCacheArray::cast(array)->set_index_cache( 5125 HForInCacheArray::cast(index_cache)); 5126 5127 HBasicBlock* loop_entry = BuildLoopEntry(stmt); 5128 5129 HValue* index = environment()->ExpressionStackAt(0); 5130 HValue* limit = environment()->ExpressionStackAt(1); 5131 5132 // Check that we still have more keys. 5133 HCompareNumericAndBranch* compare_index = 5134 New<HCompareNumericAndBranch>(index, limit, Token::LT); 5135 compare_index->set_observed_input_representation( 5136 Representation::Smi(), Representation::Smi()); 5137 5138 HBasicBlock* loop_body = graph()->CreateBasicBlock(); 5139 HBasicBlock* loop_successor = graph()->CreateBasicBlock(); 5140 5141 compare_index->SetSuccessorAt(0, loop_body); 5142 compare_index->SetSuccessorAt(1, loop_successor); 5143 FinishCurrentBlock(compare_index); 5144 5145 set_current_block(loop_successor); 5146 Drop(5); 5147 5148 set_current_block(loop_body); 5149 5150 HValue* key = Add<HLoadKeyed>( 5151 environment()->ExpressionStackAt(2), // Enum cache. 5152 environment()->ExpressionStackAt(0), // Iteration index. 5153 environment()->ExpressionStackAt(0), 5154 FAST_ELEMENTS); 5155 5156 // Check if the expected map still matches that of the enumerable. 5157 // If not just deoptimize. 
5158 Add<HCheckMapValue>(environment()->ExpressionStackAt(4), 5159 environment()->ExpressionStackAt(3)); 5160 5161 Bind(each_var, key); 5162 5163 BreakAndContinueInfo break_info(stmt, scope(), 5); 5164 { 5165 BreakAndContinueScope push(&break_info, this); 5166 CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry)); 5167 } 5168 5169 HBasicBlock* body_exit = 5170 JoinContinue(stmt, current_block(), break_info.continue_block()); 5171 5172 if (body_exit != NULL) { 5173 set_current_block(body_exit); 5174 5175 HValue* current_index = Pop(); 5176 Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1())); 5177 body_exit = current_block(); 5178 } 5179 5180 HBasicBlock* loop_exit = CreateLoop(stmt, 5181 loop_entry, 5182 body_exit, 5183 loop_successor, 5184 break_info.break_block()); 5185 5186 set_current_block(loop_exit); 5187 } 5188 5189 5190 void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) { 5191 DCHECK(!HasStackOverflow()); 5192 DCHECK(current_block() != NULL); 5193 DCHECK(current_block()->HasPredecessor()); 5194 return Bailout(kForOfStatement); 5195 } 5196 5197 5198 void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) { 5199 DCHECK(!HasStackOverflow()); 5200 DCHECK(current_block() != NULL); 5201 DCHECK(current_block()->HasPredecessor()); 5202 return Bailout(kTryCatchStatement); 5203 } 5204 5205 5206 void HOptimizedGraphBuilder::VisitTryFinallyStatement( 5207 TryFinallyStatement* stmt) { 5208 DCHECK(!HasStackOverflow()); 5209 DCHECK(current_block() != NULL); 5210 DCHECK(current_block()->HasPredecessor()); 5211 return Bailout(kTryFinallyStatement); 5212 } 5213 5214 5215 void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) { 5216 DCHECK(!HasStackOverflow()); 5217 DCHECK(current_block() != NULL); 5218 DCHECK(current_block()->HasPredecessor()); 5219 return Bailout(kDebuggerStatement); 5220 } 5221 5222 5223 void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) { 5224 UNREACHABLE(); 5225 } 5226 5227 
// Emits a function literal, building its SharedFunctionInfo on demand. The
// recursive BuildFunctionInfo compilation may itself overflow the stack, in
// which case we return without emitting anything.
void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Handle<SharedFunctionInfo> shared_info = expr->shared_info();
  if (shared_info.is_null()) {
    shared_info =
        Compiler::BuildFunctionInfo(expr, current_info()->script(), top_info());
  }
  // We also have a stack overflow if the recursive compilation did.
  if (HasStackOverflow()) return;
  HFunctionLiteral* instr =
      New<HFunctionLiteral>(shared_info, expr->pretenure());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Class literals are not supported by the optimizing compiler; bail out.
void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kClassLiteral);
}


// Native function literals are not supported by the optimizing compiler.
void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  return Bailout(kNativeFunctionLiteral);
}


// Builds a conditional (?:) expression as a branch diamond. In a test
// context the arms flow directly to the test's targets, so no join or value
// is produced here.
void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HBasicBlock* cond_true = graph()->CreateBasicBlock();
  HBasicBlock* cond_false = graph()->CreateBasicBlock();
  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));

  // Visit the true and false subexpressions in the same AST context as the
  // whole expression.
  if (cond_true->HasPredecessor()) {
    cond_true->SetJoinId(expr->ThenId());
    set_current_block(cond_true);
    CHECK_BAILOUT(Visit(expr->then_expression()));
    cond_true = current_block();
  } else {
    cond_true = NULL;
  }

  if (cond_false->HasPredecessor()) {
    cond_false->SetJoinId(expr->ElseId());
    set_current_block(cond_false);
    CHECK_BAILOUT(Visit(expr->else_expression()));
    cond_false = current_block();
  } else {
    cond_false = NULL;
  }

  if (!ast_context()->IsTest()) {
    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
    set_current_block(join);
    if (join != NULL && !ast_context()->IsEffect()) {
      return ast_context()->ReturnValue(Pop());
    }
  }
}


// Decides whether a global variable access can be compiled as a direct
// property-cell access (kUseCell) or must go through a generic IC
// (kUseGeneric). Stores to read-only properties always use the generic path.
HOptimizedGraphBuilder::GlobalPropertyAccess
HOptimizedGraphBuilder::LookupGlobalProperty(Variable* var, LookupIterator* it,
                                             PropertyAccessType access_type) {
  if (var->is_this() || !current_info()->has_global_object()) {
    return kUseGeneric;
  }

  switch (it->state()) {
    case LookupIterator::ACCESSOR:
    case LookupIterator::ACCESS_CHECK:
    case LookupIterator::INTERCEPTOR:
    case LookupIterator::NOT_FOUND:
      return kUseGeneric;
    case LookupIterator::DATA:
      if (access_type == STORE && it->IsReadOnly()) return kUseGeneric;
      return kUseCell;
    case LookupIterator::JSPROXY:
    case LookupIterator::TRANSITION:
      UNREACHABLE();
  }
  UNREACHABLE();
  return kUseGeneric;
}


// Emits loads of Context::PREVIOUS_INDEX starting at the current context
// until reaching the context of the scope that declared |var|.
HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
  DCHECK(var->IsContextSlot());
  HValue* context = environment()->context();
  int length = scope()->ContextChainLength(var->scope());
  while (length-- > 0) {
    context = Add<HLoadNamedField>(
        context, static_cast<HValue*>(NULL),
        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
  }
  return context;
}


// Emits a variable reference, dispatching on where the variable lives:
// unallocated (global), parameter/local, context slot, or dynamic lookup
// (unsupported -> bailout).
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  if (expr->is_this()) {
    current_info()->set_this_has_uses(true);
  }

  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be a DCHECK?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      Handle<GlobalObject> global(current_info()->global_object());
      LookupIterator it(global, variable->name(),
                        LookupIterator::OWN_SKIP_INTERCEPTOR);
      GlobalPropertyAccess type = LookupGlobalProperty(variable, &it, LOAD);

      if (type == kUseCell) {
        Handle<PropertyCell> cell = it.GetPropertyCell();
        if (cell->type()->IsConstant()) {
          // Embed the cell's constant directly; register a dependency so the
          // code is deoptimized if the cell's value ever changes.
          PropertyCell::AddDependentCompilationInfo(cell, top_info());
          Handle<Object> constant_object = cell->type()->AsConstant()->Value();
          if (constant_object->IsConsString()) {
            constant_object =
                String::Flatten(Handle<String>::cast(constant_object));
          }
          HConstant* constant = New<HConstant>(constant_object);
          return ast_context()->ReturnInstruction(constant, expr->id());
        } else {
          HLoadGlobalCell* instr =
              New<HLoadGlobalCell>(cell, it.property_details());
          return ast_context()->ReturnInstruction(instr, expr->id());
        }
      } else {
        // Generic path: load the global object from the context and use a
        // generic (IC-based) named load.
        HValue* global_object = Add<HLoadNamedField>(
            context(), static_cast<HValue*>(NULL),
            HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
        HLoadGlobalGeneric* instr =
            New<HLoadGlobalGeneric>(global_object,
                                    variable->name(),
                                    ast_context()->is_for_typeof());
        if (FLAG_vector_ics) {
          Handle<SharedFunctionInfo> current_shared =
              function_state()->compilation_info()->shared_info();
          instr->SetVectorAndSlot(
              handle(current_shared->feedback_vector(), isolate()),
              expr->VariableFeedbackSlot());
        }
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        // The hole marks a binding read before initialization (TDZ).
        DCHECK(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case Variable::CONTEXT: {
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot::Mode mode;
      switch (variable->mode()) {
        case LET:
        case CONST:
          mode = HLoadContextSlot::kCheckDeoptimize;
          break;
        case CONST_LEGACY:
          mode = HLoadContextSlot::kCheckReturnUndefined;
          break;
        default:
          mode = HLoadContextSlot::kNoCheck;
          break;
      }
      HLoadContextSlot* instr =
          new(zone()) HLoadContextSlot(context, variable->index(), mode);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case Variable::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
  }
}


// A literal becomes a constant in the graph.
void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  HConstant* instr = New<HConstant>(expr->value());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Emits a regexp literal referencing the closure's literals array.
void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  Handle<FixedArray> literals(closure->literals());
  HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
                                              expr->pattern(),
                                              expr->flags(),
                                              expr->literal_index());
  return ast_context()->ReturnInstruction(instr, expr->id());
}


// Property access on numbers and strings can always be inlined; otherwise
// the receiver must be a known JSObject map without dictionary properties
// or a named interceptor.
static bool CanInlinePropertyAccess(Type* type) {
  if (type->Is(Type::NumberOrString())) return true;
  if (!type->IsClass()) return false;
  Handle<Map> map = type->AsClass()->Map();
  return map->IsJSObjectMap() &&
      !map->is_dictionary_map() &&
      !map->has_named_interceptor();
}


// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
5475 static bool IsFastLiteral(Handle<JSObject> boilerplate, 5476 int max_depth, 5477 int* max_properties) { 5478 if (boilerplate->map()->is_deprecated() && 5479 !JSObject::TryMigrateInstance(boilerplate)) { 5480 return false; 5481 } 5482 5483 DCHECK(max_depth >= 0 && *max_properties >= 0); 5484 if (max_depth == 0) return false; 5485 5486 Isolate* isolate = boilerplate->GetIsolate(); 5487 Handle<FixedArrayBase> elements(boilerplate->elements()); 5488 if (elements->length() > 0 && 5489 elements->map() != isolate->heap()->fixed_cow_array_map()) { 5490 if (boilerplate->HasFastObjectElements()) { 5491 Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements); 5492 int length = elements->length(); 5493 for (int i = 0; i < length; i++) { 5494 if ((*max_properties)-- == 0) return false; 5495 Handle<Object> value(fast_elements->get(i), isolate); 5496 if (value->IsJSObject()) { 5497 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 5498 if (!IsFastLiteral(value_object, 5499 max_depth - 1, 5500 max_properties)) { 5501 return false; 5502 } 5503 } 5504 } 5505 } else if (!boilerplate->HasFastDoubleElements()) { 5506 return false; 5507 } 5508 } 5509 5510 Handle<FixedArray> properties(boilerplate->properties()); 5511 if (properties->length() > 0) { 5512 return false; 5513 } else { 5514 Handle<DescriptorArray> descriptors( 5515 boilerplate->map()->instance_descriptors()); 5516 int limit = boilerplate->map()->NumberOfOwnDescriptors(); 5517 for (int i = 0; i < limit; i++) { 5518 PropertyDetails details = descriptors->GetDetails(i); 5519 if (details.type() != FIELD) continue; 5520 int index = descriptors->GetFieldIndex(i); 5521 if ((*max_properties)-- == 0) return false; 5522 Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate); 5523 if (value->IsJSObject()) { 5524 Handle<JSObject> value_object = Handle<JSObject>::cast(value); 5525 if (!IsFastLiteral(value_object, 5526 max_depth - 1, 5527 max_properties)) { 5528 return false; 5529 } 5530 } 
    }
  }
  return true;
}


// Emits Hydrogen code for an object literal: the literal object itself is
// produced either by a fast deep copy of the cached boilerplate or by a
// call to the %CreateObjectLiteral runtime function, followed by explicit
// stores for all properties that are not compile-time constants.
void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  expr->BuildConstantProperties(isolate());
  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
  HInstruction* literal;

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
                               isolate());
  Handle<AllocationSite> site;
  Handle<JSObject> boilerplate;
  if (!literals_cell->IsUndefined()) {
    // Retrieve the boilerplate
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
                                   isolate());
  }

  if (!boilerplate.is_null() &&
      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
    AllocationSiteUsageContext usage_context(isolate(), site, false);
    usage_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate, &usage_context);
    usage_context.ExitScope(site, boilerplate);
  } else {
    // Slow path: call into the runtime to create the literal object.
    NoObservableSideEffectsScope no_effects(this);
    Handle<FixedArray> closure_literals(closure->literals(), isolate());
    Handle<FixedArray> constant_properties = expr->constant_properties();
    int literal_index = expr->literal_index();
    int flags = expr->fast_elements()
        ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
    flags |= expr->has_function()
        ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;

    Add<HPushArguments>(Add<HConstant>(closure_literals),
                        Add<HConstant>(literal_index),
                        Add<HConstant>(constant_properties),
                        Add<HConstant>(flags));

    // TODO(mvstanton): Add a flag to turn off creation of any
    // AllocationMementos for this call: we are in crankshaft and should have
    // learned enough about transition behavior to stop emitting mementos.
    Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);
  }

  // The object is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);

  expr->CalculateEmitStore(zone());

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time constant properties were already set in the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();

    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            CHECK_ALIVE(VisitForValue(value));
            HValue* value = Pop();
            Handle<Map> map = property->GetReceiverType();
            Handle<String> name = property->key()->AsPropertyName();
            HInstruction* store;
            if (map.is_null()) {
              // If we don't know the monomorphic type, do a generic store.
              CHECK_ALIVE(store = BuildNamedGeneric(
                  STORE, NULL, literal, name, value));
            } else {
              // Receiver type is known: try an inlined monomorphic store,
              // guarded by a map check on the literal.
              PropertyAccessInfo info(this, STORE, ToType(map), name);
              if (info.CanAccessMonomorphic()) {
                HValue* checked_literal = Add<HCheckMaps>(literal, map);
                DCHECK(!info.IsAccessor());
                store = BuildMonomorphicAccess(
                    &info, literal, checked_literal, value,
                    BailoutId::None(), BailoutId::None());
              } else {
                CHECK_ALIVE(store = BuildNamedGeneric(
                    STORE, NULL, literal, name, value));
              }
            }
            AddInstruction(store);
            // Stores with observable side effects need a simulate so the
            // deoptimizer can resume right after them.
            if (store->HasObservableSideEffects()) {
              Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
            }
          } else {
            // The store is shadowed by a later one; evaluate for effect only.
            CHECK_ALIVE(VisitForEffect(value));
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }

  if (expr->has_function()) {
    // Return the result of the transformation to fast properties
    // instead of the original since this operation changes the map
    // of the object. This makes sure that the original object won't
    // be used by other optimized code before it is transformed
    // (e.g. because of code motion).
    HToFastProperties* result = Add<HToFastProperties>(Pop());
    return ast_context()->ReturnValue(result);
  } else {
    return ast_context()->ReturnValue(Pop());
  }
}


// Emits Hydrogen code for an array literal: the array is produced either by
// a fast deep copy of the cached boilerplate or by a call to the
// %CreateArrayLiteral runtime function, followed by keyed stores for all
// elements that are not compile-time constants.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  DCHECK(!HasStackOverflow());
  DCHECK(current_block() != NULL);
  DCHECK(current_block()->HasPredecessor());
  expr->BuildConstantElements(isolate());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
  bool uninitialized = false;
  Handle<Object> literals_cell(literals->get(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (literals_cell->IsUndefined()) {
    // No cached boilerplate yet: create it (plus its AllocationSite) now
    // and store it back into the literals array.
    uninitialized = true;
    Handle<Object> raw_boilerplate;
    ASSIGN_RETURN_ON_EXCEPTION_VALUE(
        isolate(), raw_boilerplate,
        Runtime::CreateArrayLiteralBoilerplate(
            isolate(), literals, expr->constant_elements()),
        Bailout(kArrayBoilerplateCreationFailed));

    boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
    AllocationSiteCreationContext creation_context(isolate());
    site = creation_context.EnterNewScope();
    if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }
    creation_context.ExitScope(site, boilerplate_object);
    literals->set(expr->literal_index(), *site);

    if (boilerplate_object->elements()->map() ==
        isolate()->heap()->fixed_cow_array_map()) {
      isolate()->counters()->cow_arrays_created_runtime()->Increment();
    }
  } else {
    // Cached: the AllocationSite points at the boilerplate object.
    DCHECK(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  DCHECK(!boilerplate_object.is_null());
  DCHECK(site->SitePointsToLiteral());

  ElementsKind boilerplate_elements_kind =
      boilerplate_object->GetElementsKind();

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (IsFastLiteral(boilerplate_object,
                    kMaxFastLiteralDepth,
                    &max_properties)) {
    AllocationSiteUsageContext usage_context(isolate(), site, false);
    usage_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &usage_context);
    usage_context.ExitScope(site, boilerplate_object);
  } else {
    NoObservableSideEffectsScope no_effects(this);
    // Boilerplate already exists and constant elements are never accessed,
    // pass an empty fixed array to the runtime function instead.
    Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
    int literal_index = expr->literal_index();
    int flags = expr->depth() == 1
        ? ArrayLiteral::kShallowElements
        : ArrayLiteral::kNoFlags;
    flags |= ArrayLiteral::kDisableMementos;

    Add<HPushArguments>(Add<HConstant>(literals),
                        Add<HConstant>(literal_index),
                        Add<HConstant>(constants),
                        Add<HConstant>(flags));

    // TODO(mvstanton): Consider a flag to turn off creation of any
    // AllocationMementos for this call: we are in crankshaft and should have
    // learned enough about transition behavior to stop emitting mementos.
    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);

    // De-opt if elements kind changed from boilerplate_elements_kind.
    Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
    literal = Add<HCheckMaps>(literal, map);
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  // The literal index is on the stack, too.
  Push(Add<HConstant>(expr->literal_index()));

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // Reload the elements backing store; the subexpression may have had
    // side effects that invalidated a previously loaded one.
    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    switch (boilerplate_elements_kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS: {
        HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
                                              boilerplate_elements_kind);
        instr->SetUninitialized(uninitialized);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  Drop(1);  // array literal index
  return ast_context()->ReturnValue(Pop());
}


// Adds a heap-object check followed by a map check against |map| on
// |object| and returns the map-check instruction.
HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
                                                Handle<Map> map) {
  BuildCheckHeapObject(object);
  return Add<HCheckMaps>(object, map);
}


// Builds a load of the named field described by |info| from
// |checked_object|.  Loads of read-only, non-configurable data properties
// on constant receivers are folded to the constant value.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object) {
  // See if this is a load for an immutable property
  if (checked_object->ActualValue()->IsConstant()) {
    Handle<Object> object(
        HConstant::cast(checked_object->ActualValue())->handle(isolate()));

    if (object->IsJSObject()) {
      LookupIterator it(object, info->name(),
                        LookupIterator::OWN_SKIP_INTERCEPTOR);
      Handle<Object> value = JSObject::GetDataProperty(&it);
      // Only read-only, non-configurable properties are truly immutable
      // and safe to constant-fold.
      if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
        return New<HConstant>(value);
      }
    }
  }

  HObjectAccess access = info->access();
  if (access.representation().IsDouble()) {
    // Load the heap number.
    checked_object = Add<HLoadNamedField>(
        checked_object, static_cast<HValue*>(NULL),
        access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }

  // If stable field maps were collected, attach them (and the field type)
  // to the load; otherwise emit a plain field load.
  SmallMapList* map_list = info->field_maps();
  if (map_list->length() == 0) {
    return New<HLoadNamedField>(checked_object, checked_object, access);
  }

  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
  for (int i = 0; i < map_list->length(); ++i) {
    maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
  }
  return New<HLoadNamedField>(
      checked_object, checked_object, access, maps, info->field_type());
}


// Builds a store to the named field described by |info|.  Double fields are
// stored through a (possibly freshly allocated) mutable HeapNumber box;
// transitioning stores additionally install the transition map.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    PropertyAccessInfo* info,
    HValue* checked_object,
    HValue* value) {
  bool transition_to_field = info->IsTransition();
  // TODO(verwaest): Move this logic into PropertyAccessInfo.
  HObjectAccess field_access = info->access();

  HStoreNamedField *instr;
  if (field_access.representation().IsDouble()) {
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);

      // TODO(hpayer): Allocation site pretenuring support.
      HInstruction* heap_number = Add<HAllocate>(heap_number_size,
                                                 HType::HeapObject(),
                                                 NOT_TENURED,
                                                 MUTABLE_HEAP_NUMBER_TYPE);
      AddStoreMapConstant(
          heap_number, isolate()->factory()->mutable_heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      // Store the freshly allocated box into the object's field.
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number = Add<HLoadNamedField>(
          checked_object, static_cast<HValue*>(NULL), heap_number_access);
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value, STORE_TO_INITIALIZED_ENTRY);
    }
  } else {
    if (field_access.representation().IsHeapObject()) {
      BuildCheckHeapObject(value);
    }

    // If the field has known stable maps, check the stored value against
    // them so the field's type stays valid.
    if (!info->field_maps()->is_empty()) {
      DCHECK(field_access.representation().IsHeapObject());
      value = Add<HCheckMaps>(value, info->field_maps());
    }

    // This is a normal store.
    instr = New<HStoreNamedField>(
        checked_object->ActualValue(), field_access, value,
        transition_to_field ? INITIALIZING_STORE : STORE_TO_INITIALIZED_ENTRY);
  }

  if (transition_to_field) {
    Handle<Map> transition(info->transition());
    DCHECK(!transition->is_deprecated());
    instr->SetTransition(Add<HConstant>(transition));
  }
  return instr;
}


// Returns true if |info| describes a property access that can be merged
// with this one into a single polymorphic case (same kind of access, same
// field layout / accessor / constant).
bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(type_)) return false;

  // Currently only handle Type::Number as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (type_->Is(Type::Number())) return false;

  // Values are only compatible for monomorphic load if they all behave the same
  // regarding value wrappers.
  if (type_->Is(Type::NumberOrString())) {
    if (!info->type_->Is(Type::NumberOrString())) return false;
  } else {
    if (info->type_->Is(Type::NumberOrString())) return false;
  }

  if (!LookupDescriptor()) return false;

  if (!IsFound()) {
    // Both lookups miss (or the other one hit on the prototype chain):
    // compatible only if they share the same prototype.
    return (!info->IsFound() || info->has_holder()) &&
        map()->prototype() == info->map()->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  if (IsAccessor()) {
    return accessor_.is_identical_to(info->accessor_) &&
        api_holder_.is_identical_to(info->api_holder_);
  }

  if (IsConstant()) {
    return constant_.is_identical_to(info->constant_);
  }

  DCHECK(IsField());
  if (!info->IsField()) return false;

  Representation r = access_.representation();
  if (IsLoad()) {
    if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  } else {
    if (!info->access_.representation().IsCompatibleForStore(r)) return false;
  }
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  if (IsLoad()) {
    // For loads, merge the field-map sets: an empty set means "no
    // restriction" and clears the other side; otherwise take the union.
    if (field_maps_.is_empty()) {
      info->field_maps_.Clear();
    } else if (!info->field_maps_.is_empty()) {
      for (int i = 0; i < field_maps_.length(); ++i) {
        info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
      }
      info->field_maps_.Sort();
    }
  } else {
    // We can only merge stores that agree on their field maps. The comparison
    // below is safe, since we keep the field maps sorted.
    if (field_maps_.length() != info->field_maps_.length()) return false;
    for (int i = 0; i < field_maps_.length(); ++i) {
      if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
        return false;
      }
    }
  }
  info->GeneralizeRepresentation(r);
  info->field_type_ = info->field_type_.Combine(field_type_);
  return true;
}


// For class types, looks up |name_| in the map's own descriptors and
// interprets the result; trivially succeeds for non-class types.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
  if (!type_->IsClass()) return true;
  map()->LookupDescriptor(NULL, *name_, &lookup_);
  return LoadResult(map());
}


// Interprets the current lookup result against |map|: fills in the field
// access (plus field maps), the accessor, or the constant.  Returns false
// if the result cannot be used (read-only store, non-function accessor).
bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
  // Cannot store to a read-only property.
  if (!IsLoad() && IsProperty() && IsReadOnly()) {
    return false;
  }

  if (IsField()) {
    // Construct the object field access.
    int index = GetLocalFieldIndexFromMap(map);
    access_ = HObjectAccess::ForField(map, index, representation(), name_);

    // Load field map for heap objects.
    LoadFieldMaps(map);
  } else if (IsAccessor()) {
    Handle<Object> accessors = GetAccessorsFromMap(map);
    if (!accessors->IsAccessorPair()) return false;
    // Pick the getter for loads, the setter for stores.
    Object* raw_accessor =
        IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
                 : Handle<AccessorPair>::cast(accessors)->setter();
    if (!raw_accessor->IsJSFunction()) return false;
    Handle<JSFunction> accessor = handle(JSFunction::cast(raw_accessor));
    if (accessor->shared()->IsApiFunction()) {
      CallOptimization call_optimization(accessor);
      if (call_optimization.is_simple_api_call()) {
        CallOptimization::HolderLookup holder_lookup;
        Handle<Map> receiver_map = this->map();
        api_holder_ = call_optimization.LookupHolderOfExpectedType(
            receiver_map, &holder_lookup);
      }
    }
    accessor_ = accessor;
  } else if (IsConstant()) {
    constant_ = GetConstantFromMap(map);
  }

  return true;
}


// Collects the stable maps (and the HType) implied by the field's HeapType
// in |map|.  If any class in the field type is not stable, no maps are
// recorded and the field type stays Tagged.
void HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
    Handle<Map> map) {
  // Clear any previously collected field maps/type.
  field_maps_.Clear();
  field_type_ = HType::Tagged();

  // Figure out the field type from the accessor map.
  Handle<HeapType> field_type = GetFieldTypeFromMap(map);

  // Collect the (stable) maps from the field type.
  int num_field_maps = field_type->NumClasses();
  if (num_field_maps == 0) return;
  DCHECK(access_.representation().IsHeapObject());
  field_maps_.Reserve(num_field_maps, zone());
  HeapType::Iterator<Map> it = field_type->Classes();
  while (!it.Done()) {
    Handle<Map> field_map = it.Current();
    if (!field_map->is_stable()) {
      field_maps_.Clear();
      return;
    }
    field_maps_.Add(field_map, zone());
    it.Advance();
  }
  field_maps_.Sort();
  DCHECK_EQ(num_field_maps, field_maps_.length());

  // Determine field HType from field HeapType.
  field_type_ = HType::FromType<HeapType>(field_type);
  DCHECK(field_type_.IsHeapObject());

  // Add dependency on the map that introduced the field.
  Map::AddDependentCompilationInfo(GetFieldOwnerFromMap(map),
                                   DependentCode::kFieldTypeGroup, top_info());
}


// Walks the prototype chain looking for |name_|, recording each prototype
// as the holder.  Gives up (returns false) when a prototype's map cannot
// be handled for inlined access.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = this->map();

  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      // Best effort: try to bring the holder up to date first.
      JSObject::TryMigrateInstance(holder_);
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(ToType(map))) {
      lookup_.NotFound();
      return false;
    }
    map->LookupDescriptor(*holder_, *name_, &lookup_);
    if (IsFound()) return LoadResult(map);
  }
  // Property does not exist anywhere on the chain.
  lookup_.NotFound();
  return true;
}


// Returns true if an inlined monomorphic access can be generated for this
// (type, name) pair, filling in the access details as a side effect.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
  if (!CanInlinePropertyAccess(type_)) return false;
  if (IsJSObjectFieldAccessor()) return IsLoad();
  // "prototype" on a function with a prototype is handled specially
  // (HLoadFunctionPrototype) and only for loads.
  if (this->map()->function_with_prototype() &&
      !this->map()->has_non_instance_prototype() &&
      name_.is_identical_to(isolate()->factory()->prototype_string())) {
    return IsLoad();
  }
  if (!LookupDescriptor()) return false;
  if (IsFound()) return IsLoad() || !IsReadOnly();
  if (!LookupInPrototypes()) return false;
  if (IsLoad()) return true;

  if (IsAccessor()) return true;
  // For stores of a not-yet-existing property, look for a map transition
  // that adds the field.
  Handle<Map> map = this->map();
  map->LookupTransition(NULL, *name_, &lookup_);
  if (lookup_.IsTransitionToField() && map->unused_property_fields() > 0) {
    // Construct the object field access.
    int descriptor = transition()->LastAdded();
    // Field index is relative to the in-object properties of the map.
    int index =
        transition()->instance_descriptors()->GetFieldIndex(descriptor) -
        map->inobject_properties();
    PropertyDetails details =
        transition()->instance_descriptors()->GetDetails(descriptor);
    Representation representation = details.representation();
    access_ = HObjectAccess::ForField(map, index, representation, name_);

    // Load field map for heap objects.
    LoadFieldMaps(transition());
    return true;
  }
  return false;
}


// Returns true if every map in |types| can be served by the single access
// pattern computed for the first map (this info), i.e. the whole type set
// can be treated as monomorphic.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
    SmallMapList* types) {
  DCHECK(type_->Is(ToType(types->first())));
  if (!CanAccessMonomorphic()) return false;
  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
  if (types->length() > kMaxLoadPolymorphism) return false;

  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (GetJSObjectFieldAccess(&access)) {
    // JSObject-internal field (e.g. array length): all types must map to
    // the exact same access.
    for (int i = 1; i < types->length(); ++i) {
      PropertyAccessInfo test_info(
          builder_, access_type_, ToType(types->at(i)), name_);
      HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
      if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
      if (!access.Equals(test_access)) return false;
    }
    return true;
  }

  // Currently only handle Type::Number as a polymorphic case.
  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
  // instruction.
  if (type_->Is(Type::Number())) return false;

  // Multiple maps cannot transition to the same target map.
  DCHECK(!IsLoad() || !IsTransition());
  if (IsTransition() && types->length() > 1) return false;

  for (int i = 1; i < types->length(); ++i) {
    PropertyAccessInfo test_info(
        builder_, access_type_, ToType(types->at(i)), name_);
    if (!test_info.IsCompatible(this)) return false;
  }

  return true;
}


// Returns the map to check against: for primitive receiver types the
// initial map of the corresponding root constructor (e.g. String), for
// class types the class map itself.
Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
  JSFunction* ctor = IC::GetRootConstructor(
      type_, current_info()->closure()->context()->native_context());
  if (ctor != NULL) return handle(ctor->initial_map());
  return type_->AsClass()->Map();
}


// Calls on number/string receivers to sloppy-mode, non-native functions
// need the receiver wrapped in its value-wrapper object (see WRAP_AND_CALL
// usage below).
static bool NeedsWrappingFor(Type* type, Handle<JSFunction> target) {
  return type->Is(Type::NumberOrString()) &&
      target->shared()->strict_mode() == SLOPPY &&
      !target->shared()->native();
}


// Emits the code for a single monomorphic property access described by
// |info|: a direct field load/store, a function-prototype load, an inlined
// or called accessor, or a constant.  Returns NULL when an accessor was
// inlined (or on stack overflow).
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicAccess(
    PropertyAccessInfo* info,
    HValue* object,
    HValue* checked_object,
    HValue* value,
    BailoutId ast_id,
    BailoutId return_id,
    bool can_inline_accessor) {

  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    DCHECK(info->IsLoad());
    return New<HLoadNamedField>(object, checked_object, access);
  }

  if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
      info->map()->function_with_prototype()) {
    DCHECK(!info->map()->has_non_instance_prototype());
    return New<HLoadFunctionPrototype>(checked_object);
  }

  // If the property lives on the prototype chain, guard the chain up to
  // the holder with map checks.
  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  if (!info->IsFound()) {
    DCHECK(info->IsLoad());
    return graph()->GetConstantUndefined();
  }

  if (info->IsField()) {
    if (info->IsLoad()) {
      return BuildLoadNamedField(info, checked_holder);
    } else {
      return BuildStoreNamedField(info, checked_object, value);
    }
  }

  if (info->IsTransition()) {
    DCHECK(!info->IsLoad());
    return BuildStoreNamedField(info, checked_object, value);
  }

  if (info->IsAccessor()) {
    // Receiver (and for setters the value) are passed as call arguments.
    Push(checked_object);
    int argument_count = 1;
    if (!info->IsLoad()) {
      argument_count = 2;
      Push(value);
    }

    if (NeedsWrappingFor(info->type(), info->accessor())) {
      // Primitive receiver needs wrapping: emit a call that wraps it.
      HValue* function = Add<HConstant>(info->accessor());
      PushArgumentsFromEnvironment(argument_count);
      return New<HCallFunction>(function, argument_count, WRAP_AND_CALL);
    } else if (FLAG_inline_accessors && can_inline_accessor) {
      bool success = info->IsLoad()
          ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
          : TryInlineSetter(
              info->accessor(), info->map(), ast_id, return_id, value);
      // NULL signals "already handled by inlining" (or bailed out).
      if (success || HasStackOverflow()) return NULL;
    }

    PushArgumentsFromEnvironment(argument_count);
    return BuildCallConstantFunction(info->accessor(), argument_count);
  }

  DCHECK(info->IsConstant());
  if (info->IsLoad()) {
    return New<HConstant>(info->constant());
  } else {
    // Storing over a constant: just check the value matches.
    return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
  }
}


// Emits a polymorphic named property access: a chain of map compares, one
// monomorphic access per handled map, joined at a common block; unhandled
// maps fall back to a generic IC or a hard deoptimization.
void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
    PropertyAccessType access_type,
    Expression* expr,
    BailoutId ast_id,
    BailoutId return_id,
    HValue* object,
    HValue* value,
    SmallMapList* types,
    Handle<String> name) {
  // Something did not match; must use a polymorphic load.
6260 int count = 0; 6261 HBasicBlock* join = NULL; 6262 HBasicBlock* number_block = NULL; 6263 bool handled_string = false; 6264 6265 bool handle_smi = false; 6266 STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism); 6267 int i; 6268 for (i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) { 6269 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name); 6270 if (info.type()->Is(Type::String())) { 6271 if (handled_string) continue; 6272 handled_string = true; 6273 } 6274 if (info.CanAccessMonomorphic()) { 6275 count++; 6276 if (info.type()->Is(Type::Number())) { 6277 handle_smi = true; 6278 break; 6279 } 6280 } 6281 } 6282 6283 if (i < types->length()) { 6284 count = -1; 6285 types->Clear(); 6286 } else { 6287 count = 0; 6288 } 6289 HControlInstruction* smi_check = NULL; 6290 handled_string = false; 6291 6292 for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) { 6293 PropertyAccessInfo info(this, access_type, ToType(types->at(i)), name); 6294 if (info.type()->Is(Type::String())) { 6295 if (handled_string) continue; 6296 handled_string = true; 6297 } 6298 if (!info.CanAccessMonomorphic()) continue; 6299 6300 if (count == 0) { 6301 join = graph()->CreateBasicBlock(); 6302 if (handle_smi) { 6303 HBasicBlock* empty_smi_block = graph()->CreateBasicBlock(); 6304 HBasicBlock* not_smi_block = graph()->CreateBasicBlock(); 6305 number_block = graph()->CreateBasicBlock(); 6306 smi_check = New<HIsSmiAndBranch>( 6307 object, empty_smi_block, not_smi_block); 6308 FinishCurrentBlock(smi_check); 6309 GotoNoSimulate(empty_smi_block, number_block); 6310 set_current_block(not_smi_block); 6311 } else { 6312 BuildCheckHeapObject(object); 6313 } 6314 } 6315 ++count; 6316 HBasicBlock* if_true = graph()->CreateBasicBlock(); 6317 HBasicBlock* if_false = graph()->CreateBasicBlock(); 6318 HUnaryControlInstruction* compare; 6319 6320 HValue* dependency; 6321 if (info.type()->Is(Type::Number())) { 6322 Handle<Map> heap_number_map = 
isolate()->factory()->heap_number_map(); 6323 compare = New<HCompareMap>(object, heap_number_map, if_true, if_false); 6324 dependency = smi_check; 6325 } else if (info.type()->Is(Type::String())) { 6326 compare = New<HIsStringAndBranch>(object, if_true, if_false); 6327 dependency = compare; 6328 } else { 6329 compare = New<HCompareMap>(object, info.map(), if_true, if_false); 6330 dependency = compare; 6331 } 6332 FinishCurrentBlock(compare); 6333 6334 if (info.type()->Is(Type::Number())) { 6335 GotoNoSimulate(if_true, number_block); 6336 if_true = number_block; 6337 } 6338 6339 set_current_block(if_true); 6340 6341 HInstruction* access = BuildMonomorphicAccess( 6342 &info, object, dependency, value, ast_id, 6343 return_id, FLAG_polymorphic_inlining); 6344 6345 HValue* result = NULL; 6346 switch (access_type) { 6347 case LOAD: 6348 result = access; 6349 break; 6350 case STORE: 6351 result = value; 6352 break; 6353 } 6354 6355 if (access == NULL) { 6356 if (HasStackOverflow()) return; 6357 } else { 6358 if (!access->IsLinked()) AddInstruction(access); 6359 if (!ast_context()->IsEffect()) Push(result); 6360 } 6361 6362 if (current_block() != NULL) Goto(join); 6363 set_current_block(if_false); 6364 } 6365 6366 // Finish up. Unconditionally deoptimize if we've handled all the maps we 6367 // know about and do not want to handle ones we've never seen. Otherwise 6368 // use a generic IC. 6369 if (count == types->length() && FLAG_deoptimize_uncommon_cases) { 6370 FinishExitWithHardDeoptimization("Uknown map in polymorphic access"); 6371 } else { 6372 HInstruction* instr = BuildNamedGeneric(access_type, expr, object, name, 6373 value); 6374 AddInstruction(instr); 6375 if (!ast_context()->IsEffect()) Push(access_type == LOAD ? 
instr : value); 6376 6377 if (join != NULL) { 6378 Goto(join); 6379 } else { 6380 Add<HSimulate>(ast_id, REMOVABLE_SIMULATE); 6381 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop()); 6382 return; 6383 } 6384 } 6385 6386 DCHECK(join != NULL); 6387 if (join->HasPredecessor()) { 6388 join->SetJoinId(ast_id); 6389 set_current_block(join); 6390 if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop()); 6391 } else { 6392 set_current_block(NULL); 6393 } 6394 } 6395 6396 6397 static bool ComputeReceiverTypes(Expression*